diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index ac9a2e75..ff261bad 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -3,7 +3,7 @@ FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
USER vscode
-RUN curl -sSf https://rye.astral.sh/get | RYE_VERSION="0.35.0" RYE_INSTALL_OPTION="--yes" bash
+RUN curl -sSf https://rye.astral.sh/get | RYE_VERSION="0.44.0" RYE_INSTALL_OPTION="--yes" bash
ENV PATH=/home/vscode/.rye/shims:$PATH
-RUN echo "[[ -d .venv ]] && source .venv/bin/activate" >> /home/vscode/.bashrc
+RUN echo "[[ -d .venv ]] && source .venv/bin/activate || export PATH=\$PATH" >> /home/vscode/.bashrc
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index bbeb30b1..c17fdc16 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -24,6 +24,9 @@
}
}
}
+ },
+ "features": {
+ "ghcr.io/devcontainers/features/node:1": {}
}
// Features to add to the dev container. More info: https://containers.dev/features.
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 40293964..8edf5a60 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,19 +1,23 @@
name: CI
on:
push:
- branches:
- - main
+ branches-ignore:
+ - 'generated'
+ - 'codegen/**'
+ - 'integrated/**'
+ - 'stl-preview-head/**'
+ - 'stl-preview-base/**'
pull_request:
- branches:
- - main
- - next
+ branches-ignore:
+ - 'stl-preview-head/**'
+ - 'stl-preview-base/**'
jobs:
lint:
+ timeout-minutes: 10
name: lint
- runs-on: ubuntu-latest
-
-
+ runs-on: ${{ github.repository == 'stainless-sdks/browserbase-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
+ if: github.event_name == 'push' || github.event.pull_request.head.repo.fork
steps:
- uses: actions/checkout@v4
@@ -22,7 +26,7 @@ jobs:
curl -sSf https://rye.astral.sh/get | bash
echo "$HOME/.rye/shims" >> $GITHUB_PATH
env:
- RYE_VERSION: '0.35.0'
+ RYE_VERSION: '0.44.0'
RYE_INSTALL_OPTION: '--yes'
- name: Install dependencies
@@ -30,10 +34,52 @@ jobs:
- name: Run lints
run: ./scripts/lint
+
+ build:
+ if: github.event_name == 'push' || github.event.pull_request.head.repo.fork
+ timeout-minutes: 10
+ name: build
+ permissions:
+ contents: read
+ id-token: write
+ runs-on: ${{ github.repository == 'stainless-sdks/browserbase-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Install Rye
+ run: |
+ curl -sSf https://rye.astral.sh/get | bash
+ echo "$HOME/.rye/shims" >> $GITHUB_PATH
+ env:
+ RYE_VERSION: '0.44.0'
+ RYE_INSTALL_OPTION: '--yes'
+
+ - name: Install dependencies
+ run: rye sync --all-features
+
+ - name: Run build
+ run: rye build
+
+ - name: Get GitHub OIDC Token
+ if: github.repository == 'stainless-sdks/browserbase-python'
+ id: github-oidc
+ uses: actions/github-script@v6
+ with:
+ script: core.setOutput('github_token', await core.getIDToken());
+
+ - name: Upload tarball
+ if: github.repository == 'stainless-sdks/browserbase-python'
+ env:
+ URL: https://pkg.stainless.com/s
+ AUTH: ${{ steps.github-oidc.outputs.github_token }}
+ SHA: ${{ github.sha }}
+ run: ./scripts/utils/upload-artifact.sh
+
test:
+ timeout-minutes: 10
name: test
- runs-on: ubuntu-latest
-
+ runs-on: ${{ github.repository == 'stainless-sdks/browserbase-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
+ if: github.event_name == 'push' || github.event.pull_request.head.repo.fork
steps:
- uses: actions/checkout@v4
@@ -42,7 +88,7 @@ jobs:
curl -sSf https://rye.astral.sh/get | bash
echo "$HOME/.rye/shims" >> $GITHUB_PATH
env:
- RYE_VERSION: '0.35.0'
+ RYE_VERSION: '0.44.0'
RYE_INSTALL_OPTION: '--yes'
- name: Bootstrap
@@ -50,4 +96,3 @@ jobs:
- name: Run tests
run: ./scripts/test
-
diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml
index db8cf944..b3c832c7 100644
--- a/.github/workflows/publish-pypi.yml
+++ b/.github/workflows/publish-pypi.yml
@@ -21,7 +21,7 @@ jobs:
curl -sSf https://rye.astral.sh/get | bash
echo "$HOME/.rye/shims" >> $GITHUB_PATH
env:
- RYE_VERSION: '0.35.0'
+ RYE_VERSION: '0.44.0'
RYE_INSTALL_OPTION: '--yes'
- name: Publish to PyPI
diff --git a/.gitignore b/.gitignore
index 46152338..117701e6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,4 @@
.prism.log
-.vscode
_dev
__pycache__
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index fea34540..3d362d5e 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "1.0.0"
+ ".": "1.5.0-alpha.1"
}
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index 1a6b2b54..e1cd805d 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,4 @@
configured_endpoints: 18
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/browserbase%2Fbrowserbase-b341dd9d5bb77c4f217b94b186763e730fd798fbb773a5e90bb4e2a8d4a2c822.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/browserbase%2Fbrowserbase-0b96e0120f7cf3fba797371433e15a08d14727c0526d718b728faee615624297.yml
+openapi_spec_hash: 8d007eed388933bf9d74c5488a56be41
+config_hash: b3ca4ec5b02e5333af51ebc2e9fdef1b
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000..5b010307
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,3 @@
+{
+ "python.analysis.importFormat": "relative",
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a17bbc56..76a56a68 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,298 @@
# Changelog
+## 1.5.0-alpha.1 (2025-10-07)
+
+Full Changelog: [v1.5.0-alpha.0...v1.5.0-alpha.1](https://github.com/browserbase/sdk-python/compare/v1.5.0-alpha.0...v1.5.0-alpha.1)
+
+### Features
+
+* **api:** api update ([3bdf24e](https://github.com/browserbase/sdk-python/commit/3bdf24e69fd14e6e488af830e6e5a7786c21640d))
+* **api:** manual updates ([99b1cfb](https://github.com/browserbase/sdk-python/commit/99b1cfb41a51af014f5c350f0850331cd73abf08))
+
+
+### Chores
+
+* do not install brew dependencies in ./scripts/bootstrap by default ([6915700](https://github.com/browserbase/sdk-python/commit/69157006cc0df8f9e5effd0f53d79df88fe14e7d))
+* **internal:** move mypy configurations to `pyproject.toml` file ([545938f](https://github.com/browserbase/sdk-python/commit/545938fde4ace7142c413f9e0ac25e3b9c717980))
+* **internal:** update pydantic dependency ([4dcad8e](https://github.com/browserbase/sdk-python/commit/4dcad8e96f1220e79f3e9b5cdee2e19dfb5a1e11))
+* **tests:** simplify `get_platform` test ([6421017](https://github.com/browserbase/sdk-python/commit/64210177c60ca05c5d0eead33c3ecee3f4d18718))
+* **types:** change optional parameter type from NotGiven to Omit ([a46d293](https://github.com/browserbase/sdk-python/commit/a46d293766d0eb89b93739af0fbbd038eea083bd))
+
+## 1.5.0-alpha.0 (2025-09-05)
+
+Full Changelog: [v1.4.0...v1.5.0-alpha.0](https://github.com/browserbase/sdk-python/compare/v1.4.0...v1.5.0-alpha.0)
+
+### Features
+
+* **api:** api update ([e94ddbd](https://github.com/browserbase/sdk-python/commit/e94ddbd8777b97d4e8ab193e1bf3eaad983ecec9))
+* **api:** api update ([28115fb](https://github.com/browserbase/sdk-python/commit/28115fb584336dbf5b08043ad8f9cf1d911240ea))
+* **api:** api update ([3209287](https://github.com/browserbase/sdk-python/commit/32092872a3d4d48824b4d77d517ffdb06470ad95))
+* **api:** api update ([f38e029](https://github.com/browserbase/sdk-python/commit/f38e02981ae0777cb3d922845902b2673dc832fa))
+* **api:** api update ([1d9f769](https://github.com/browserbase/sdk-python/commit/1d9f7694bc0d465ce758ddcec41359e9cd1a08ad))
+* **api:** api update ([d72f39f](https://github.com/browserbase/sdk-python/commit/d72f39fbe29342cfc77e9b224f2ad0a5a77aaae4))
+* **api:** api update ([6d449b3](https://github.com/browserbase/sdk-python/commit/6d449b3deb284a72528877a8729f4cf7a418275d))
+* **api:** api update ([8bd5f8b](https://github.com/browserbase/sdk-python/commit/8bd5f8bcca3a2e5baadfc06009546692e63eb744))
+* **api:** api update ([1ce99ef](https://github.com/browserbase/sdk-python/commit/1ce99efe89c1d0757ca3100cca8619faa4082f74))
+* **api:** api update ([1cbb849](https://github.com/browserbase/sdk-python/commit/1cbb8498bf70c15c001f620b821519216cbadd97))
+* **api:** manual updates ([5893fc6](https://github.com/browserbase/sdk-python/commit/5893fc6165cfd88378d6725317e30c7cb6faf8df))
+* **api:** manual updates ([074f06d](https://github.com/browserbase/sdk-python/commit/074f06d0dfb08554229348828afd2cc1defe94ee))
+* clean up environment call outs ([82c38c4](https://github.com/browserbase/sdk-python/commit/82c38c494a175c1b6b38bab3615916c30ba25d14))
+* **client:** add follow_redirects request option ([a8b0b5e](https://github.com/browserbase/sdk-python/commit/a8b0b5e4c6445e0e8c0d3673a090aabab09a50fd))
+* **client:** add support for aiohttp ([3516092](https://github.com/browserbase/sdk-python/commit/35160921e262f147cc723a754f14cfd9875603f5))
+* **client:** support file upload requests ([2f338f0](https://github.com/browserbase/sdk-python/commit/2f338f009e556ef9be05f49816b17cef138bda17))
+* improve future compat with pydantic v3 ([8b5256c](https://github.com/browserbase/sdk-python/commit/8b5256c801e1423a4daf6bf49de7509a32ebfde2))
+* **types:** replace List[str] with SequenceNotStr in params ([55083f6](https://github.com/browserbase/sdk-python/commit/55083f678b68020fae835af5cd58e0e5deea2888))
+
+
+### Bug Fixes
+
+* avoid newer type syntax ([85f597b](https://github.com/browserbase/sdk-python/commit/85f597b34d149138f1b5afdc52062cb131e3a30a))
+* **ci:** correct conditional ([a36b873](https://github.com/browserbase/sdk-python/commit/a36b87379b404613673720dd9f498ed76dfe5c3a))
+* **ci:** release-doctor — report correct token name ([61b97ff](https://github.com/browserbase/sdk-python/commit/61b97fff5ea92bade293c5f5f4a84b0d991375e7))
+* **client:** correctly parse binary response | stream ([9614c4c](https://github.com/browserbase/sdk-python/commit/9614c4c05bc57ea60100aec9a194aee7a39e701b))
+* **client:** don't send Content-Type header on GET requests ([c4c4185](https://github.com/browserbase/sdk-python/commit/c4c4185de32b28c09565b6fe84efd65fd411abb9))
+* fix extension types in playwright_extensions ([8b652e7](https://github.com/browserbase/sdk-python/commit/8b652e78be1493d03e13d2a116cbc6969a880e58))
+* **parsing:** correctly handle nested discriminated unions ([d020678](https://github.com/browserbase/sdk-python/commit/d0206786894ecfb22e0924edb8a227414b17788d))
+* **parsing:** ignore empty metadata ([118c4d4](https://github.com/browserbase/sdk-python/commit/118c4d41bda811d2d942793d8ab029b272c7a5c6))
+* **parsing:** parse extra field types ([c7ef875](https://github.com/browserbase/sdk-python/commit/c7ef87549e324fb06fab945e1754ef7b56b30031))
+* **tests:** fix: tests which call HTTP endpoints directly with the example parameters ([e298407](https://github.com/browserbase/sdk-python/commit/e2984077537fd6dee0191329a083ad0ccf9fd76f))
+
+
+### Chores
+
+* **ci:** change upload type ([e42da7c](https://github.com/browserbase/sdk-python/commit/e42da7c1fed216ff2b15223c49f1111bc0ef16e5))
+* **ci:** enable for pull requests ([03a6db7](https://github.com/browserbase/sdk-python/commit/03a6db72e98bf1606bf68928b2ac5029cba088df))
+* **ci:** only run for pushes and fork pull requests ([c8cb51f](https://github.com/browserbase/sdk-python/commit/c8cb51f311f4d39863127fab189c95d84a186bc6))
+* **docs:** grammar improvements ([f32a9e2](https://github.com/browserbase/sdk-python/commit/f32a9e258a9b0b4d29c24137d5a7207907f00f9b))
+* **docs:** remove reference to rye shell ([07d129a](https://github.com/browserbase/sdk-python/commit/07d129a04211037d123b06d36347741960e75323))
+* **docs:** remove unnecessary param examples ([62209dc](https://github.com/browserbase/sdk-python/commit/62209dcac034f40ac8b3b8a119e532201a227680))
+* **internal:** add Sequence related utils ([34b0dd6](https://github.com/browserbase/sdk-python/commit/34b0dd6b4297fafc2bcb9e8243c8d3c2e2e435fc))
+* **internal:** bump pinned h11 dep ([5e3270d](https://github.com/browserbase/sdk-python/commit/5e3270da2e4f41efdd345d073a42d6791eb22a84))
+* **internal:** change ci workflow machines ([14c0ac4](https://github.com/browserbase/sdk-python/commit/14c0ac49a6d9d42f5401a5c24ddb8586b3998fb2))
+* **internal:** codegen related update ([f979aff](https://github.com/browserbase/sdk-python/commit/f979aff605c0d74efb561e0b169ad39b486ab5a0))
+* **internal:** codegen related update ([12de9f3](https://github.com/browserbase/sdk-python/commit/12de9f324fbb40bec91cd7c6b16af1440c4f7373))
+* **internal:** codegen related update ([c4157cb](https://github.com/browserbase/sdk-python/commit/c4157cb8470b1d0ca67e6757f4fe9146a630cc82))
+* **internal:** codegen related update ([ccb2c95](https://github.com/browserbase/sdk-python/commit/ccb2c95002bb6a38e1eb8b9a84e4a335d5ee1a13))
+* **internal:** fix ruff target version ([e6a3df4](https://github.com/browserbase/sdk-python/commit/e6a3df40564b4ba3d23514e0b42221010d465bf6))
+* **internal:** update comment in script ([a7aec17](https://github.com/browserbase/sdk-python/commit/a7aec17c02632684dfeb7759dd6a5322efe092ce))
+* **internal:** update conftest.py ([5d3a2b1](https://github.com/browserbase/sdk-python/commit/5d3a2b1906ca5fca5c84c6d6684a8a62b6700479))
+* **internal:** update pyright exclude list ([33ba4b4](https://github.com/browserbase/sdk-python/commit/33ba4b47ddeb8c0aa19a11f35a7cea9aa9a0966d))
+* **package:** mark python 3.13 as supported ([2450b8e](https://github.com/browserbase/sdk-python/commit/2450b8eb2349adde689febd09269915d41e7a590))
+* **project:** add settings file for vscode ([a406241](https://github.com/browserbase/sdk-python/commit/a4062413b2fce397d59ea9ceaec7ed0565880fe2))
+* **readme:** fix version rendering on pypi ([a8afe1a](https://github.com/browserbase/sdk-python/commit/a8afe1a67c48080ef202cac88da9b5d59534799a))
+* **readme:** update badges ([869a3f4](https://github.com/browserbase/sdk-python/commit/869a3f4dd7e6f19225b697aeee89ce98a2174c0a))
+* **tests:** add tests for httpx client instantiation & proxies ([9c5d88c](https://github.com/browserbase/sdk-python/commit/9c5d88cb4cbbda5aa618cba2f5217bacd4a228cc))
+* **tests:** run tests in parallel ([94308de](https://github.com/browserbase/sdk-python/commit/94308dea065f54268145b175a13e0dbfd2a9cc81))
+* **tests:** skip some failing tests on the latest python versions ([7bc40f0](https://github.com/browserbase/sdk-python/commit/7bc40f068d290a479a0d4070ef54e8f8c4ef598d))
+* update @stainless-api/prism-cli to v5.15.0 ([b48933b](https://github.com/browserbase/sdk-python/commit/b48933b2f68eafaa554662eb7f41bf960a74d8b6))
+* update github action ([d57dc03](https://github.com/browserbase/sdk-python/commit/d57dc0398b083556ed7ceee265efcf282062005d))
+
+
+### Documentation
+
+* **client:** fix httpx.Timeout documentation reference ([4bbda56](https://github.com/browserbase/sdk-python/commit/4bbda56cdb4adf677f67011f42f5c3e324a5f60e))
+
+## 1.4.0 (2025-05-16)
+
+Full Changelog: [v1.3.0...v1.4.0](https://github.com/browserbase/sdk-python/compare/v1.3.0...v1.4.0)
+
+### Features
+
+* **api:** api update ([d3b2ee1](https://github.com/browserbase/sdk-python/commit/d3b2ee1e3c69efbdcb2f0e53b4625e2c8a2a7430))
+
+
+### Bug Fixes
+
+* **package:** support direct resource imports ([8feb502](https://github.com/browserbase/sdk-python/commit/8feb502c7e73e8abed43afae5a0526282c4f0dfe))
+* **pydantic v1:** more robust ModelField.annotation check ([5292730](https://github.com/browserbase/sdk-python/commit/5292730dd7b1585210d7ab8e640ed78a5dd9740a))
+
+
+### Chores
+
+* broadly detect json family of content-type headers ([ffe29f8](https://github.com/browserbase/sdk-python/commit/ffe29f8dc99d5e7462a0a9bbd488c368e836acdc))
+* **ci:** add timeout thresholds for CI jobs ([3ca4458](https://github.com/browserbase/sdk-python/commit/3ca4458cf31650ce8749c56bee4549811cadec1f))
+* **ci:** fix installation instructions ([99a7328](https://github.com/browserbase/sdk-python/commit/99a7328f22f6da3cd96d70b00c2a6fa0d4c82b37))
+* **ci:** only use depot for staging repos ([646f7d8](https://github.com/browserbase/sdk-python/commit/646f7d832f269d383a0da5fe5732a52ec10787b2))
+* **ci:** upload sdks to package manager ([ff18efd](https://github.com/browserbase/sdk-python/commit/ff18efdf051eabcb52863739942b652d86ed2231))
+* **internal:** avoid errors for isinstance checks on proxies ([b33d222](https://github.com/browserbase/sdk-python/commit/b33d222fd5fdd6eaaca62fb6eb6d9f878a01d31d))
+* **internal:** base client updates ([44f575e](https://github.com/browserbase/sdk-python/commit/44f575efd621315d9bd28e7921554980045af6ed))
+* **internal:** bump pyright version ([bb6bbd3](https://github.com/browserbase/sdk-python/commit/bb6bbd36b3b0fb7595bcc6bd9b25c0aafd6a08af))
+* **internal:** codegen related update ([9f4f8d1](https://github.com/browserbase/sdk-python/commit/9f4f8d1172d5c4b9fa36c8c97ab6e10958ff2959))
+* **internal:** fix list file params ([74b3df7](https://github.com/browserbase/sdk-python/commit/74b3df7160585d981ff5390b6f354926188aaa2a))
+* **internal:** import reformatting ([bba19e4](https://github.com/browserbase/sdk-python/commit/bba19e44eb67116740b27e1fea04abe06a97e4cd))
+* **internal:** minor formatting changes ([0c58843](https://github.com/browserbase/sdk-python/commit/0c58843c75075e3803c9a5a9790f48558a78e712))
+* **internal:** refactor retries to not use recursion ([4161fdb](https://github.com/browserbase/sdk-python/commit/4161fdbcf76a18deee8b790944369225fb4331ff))
+* **internal:** update models test ([5e5dc11](https://github.com/browserbase/sdk-python/commit/5e5dc11c53c60164829b145762818545cfe36f52))
+
+## 1.3.0 (2025-04-15)
+
+Full Changelog: [v1.2.0...v1.3.0](https://github.com/browserbase/sdk-python/compare/v1.2.0...v1.3.0)
+
+### Features
+
+* **api:** api update ([#131](https://github.com/browserbase/sdk-python/issues/131)) ([1be828d](https://github.com/browserbase/sdk-python/commit/1be828d5c83e48af8740886303f73620bc71b1ba))
+* **api:** api update ([#133](https://github.com/browserbase/sdk-python/issues/133)) ([2a08d98](https://github.com/browserbase/sdk-python/commit/2a08d98914d26cdc36d080bccebd66786b5247ff))
+* **api:** api update ([#140](https://github.com/browserbase/sdk-python/issues/140)) ([134049e](https://github.com/browserbase/sdk-python/commit/134049e29ba480a2238a08c327070bda96b05109))
+* **api:** api update ([#141](https://github.com/browserbase/sdk-python/issues/141)) ([145e5cb](https://github.com/browserbase/sdk-python/commit/145e5cbfc76ac2731b1d6eb3c069cba59a9fbcd9))
+* **api:** api update ([#143](https://github.com/browserbase/sdk-python/issues/143)) ([d55e411](https://github.com/browserbase/sdk-python/commit/d55e4118972d7badbe09a2dd46257d2e66822b85))
+* **client:** allow passing `NotGiven` for body ([#125](https://github.com/browserbase/sdk-python/issues/125)) ([6cdee1b](https://github.com/browserbase/sdk-python/commit/6cdee1ba5775d3c72e0cbd9fe757a1b7452780bd))
+
+
+### Bug Fixes
+
+* asyncify on non-asyncio runtimes ([#123](https://github.com/browserbase/sdk-python/issues/123)) ([c8b2cd7](https://github.com/browserbase/sdk-python/commit/c8b2cd77f4cb07d06a00a09cac3eaa55cf6c6925))
+* **ci:** ensure pip is always available ([#138](https://github.com/browserbase/sdk-python/issues/138)) ([173fdde](https://github.com/browserbase/sdk-python/commit/173fddeea8867f93428bddc5ab1d9e1fcd5a925e))
+* **ci:** remove publishing patch ([#139](https://github.com/browserbase/sdk-python/issues/139)) ([bd66d56](https://github.com/browserbase/sdk-python/commit/bd66d56eec53a7778ca624a7ccd00fcf8a9f69af))
+* **client:** mark some request bodies as optional ([6cdee1b](https://github.com/browserbase/sdk-python/commit/6cdee1ba5775d3c72e0cbd9fe757a1b7452780bd))
+* **perf:** optimize some hot paths ([042f048](https://github.com/browserbase/sdk-python/commit/042f048847634ed606d475a0aaeedc5fd129ddbd))
+* **perf:** skip traversing types for NotGiven values ([5cc6c58](https://github.com/browserbase/sdk-python/commit/5cc6c58561556e2b50fccbeed5e123adf3aba72d))
+* **types:** handle more discriminated union shapes ([#137](https://github.com/browserbase/sdk-python/issues/137)) ([d9e09e3](https://github.com/browserbase/sdk-python/commit/d9e09e3d2428a92c29a4411533564637ce5b3121))
+
+
+### Chores
+
+* **client:** minor internal fixes ([47df6f5](https://github.com/browserbase/sdk-python/commit/47df6f5956507649f684df46bf2b5bb18aa7bc93))
+* **docs:** update client docstring ([#129](https://github.com/browserbase/sdk-python/issues/129)) ([b2201f1](https://github.com/browserbase/sdk-python/commit/b2201f1d9f99f67a3b8fa21ba19560e72a245611))
+* fix typos ([#142](https://github.com/browserbase/sdk-python/issues/142)) ([0157632](https://github.com/browserbase/sdk-python/commit/015763281689247799dd97e46884ba3be520c2f5))
+* **internal:** bump rye to 0.44.0 ([#136](https://github.com/browserbase/sdk-python/issues/136)) ([9aeac01](https://github.com/browserbase/sdk-python/commit/9aeac01a20df8303f806e22b274bdd10adaeea49))
+* **internal:** codegen related update ([#124](https://github.com/browserbase/sdk-python/issues/124)) ([0678102](https://github.com/browserbase/sdk-python/commit/0678102eee40182b0fc2c2a2b2e3f965a2885a50))
+* **internal:** codegen related update ([#132](https://github.com/browserbase/sdk-python/issues/132)) ([3248d7e](https://github.com/browserbase/sdk-python/commit/3248d7e6242808bcb74427cb1b78ac52dee0948c))
+* **internal:** expand CI branch coverage ([4494839](https://github.com/browserbase/sdk-python/commit/449483977d4af8b56b916d555bea966f25304ac7))
+* **internal:** fix devcontainers setup ([#126](https://github.com/browserbase/sdk-python/issues/126)) ([eaf577b](https://github.com/browserbase/sdk-python/commit/eaf577b05bd72e2bb40105131a65e7c13172c3bb))
+* **internal:** properly set __pydantic_private__ ([#127](https://github.com/browserbase/sdk-python/issues/127)) ([5236106](https://github.com/browserbase/sdk-python/commit/52361065d4547b06c44a07396e0679f588181053))
+* **internal:** reduce CI branch coverage ([1bd4d8b](https://github.com/browserbase/sdk-python/commit/1bd4d8bf088ac47c01a12048cb7b3c963d18eb4a))
+* **internal:** remove extra empty newlines ([#134](https://github.com/browserbase/sdk-python/issues/134)) ([2206050](https://github.com/browserbase/sdk-python/commit/22060504e0f57402decfff129778a472717e29e1))
+* **internal:** remove trailing character ([#145](https://github.com/browserbase/sdk-python/issues/145)) ([2b055d7](https://github.com/browserbase/sdk-python/commit/2b055d730b2313227a0193cfc2b95056d4731464))
+* **internal:** remove unused http client options forwarding ([#130](https://github.com/browserbase/sdk-python/issues/130)) ([c63a3bd](https://github.com/browserbase/sdk-python/commit/c63a3bdad3f35658d87d48bbd5e746a36228a8ab))
+* **internal:** slight transform perf improvement ([#147](https://github.com/browserbase/sdk-python/issues/147)) ([2d46582](https://github.com/browserbase/sdk-python/commit/2d46582e5bb55d3ca74c2a4191144743d5f0058b))
+* **internal:** update client tests ([#121](https://github.com/browserbase/sdk-python/issues/121)) ([862cd7e](https://github.com/browserbase/sdk-python/commit/862cd7efb4c694866ab385c5a70fd450b917f057))
+* **internal:** update pyright settings ([0f0e110](https://github.com/browserbase/sdk-python/commit/0f0e110388f893b86881aa67badc30af8e271b8a))
+* slight wording improvement in README ([#148](https://github.com/browserbase/sdk-python/issues/148)) ([c40603c](https://github.com/browserbase/sdk-python/commit/c40603cafa809128edeff23eca37db97dda8de54))
+
+
+### Documentation
+
+* update URLs from stainlessapi.com to stainless.com ([#128](https://github.com/browserbase/sdk-python/issues/128)) ([5e2932f](https://github.com/browserbase/sdk-python/commit/5e2932f5c13c19eb454116ffdce38863556feaf1))
+
+## 1.2.0 (2025-02-11)
+
+Full Changelog: [v1.1.0...v1.2.0](https://github.com/browserbase/sdk-python/compare/v1.1.0...v1.2.0)
+
+### Features
+
+* **client:** send `X-Stainless-Read-Timeout` header ([#117](https://github.com/browserbase/sdk-python/issues/117)) ([e53c47a](https://github.com/browserbase/sdk-python/commit/e53c47ae14f4dca507cc146b37b81d5e59845806))
+
+
+### Chores
+
+* **internal:** bummp ruff dependency ([#115](https://github.com/browserbase/sdk-python/issues/115)) ([f687590](https://github.com/browserbase/sdk-python/commit/f68759062445e8336ca0f6c9b0bde3b0d2ca1e62))
+* **internal:** change default timeout to an int ([#113](https://github.com/browserbase/sdk-python/issues/113)) ([081bb21](https://github.com/browserbase/sdk-python/commit/081bb216f4b9a4df0dfdd51bcbcacef0154fe636))
+* **internal:** fix type traversing dictionary params ([#118](https://github.com/browserbase/sdk-python/issues/118)) ([cc59fe8](https://github.com/browserbase/sdk-python/commit/cc59fe8950fa4e66ee5efd598b69da9c0c8f08a0))
+* **internal:** minor type handling changes ([#119](https://github.com/browserbase/sdk-python/issues/119)) ([7be3940](https://github.com/browserbase/sdk-python/commit/7be3940cfb0bb947a6774ec225b5eb450a951e88))
+
+## 1.1.0 (2025-01-28)
+
+Full Changelog: [v1.0.5...v1.1.0](https://github.com/browserbase/sdk-python/compare/v1.0.5...v1.1.0)
+
+### Features
+
+* **api:** api update ([#101](https://github.com/browserbase/sdk-python/issues/101)) ([5be14e9](https://github.com/browserbase/sdk-python/commit/5be14e9b49b95daa2bc043ed8c33b2d4527a7361))
+* **api:** api update ([#104](https://github.com/browserbase/sdk-python/issues/104)) ([c13b2f9](https://github.com/browserbase/sdk-python/commit/c13b2f95924c940deece1f6e3b1e4ca2dfbd9fe7))
+* **api:** api update ([#105](https://github.com/browserbase/sdk-python/issues/105)) ([fc3b82f](https://github.com/browserbase/sdk-python/commit/fc3b82f224e92e273d484f8b0f52eb433210e38b))
+* **api:** api update ([#109](https://github.com/browserbase/sdk-python/issues/109)) ([faca7e9](https://github.com/browserbase/sdk-python/commit/faca7e94c6086d461b81f2806868af2e1506e035))
+* **api:** api update ([#111](https://github.com/browserbase/sdk-python/issues/111)) ([42ae774](https://github.com/browserbase/sdk-python/commit/42ae77474c2fbe9eefd9929e15d8d51cbf40bc00))
+
+
+### Bug Fixes
+
+* **client:** only call .close() when needed ([#97](https://github.com/browserbase/sdk-python/issues/97)) ([01d5bd5](https://github.com/browserbase/sdk-python/commit/01d5bd5eb7675fc069fe01e7651d769df182270a))
+* correctly handle deserialising `cls` fields ([#100](https://github.com/browserbase/sdk-python/issues/100)) ([b617b85](https://github.com/browserbase/sdk-python/commit/b617b85ef3cce3c16e38125bec483c72bc3d43c0))
+
+
+### Chores
+
+* add missing isclass check ([#94](https://github.com/browserbase/sdk-python/issues/94)) ([de5856d](https://github.com/browserbase/sdk-python/commit/de5856dac77567813f681615bef7d147e505a6a0))
+* **internal:** add support for TypeAliasType ([#85](https://github.com/browserbase/sdk-python/issues/85)) ([64448c6](https://github.com/browserbase/sdk-python/commit/64448c6e020aaeb4b39b7ec8f1b28a6b8f0c746a))
+* **internal:** bump httpx dependency ([#95](https://github.com/browserbase/sdk-python/issues/95)) ([d592266](https://github.com/browserbase/sdk-python/commit/d592266e85c40d14e4929089f8ae4db814d04ce7))
+* **internal:** bump pydantic dependency ([#81](https://github.com/browserbase/sdk-python/issues/81)) ([e35a0d8](https://github.com/browserbase/sdk-python/commit/e35a0d85ef0e45aed1a5f58757427bf7c16a76f5))
+* **internal:** bump pyright ([#83](https://github.com/browserbase/sdk-python/issues/83)) ([894b4c4](https://github.com/browserbase/sdk-python/commit/894b4c45b0c36963822923535391aa34dbfec766))
+* **internal:** codegen related update ([#102](https://github.com/browserbase/sdk-python/issues/102)) ([f648bbb](https://github.com/browserbase/sdk-python/commit/f648bbbae4520a1003ecaf5cbd299da9aabfb90f))
+* **internal:** codegen related update ([#106](https://github.com/browserbase/sdk-python/issues/106)) ([3fc9cde](https://github.com/browserbase/sdk-python/commit/3fc9cde212c1ea7f1010c9e688bd75841d828ace))
+* **internal:** codegen related update ([#107](https://github.com/browserbase/sdk-python/issues/107)) ([c97e138](https://github.com/browserbase/sdk-python/commit/c97e1383ac673d05861653c0818c1d1c5b0fa5c8))
+* **internal:** codegen related update ([#86](https://github.com/browserbase/sdk-python/issues/86)) ([ab76578](https://github.com/browserbase/sdk-python/commit/ab76578bdce5eba2410b09f497758fbf0e0d8cf0))
+* **internal:** codegen related update ([#87](https://github.com/browserbase/sdk-python/issues/87)) ([f7f189e](https://github.com/browserbase/sdk-python/commit/f7f189ec317394f2fc532b8f95c3d15304298027))
+* **internal:** codegen related update ([#88](https://github.com/browserbase/sdk-python/issues/88)) ([85f1492](https://github.com/browserbase/sdk-python/commit/85f1492efc58d86ebc34511ca1269a0db2a4d223))
+* **internal:** codegen related update ([#93](https://github.com/browserbase/sdk-python/issues/93)) ([57f0977](https://github.com/browserbase/sdk-python/commit/57f0977c8e050b85b2c2de91202f6775299f80bf))
+* **internal:** codegen related update ([#99](https://github.com/browserbase/sdk-python/issues/99)) ([f817bcb](https://github.com/browserbase/sdk-python/commit/f817bcb67c2080a954c476c15dc048c2c628243a))
+* **internal:** fix some typos ([#92](https://github.com/browserbase/sdk-python/issues/92)) ([51d9f42](https://github.com/browserbase/sdk-python/commit/51d9f42a32d17d2d2277eb8a7b8f35a980c7c485))
+* **internal:** minor formatting changes ([#110](https://github.com/browserbase/sdk-python/issues/110)) ([195c595](https://github.com/browserbase/sdk-python/commit/195c595bfbe2ed97ae4b551658618f4a99a255f0))
+* **internal:** remove some duplicated imports ([#89](https://github.com/browserbase/sdk-python/issues/89)) ([a82ae7d](https://github.com/browserbase/sdk-python/commit/a82ae7d418b1daf68c85e70dea61e628eb785b79))
+* **internal:** updated imports ([#90](https://github.com/browserbase/sdk-python/issues/90)) ([dc6e187](https://github.com/browserbase/sdk-python/commit/dc6e187bfe9585692b2de1b67fc83f027a52c43c))
+* make the `Omit` type public ([#78](https://github.com/browserbase/sdk-python/issues/78)) ([a7bdc57](https://github.com/browserbase/sdk-python/commit/a7bdc57ab7f327da61121986ba7b006238d0e5b5))
+
+
+### Documentation
+
+* fix typos ([#98](https://github.com/browserbase/sdk-python/issues/98)) ([d4f4bae](https://github.com/browserbase/sdk-python/commit/d4f4bae46341e91ac537e121bba38e511c7026bc))
+* **readme:** example snippet for client context manager ([#91](https://github.com/browserbase/sdk-python/issues/91)) ([950c8af](https://github.com/browserbase/sdk-python/commit/950c8af19db4581fabd5b965ca4f0af3cc5cd6dc))
+* **readme:** fix http client proxies example ([#82](https://github.com/browserbase/sdk-python/issues/82)) ([cc67c77](https://github.com/browserbase/sdk-python/commit/cc67c773b11b42b406b677f466c7c0ef090b254e))
+
+## 1.0.5 (2024-12-03)
+
+Full Changelog: [v1.0.4...v1.0.5](https://github.com/browserbase/sdk-python/compare/v1.0.4...v1.0.5)
+
+### Chores
+
+* **internal:** bump pyright ([#73](https://github.com/browserbase/sdk-python/issues/73)) ([d5f9711](https://github.com/browserbase/sdk-python/commit/d5f97119b2ec2334f47029541173e78ca846abae))
+
+## 1.0.4 (2024-11-29)
+
+Full Changelog: [v1.0.3...v1.0.4](https://github.com/browserbase/sdk-python/compare/v1.0.3...v1.0.4)
+
+### Bug Fixes
+
+* **client:** compat with new httpx 0.28.0 release ([#71](https://github.com/browserbase/sdk-python/issues/71)) ([7b87947](https://github.com/browserbase/sdk-python/commit/7b87947d0cdf555c73a1527b3e396cd40175d0b4))
+
+
+### Chores
+
+* **internal:** codegen related update ([#68](https://github.com/browserbase/sdk-python/issues/68)) ([3e4372e](https://github.com/browserbase/sdk-python/commit/3e4372ed8790e32850e1196c402e0023cd8a0f9d))
+* **internal:** exclude mypy from running on tests ([#70](https://github.com/browserbase/sdk-python/issues/70)) ([edd3628](https://github.com/browserbase/sdk-python/commit/edd3628710ed8f863bce5df336385dd6d380041e))
+
+## 1.0.3 (2024-11-22)
+
+Full Changelog: [v1.0.2...v1.0.3](https://github.com/browserbase/sdk-python/compare/v1.0.2...v1.0.3)
+
+### Chores
+
+* **internal:** fix compat model_dump method when warnings are passed ([#65](https://github.com/browserbase/sdk-python/issues/65)) ([4e999de](https://github.com/browserbase/sdk-python/commit/4e999de99372f6b348e74aa37663dd809c5d0da7))
+
+## 1.0.2 (2024-11-19)
+
+Full Changelog: [v1.0.1...v1.0.2](https://github.com/browserbase/sdk-python/compare/v1.0.1...v1.0.2)
+
+### Chores
+
+* rebuild project due to codegen change ([#59](https://github.com/browserbase/sdk-python/issues/59)) ([bd52098](https://github.com/browserbase/sdk-python/commit/bd520989c50f8353c7184930d0da661bdc8625fa))
+
+## 1.0.1 (2024-11-18)
+
+Full Changelog: [v1.0.0...v1.0.1](https://github.com/browserbase/sdk-python/compare/v1.0.0...v1.0.1)
+
+### Features
+
+* **api:** api update ([#48](https://github.com/browserbase/sdk-python/issues/48)) ([b17a3b8](https://github.com/browserbase/sdk-python/commit/b17a3b8e6984447421a7581ca56c0521cb3b55dd))
+* **api:** api update ([#51](https://github.com/browserbase/sdk-python/issues/51)) ([dc2da25](https://github.com/browserbase/sdk-python/commit/dc2da25d2e33d55e5655cbb8000fd4afdd6bbf62))
+
+
+### Chores
+
+* rebuild project due to codegen change ([#53](https://github.com/browserbase/sdk-python/issues/53)) ([b1684fa](https://github.com/browserbase/sdk-python/commit/b1684fa889aecf2fe7965a37ebd9c73977136ef6))
+* rebuild project due to codegen change ([#54](https://github.com/browserbase/sdk-python/issues/54)) ([e6a41da](https://github.com/browserbase/sdk-python/commit/e6a41dab6f0de6894a97067611166b1bc61893a2))
+* rebuild project due to codegen change ([#55](https://github.com/browserbase/sdk-python/issues/55)) ([ff17087](https://github.com/browserbase/sdk-python/commit/ff1708757bdeaa4e6b8d1959d1830105bd7f4b92))
+* rebuild project due to codegen change ([#57](https://github.com/browserbase/sdk-python/issues/57)) ([dfd0e19](https://github.com/browserbase/sdk-python/commit/dfd0e199c2447d4bd1b6704745d22f959a6b6bb1))
+* rebuild project due to codegen change ([#58](https://github.com/browserbase/sdk-python/issues/58)) ([f3be0be](https://github.com/browserbase/sdk-python/commit/f3be0bec13d95c65ab4cc81565b456cb566a62e2))
+
## 1.0.0 (2024-10-29)
Full Changelog: [v1.0.0-alpha.0...v1.0.0](https://github.com/browserbase/sdk-python/compare/v1.0.0-alpha.0...v1.0.0)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 45a7298e..5f8bfea6 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -17,8 +17,7 @@ $ rye sync --all-features
You can then run scripts using `rye run python script.py` or by activating the virtual environment:
```sh
-$ rye shell
-# or manually activate - https://docs.python.org/3/library/venv.html#how-venvs-work
+# Activate the virtual environment - https://docs.python.org/3/library/venv.html#how-venvs-work
$ source .venv/bin/activate
# now you can omit the `rye run` prefix
diff --git a/LICENSE b/LICENSE
index 915e6f84..2cec9d4b 100644
--- a/LICENSE
+++ b/LICENSE
@@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.
- Copyright 2024 Browserbase
+ Copyright 2025 Browserbase
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
diff --git a/README.md b/README.md
index c790a0f3..5c1155d1 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,13 @@
# Browserbase Python API library
-[![PyPI version](https://img.shields.io/pypi/v/browserbase.svg)](https://pypi.org/project/browserbase/)
+
+[![PyPI version](<https://img.shields.io/pypi/v/browserbase.svg?label=pypi%20(stable)>)](https://pypi.org/project/browserbase/)
-The Browserbase Python library provides convenient access to the Browserbase REST API from any Python 3.7+
+The Browserbase Python library provides convenient access to the Browserbase REST API from any Python 3.8+
application. The library includes type definitions for all request params and response fields,
and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx).
-It is generated with [Stainless](https://www.stainlessapi.com/).
+It is generated with [Stainless](https://www.stainless.com/).
## Documentation
@@ -16,7 +17,7 @@ The REST API documentation can be found on [docs.browserbase.com](https://docs.b
```sh
# install from PyPI
-pip install browserbase
+pip install --pre browserbase
```
## Usage
@@ -31,7 +32,7 @@ from browserbase import Browserbase
BROWSERBASE_API_KEY = os.environ.get("BROWSERBASE_API_KEY")
BROWSERBASE_PROJECT_ID = os.environ.get("BROWSERBASE_PROJECT_ID")
-bb = Browserbase(
+client = Browserbase(
# This is the default and can be omitted
api_key=BROWSERBASE_API_KEY,
)
@@ -41,6 +42,7 @@ session = client.sessions.create(
)
print(session.id)
+
def run(playwright: Playwright) -> None:
# Connect to the remote session
chromium = playwright.chromium
@@ -51,9 +53,7 @@ def run(playwright: Playwright) -> None:
# Execute Playwright actions on the remote browser tab
page.goto("https://news.ycombinator.com/")
page_title = page.title()
- assert (
- page_title == "Hacker News"
- ), f"Page title is not 'Hacker News', it is '{page_title}'"
+ assert page_title == "Hacker News", f"Page title is not 'Hacker News', it is '{page_title}'"
page.screenshot(path="screenshot.png")
page.close()
@@ -82,6 +82,39 @@ rye run example playwright_basic # replace with the example you want to run
> [!NOTE]
> Make sure you have a `.env` file that matches the [.env.example](.env.example) file in the root of this repository.
+### With aiohttp
+
+By default, the async client uses `httpx` for HTTP requests. However, for improved concurrency performance you may also use `aiohttp` as the HTTP backend.
+
+You can enable this by installing `aiohttp`:
+
+```sh
+# install from PyPI
+pip install --pre browserbase[aiohttp]
+```
+
+Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`:
+
+```python
+import asyncio
+from browserbase import DefaultAioHttpClient
+from browserbase import AsyncBrowserbase
+
+
+async def main() -> None:
+ async with AsyncBrowserbase(
+ api_key="My API Key",
+ http_client=DefaultAioHttpClient(),
+ ) as client:
+ session = await client.sessions.create(
+ project_id="your_project_id",
+ )
+ print(session.id)
+
+
+asyncio.run(main())
+```
+
## Using types
Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like:
@@ -91,6 +124,39 @@ Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typ
Typed requests and responses provide autocomplete and documentation within your editor. If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`.
+## Nested params
+
+Nested parameters are dictionaries, typed using `TypedDict`, for example:
+
+```python
+from browserbase import Browserbase
+
+client = Browserbase()
+
+session = client.sessions.create(
+ project_id="projectId",
+ browser_settings={},
+)
+print(session.browser_settings)
+```
+
+## File uploads
+
+Request parameters that correspond to file uploads can be passed as `bytes`, a [`PathLike`](https://docs.python.org/3/library/os.html#os.PathLike) instance, or a tuple of `(filename, contents, media type)`.
+
+```python
+from pathlib import Path
+from browserbase import Browserbase
+
+client = Browserbase()
+
+client.extensions.create(
+ file=Path("/path/to/file"),
+)
+```
+
+The async client uses the exact same interface. If you pass a [`PathLike`](https://docs.python.org/3/library/os.html#os.PathLike) instance, the file contents will be read asynchronously automatically.
+
## Handling errors
When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `browserbase.APIConnectionError` is raised.
@@ -121,7 +187,7 @@ except browserbase.APIStatusError as e:
print(e.response)
```
-Error codes are as followed:
+Error codes are as follows:
| Status Code | Error Type |
| ----------- | -------------------------- |
@@ -160,7 +226,7 @@ client.with_options(max_retries=5).sessions.create(
### Timeouts
By default requests time out after 1 minute. You can configure this with a `timeout` option,
-which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration) object:
+which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object:
```python
from browserbase import Browserbase
@@ -192,12 +258,14 @@ Note that requests that time out are [retried twice by default](#retries).
We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module.
-You can enable logging by setting the environment variable `BROWSERBASE_LOG` to `debug`.
+You can enable logging by setting the environment variable `BROWSERBASE_LOG` to `info`.
```shell
-$ export BROWSERBASE_LOG=debug
+$ export BROWSERBASE_LOG=info
```
+Or to `debug` for more verbose logging.
+
### How to tell whether `None` means `null` or missing
In an API response, a field may be explicitly `null`, or missing entirely; in either case, its value is `None` in this library. You can differentiate the two cases with `.model_fields_set`:
@@ -258,8 +326,7 @@ If you need to access undocumented endpoints, params, or response properties, th
#### Undocumented endpoints
To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other
-http verbs. Options on the client will be respected (such as retries) will be respected when making this
-request.
+http verbs. Options on the client will be respected (such as retries) when making this request.
```py
import httpx
@@ -288,18 +355,19 @@ can also get all the extra fields on the Pydantic model as a dict with
You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including:
-- Support for proxies
-- Custom transports
+- Support for [proxies](https://www.python-httpx.org/advanced/proxies/)
+- Custom [transports](https://www.python-httpx.org/advanced/transports/)
- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality
```python
+import httpx
from browserbase import Browserbase, DefaultHttpxClient
client = Browserbase(
# Or use the `BROWSERBASE_BASE_URL` env var
base_url="http://my.test.server.example.com:8083",
http_client=DefaultHttpxClient(
- proxies="http://my.test.proxy.example.com",
+ proxy="http://my.test.proxy.example.com",
transport=httpx.HTTPTransport(local_address="0.0.0.0"),
),
)
@@ -315,12 +383,22 @@ client.with_options(http_client=DefaultHttpxClient(...))
By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting.
+```py
+from browserbase import Browserbase
+
+with Browserbase() as client:
+ # make requests here
+ ...
+
+# HTTP client is now closed
+```
+
## Versioning
This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions:
1. Changes that only affect static types, without breaking runtime behavior.
-2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals)_.
+2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals.)_
3. Changes that we do not expect to impact the vast majority of users in practice.
We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience.
@@ -340,7 +418,7 @@ print(browserbase.__version__)
## Requirements
-Python 3.7 or higher.
+Python 3.8 or higher.
## Contributing
diff --git a/SECURITY.md b/SECURITY.md
index 4fdede87..ad64e4b9 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -2,9 +2,9 @@
## Reporting Security Issues
-This SDK is generated by [Stainless Software Inc](http://stainlessapi.com). Stainless takes security seriously, and encourages you to report any security vulnerability promptly so that appropriate action can be taken.
+This SDK is generated by [Stainless Software Inc](http://stainless.com). Stainless takes security seriously, and encourages you to report any security vulnerability promptly so that appropriate action can be taken.
-To report a security issue, please contact the Stainless team at security@stainlessapi.com.
+To report a security issue, please contact the Stainless team at security@stainless.com.
## Responsible Disclosure
@@ -16,11 +16,11 @@ before making any information public.
## Reporting Non-SDK Related Security Issues
If you encounter security issues that are not directly related to SDKs but pertain to the services
-or products provided by Browserbase please follow the respective company's security reporting guidelines.
+or products provided by Browserbase, please follow the respective company's security reporting guidelines.
### Browserbase Terms and Policies
-Please contact support@browserbase.com for any questions or concerns regarding security of our services.
+Please contact support@browserbase.com for any questions or concerns regarding the security of our services.
---
diff --git a/api.md b/api.md
index 3f21eb29..01454851 100644
--- a/api.md
+++ b/api.md
@@ -3,13 +3,13 @@
Types:
```python
-from browserbase.types import Context, ContextCreateResponse, ContextUpdateResponse
+from browserbase.types import ContextCreateResponse, ContextRetrieveResponse, ContextUpdateResponse
```
Methods:
- client.contexts.create(\*\*params) -> ContextCreateResponse
-- client.contexts.retrieve(id) -> Context
+- client.contexts.retrieve(id) -> ContextRetrieveResponse
- client.contexts.update(id) -> ContextUpdateResponse
# Extensions
@@ -17,13 +17,13 @@ Methods:
Types:
```python
-from browserbase.types import Extension
+from browserbase.types import ExtensionCreateResponse, ExtensionRetrieveResponse
```
Methods:
-- client.extensions.create(\*\*params) -> Extension
-- client.extensions.retrieve(id) -> Extension
+- client.extensions.create(\*\*params) -> ExtensionCreateResponse
+- client.extensions.retrieve(id) -> ExtensionRetrieveResponse
- client.extensions.delete(id) -> None
# Projects
@@ -31,30 +31,36 @@ Methods:
Types:
```python
-from browserbase.types import Project, ProjectUsage, ProjectListResponse
+from browserbase.types import ProjectRetrieveResponse, ProjectListResponse, ProjectUsageResponse
```
Methods:
-- client.projects.retrieve(id) -> Project
+- client.projects.retrieve(id) -> ProjectRetrieveResponse
- client.projects.list() -> ProjectListResponse
-- client.projects.usage(id) -> ProjectUsage
+- client.projects.usage(id) -> ProjectUsageResponse
# Sessions
Types:
```python
-from browserbase.types import Session, SessionLiveURLs, SessionCreateResponse, SessionListResponse
+from browserbase.types import (
+ SessionCreateResponse,
+ SessionRetrieveResponse,
+ SessionUpdateResponse,
+ SessionListResponse,
+ SessionDebugResponse,
+)
```
Methods:
- client.sessions.create(\*\*params) -> SessionCreateResponse
-- client.sessions.retrieve(id) -> Session
-- client.sessions.update(id, \*\*params) -> Session
+- client.sessions.retrieve(id) -> SessionRetrieveResponse
+- client.sessions.update(id, \*\*params) -> SessionUpdateResponse
- client.sessions.list(\*\*params) -> SessionListResponse
-- client.sessions.debug(id) -> SessionLiveURLs
+- client.sessions.debug(id) -> SessionDebugResponse
## Downloads
@@ -67,7 +73,7 @@ Methods:
Types:
```python
-from browserbase.types.sessions import SessionLog, LogListResponse
+from browserbase.types.sessions import LogListResponse
```
Methods:
@@ -79,7 +85,7 @@ Methods:
Types:
```python
-from browserbase.types.sessions import SessionRecording, RecordingRetrieveResponse
+from browserbase.types.sessions import RecordingRetrieveResponse
```
Methods:
diff --git a/bin/check-release-environment b/bin/check-release-environment
index 6ad04d35..b845b0f4 100644
--- a/bin/check-release-environment
+++ b/bin/check-release-environment
@@ -3,7 +3,7 @@
errors=()
if [ -z "${PYPI_TOKEN}" ]; then
- errors+=("The BROWSERBASE_PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.")
+ errors+=("The PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.")
fi
lenErrors=${#errors[@]}
diff --git a/bin/publish-pypi b/bin/publish-pypi
index 05bfccbb..826054e9 100644
--- a/bin/publish-pypi
+++ b/bin/publish-pypi
@@ -3,7 +3,4 @@
set -eux
mkdir -p dist
rye build --clean
-# Patching importlib-metadata version until upstream library version is updated
-# https://github.com/pypa/twine/issues/977#issuecomment-2189800841
-"$HOME/.rye/self/bin/python3" -m pip install 'importlib-metadata==7.2.1'
rye publish --yes --token=$PYPI_TOKEN
diff --git a/examples/e2e/test_playwright.py b/examples/e2e/test_playwright.py
index 2e58c70a..afd94f13 100644
--- a/examples/e2e/test_playwright.py
+++ b/examples/e2e/test_playwright.py
@@ -29,6 +29,7 @@ def playwright() -> Generator[Playwright, None, None]:
with sync_playwright() as p:
yield p
+
def test_playwright_basic(playwright: Playwright) -> None:
playwright_basic.run(playwright)
diff --git a/examples/playwright_basic.py b/examples/playwright_basic.py
index 06fc93ca..33bf88a9 100644
--- a/examples/playwright_basic.py
+++ b/examples/playwright_basic.py
@@ -19,9 +19,7 @@ def run(playwright: Playwright) -> None:
# Execute Playwright actions on the remote browser tab
page.goto("https://news.ycombinator.com/")
page_title = page.title()
- assert (
- page_title == "Hacker News"
- ), f"Page title is not 'Hacker News', it is '{page_title}'"
+ assert page_title == "Hacker News", f"Page title is not 'Hacker News', it is '{page_title}'"
page.screenshot(path="screenshot.png")
page.close()
diff --git a/examples/playwright_captcha.py b/examples/playwright_captcha.py
index 7bc2ff42..980504f3 100644
--- a/examples/playwright_captcha.py
+++ b/examples/playwright_captcha.py
@@ -34,9 +34,7 @@ def handle_console(msg: ConsoleMessage) -> None:
page.on("console", handle_console)
page.goto(DEFAULT_CAPTCHA_URL, wait_until="networkidle")
- page.wait_for_function(
- "() => window.captchaSolvingFinished === true", timeout=OVERRIDE_TIMEOUT
- )
+ page.wait_for_function("() => window.captchaSolvingFinished === true", timeout=OVERRIDE_TIMEOUT)
assert captcha_solving_started, "Captcha solving did not start"
assert captcha_solving_finished, "Captcha solving did not finish"
diff --git a/examples/playwright_contexts.py b/examples/playwright_contexts.py
index 610636ff..acf46338 100644
--- a/examples/playwright_contexts.py
+++ b/examples/playwright_contexts.py
@@ -41,15 +41,11 @@ def run(playwright: Playwright) -> None:
# Step 2: Creates a session with the context
session = bb.sessions.create(
project_id=BROWSERBASE_PROJECT_ID,
- browser_settings=TypeAdapter(BrowserSettings).validate_python(
- {"context": {"id": context_id, "persist": True}}
- ),
+ browser_settings=TypeAdapter(BrowserSettings).validate_python({"context": {"id": context_id, "persist": True}}),
)
print(session)
- assert (
- session.context_id == context_id
- ), f"Session context_id is {session.context_id}, expected {context_id}"
+ assert session.context_id == context_id, f"Session context_id is {session.context_id}, expected {context_id}"
session_id = session.id
# Step 3: Populates and persists the context
@@ -90,13 +86,9 @@ def run(playwright: Playwright) -> None:
# Step 4: Creates another session with the same context
session = bb.sessions.create(
project_id=BROWSERBASE_PROJECT_ID,
- browser_settings=BrowserSettings(
- context=BrowserSettingsContext(id=context_id, persist=True)
- ),
+ browser_settings=BrowserSettings(context=BrowserSettingsContext(id=context_id, persist=True)),
)
- assert (
- session.context_id == context_id
- ), f"Session context_id is {session.context_id}, expected {context_id}"
+ assert session.context_id == context_id, f"Session context_id is {session.context_id}, expected {context_id}"
session_id = session.id
# Step 5: Uses context to find previous state
diff --git a/examples/playwright_extensions.py b/examples/playwright_extensions.py
index 7bdb9426..f2c2f7f9 100644
--- a/examples/playwright_extensions.py
+++ b/examples/playwright_extensions.py
@@ -10,11 +10,9 @@
BROWSERBASE_PROJECT_ID,
bb,
)
-from browserbase.types import Extension, SessionCreateResponse
+from browserbase.types import SessionCreateResponse, ExtensionRetrieveResponse
-PATH_TO_EXTENSION = (
- Path.cwd() / "examples" / "packages" / "extensions" / "browserbase-test"
-)
+PATH_TO_EXTENSION = Path.cwd() / "examples" / "packages" / "extensions" / "browserbase-test"
def zip_extension(path: Path = PATH_TO_EXTENSION, save_local: bool = False) -> BytesIO:
@@ -23,9 +21,7 @@ def zip_extension(path: Path = PATH_TO_EXTENSION, save_local: bool = False) -> B
Mark save_local=True to save the zip file to a local file.
"""
# Ensure we're looking at an extension
- assert "manifest.json" in os.listdir(
- path
- ), "No manifest.json found in the extension folder."
+ assert "manifest.json" in os.listdir(path), "No manifest.json found in the extension folder."
# Create a BytesIO object to hold the zip file in memory
memory_zip = BytesIO()
@@ -51,13 +47,11 @@ def zip_extension(path: Path = PATH_TO_EXTENSION, save_local: bool = False) -> B
def create_extension() -> str:
zip_data = zip_extension(save_local=True)
- extension: Extension = bb.extensions.create(
- file=("extension.zip", zip_data.getvalue())
- )
+ extension = bb.extensions.create(file=("extension.zip", zip_data.getvalue()))
return extension.id
-def get_extension(id: str) -> Extension:
+def get_extension(id: str) -> ExtensionRetrieveResponse:
return bb.extensions.retrieve(id)
@@ -75,9 +69,7 @@ def check_for_message(page: Page, message: str) -> None:
while time.time() - start < 10:
if message in console_messages:
break
- assert (
- message in console_messages
- ), f"Expected message not found in console logs. Messages: {console_messages}"
+ assert message in console_messages, f"Expected message not found in console logs. Messages: {console_messages}"
def run(playwright: Playwright) -> None:
@@ -141,9 +133,7 @@ def run(playwright: Playwright) -> None:
project_id=BROWSERBASE_PROJECT_ID,
extension_id=extension_id,
)
- raise AssertionError(
- "Expected to fail when creating session with deleted extension"
- )
+ raise AssertionError("Expected to fail when creating session with deleted extension")
except Exception as e:
print(f"Failed to create session with deleted extension as expected: {str(e)}")
diff --git a/examples/playwright_proxy.py b/examples/playwright_proxy.py
index 8378e290..47c706b2 100644
--- a/examples/playwright_proxy.py
+++ b/examples/playwright_proxy.py
@@ -11,9 +11,7 @@
def check_proxy_bytes(session_id: str) -> None:
- bb.sessions.update(
- id=session_id, project_id=BROWSERBASE_PROJECT_ID, status="REQUEST_RELEASE"
- )
+ bb.sessions.update(id=session_id, project_id=BROWSERBASE_PROJECT_ID, status="REQUEST_RELEASE")
time.sleep(GRACEFUL_SHUTDOWN_TIMEOUT / 1000)
updated_session = bb.sessions.retrieve(id=session_id)
assert (
diff --git a/examples/playwright_upload.py b/examples/playwright_upload.py
index da1e32c0..6bba6e0c 100644
--- a/examples/playwright_upload.py
+++ b/examples/playwright_upload.py
@@ -33,12 +33,8 @@ def run(playwright: Playwright) -> None:
file_size = int(file_size_span.inner_text())
# Assert the file name and size
- assert (
- file_name == "logo.png"
- ), f"Expected file name to be 'logo.png', but got '{file_name}'"
- assert (
- file_size > 0
- ), f"Expected file size to be greater than 0, but got {file_size}"
+ assert file_name == "logo.png", f"Expected file name to be 'logo.png', but got '{file_name}'"
+ assert file_size > 0, f"Expected file size to be greater than 0, but got {file_size}"
print("File upload test passed successfully!")
diff --git a/examples/selenium_basic.py b/examples/selenium_basic.py
index 83b73078..91c7c32c 100644
--- a/examples/selenium_basic.py
+++ b/examples/selenium_basic.py
@@ -34,7 +34,8 @@ def run() -> None:
session = bb.sessions.create(project_id=BROWSERBASE_PROJECT_ID)
connection = BrowserbaseConnection(session.id, session.selenium_remote_url)
driver = webdriver.Remote(
- command_executor=connection, options=webdriver.ChromeOptions() # type: ignore
+ command_executor=connection,
+ options=webdriver.ChromeOptions(), # type: ignore
)
# Print a bit of info about the browser we've connected to
diff --git a/mypy.ini b/mypy.ini
deleted file mode 100644
index 94f60e5e..00000000
--- a/mypy.ini
+++ /dev/null
@@ -1,47 +0,0 @@
-[mypy]
-pretty = True
-show_error_codes = True
-
-# Exclude _files.py because mypy isn't smart enough to apply
-# the correct type narrowing and as this is an internal module
-# it's fine to just use Pyright.
-exclude = ^(src/browserbase/_files\.py|_dev/.*\.py)$
-
-strict_equality = True
-implicit_reexport = True
-check_untyped_defs = True
-no_implicit_optional = True
-
-warn_return_any = True
-warn_unreachable = True
-warn_unused_configs = True
-
-# Turn these options off as it could cause conflicts
-# with the Pyright options.
-warn_unused_ignores = False
-warn_redundant_casts = False
-
-disallow_any_generics = True
-disallow_untyped_defs = True
-disallow_untyped_calls = True
-disallow_subclassing_any = True
-disallow_incomplete_defs = True
-disallow_untyped_decorators = True
-cache_fine_grained = True
-
-# By default, mypy reports an error if you assign a value to the result
-# of a function call that doesn't return anything. We do this in our test
-# cases:
-# ```
-# result = ...
-# assert result is None
-# ```
-# Changing this codegen to make mypy happy would increase complexity
-# and would not be worth it.
-disable_error_code = func-returns-value
-
-# https://github.com/python/mypy/issues/12162
-[mypy.overrides]
-module = "black.files.*"
-ignore_errors = true
-ignore_missing_imports = true
diff --git a/pyproject.toml b/pyproject.toml
index 9cdf351a..217c1da2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "browserbase"
-version = "1.0.0"
+version = "1.5.0-alpha.1"
description = "The official Python library for the Browserbase API"
dynamic = ["readme"]
license = "Apache-2.0"
@@ -10,22 +10,21 @@ authors = [
dependencies = [
"httpx>=0.23.0, <1",
"pydantic>=1.9.0, <3",
- "typing-extensions>=4.7, <5",
+ "typing-extensions>=4.10, <5",
"anyio>=3.5.0, <5",
"distro>=1.7.0, <2",
"sniffio",
- "cached-property; python_version < '3.8'",
]
-requires-python = ">= 3.7"
+requires-python = ">= 3.8"
classifiers = [
"Typing :: Typed",
"Intended Audience :: Developers",
- "Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
"Operating System :: OS Independent",
"Operating System :: POSIX",
"Operating System :: MacOS",
@@ -39,13 +38,14 @@ classifiers = [
Homepage = "https://github.com/browserbase/sdk-python"
Repository = "https://github.com/browserbase/sdk-python"
-
+[project.optional-dependencies]
+aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"]
[tool.rye]
managed = true
# version pins are in requirements-dev.lock
dev-dependencies = [
- "pyright>=1.1.359",
+ "pyright==1.1.399",
"mypy",
"respx",
"pytest",
@@ -57,6 +57,7 @@ dev-dependencies = [
"dirty-equals>=0.6.0",
"importlib-metadata>=6.7.0",
"rich>=13.7.1",
+ "pytest-xdist>=3.6.1",
"python-dotenv",
"playwright",
"selenium",
@@ -97,7 +98,7 @@ typecheck = { chain = [
"typecheck:mypy" = "mypy ."
[build-system]
-requires = ["hatchling", "hatch-fancy-pypi-readme"]
+requires = ["hatchling==1.26.3", "hatch-fancy-pypi-readme"]
build-backend = "hatchling.build"
[tool.hatch.build]
@@ -136,9 +137,10 @@ replacement = '[\1](https://github.com/browserbase/sdk-python/tree/main/\g<2>)'
[tool.pytest.ini_options]
testpaths = ["tests"]
-addopts = "--tb=short"
+addopts = "--tb=short -n auto"
xfail_strict = true
asyncio_mode = "auto"
+asyncio_default_fixture_loop_scope = "session"
filterwarnings = [
"error"
]
@@ -148,24 +150,77 @@ filterwarnings = [
# there are a couple of flags that are still disabled by
# default in strict mode as they are experimental and niche.
typeCheckingMode = "strict"
-pythonVersion = "3.7"
+pythonVersion = "3.8"
exclude = [
"_dev",
".venv",
".nox",
+ ".git",
]
reportImplicitOverride = true
+reportOverlappingOverload = false
reportImportCycles = false
reportPrivateUsage = false
+[tool.mypy]
+pretty = true
+show_error_codes = true
+
+# Exclude _files.py because mypy isn't smart enough to apply
+# the correct type narrowing and as this is an internal module
+# it's fine to just use Pyright.
+#
+# We also exclude our `tests` as mypy doesn't always infer
+# types correctly and Pyright will still catch any type errors.
+exclude = ['src/browserbase/_files.py', '_dev/.*.py', 'tests/.*']
+
+strict_equality = true
+implicit_reexport = true
+check_untyped_defs = true
+no_implicit_optional = true
+
+warn_return_any = true
+warn_unreachable = true
+warn_unused_configs = true
+
+# Turn these options off as they could cause conflicts
+# with the Pyright options.
+warn_unused_ignores = false
+warn_redundant_casts = false
+
+disallow_any_generics = true
+disallow_untyped_defs = true
+disallow_untyped_calls = true
+disallow_subclassing_any = true
+disallow_incomplete_defs = true
+disallow_untyped_decorators = true
+cache_fine_grained = true
+
+# By default, mypy reports an error if you assign a value to the result
+# of a function call that doesn't return anything. We do this in our test
+# cases:
+# ```
+# result = ...
+# assert result is None
+# ```
+# Changing this codegen to make mypy happy would increase complexity
+# and would not be worth it.
+disable_error_code = "func-returns-value,overload-cannot-match"
+
+# https://github.com/python/mypy/issues/12162
+[[tool.mypy.overrides]]
+module = "black.files.*"
+ignore_errors = true
+ignore_missing_imports = true
+
[tool.ruff]
line-length = 120
output-format = "grouped"
-target-version = "py37"
+target-version = "py38"
[tool.ruff.format]
docstring-code-format = true
@@ -186,7 +241,7 @@ select = [
"T201",
"T203",
# misuse of typing.TYPE_CHECKING
- "TCH004",
+ # "TC004", # fails lint
# import rules
"TID251",
]
diff --git a/requirements-dev.lock b/requirements-dev.lock
index 83ce0203..e6008248 100644
--- a/requirements-dev.lock
+++ b/requirements-dev.lock
@@ -4,12 +4,19 @@
# last locked with the following flags:
# pre: false
# features: []
-# all-features: false
+# all-features: true
# with-sources: false
# generate-hashes: false
# universal: false
-e file:.
+aiohappyeyeballs==2.6.1
+ # via aiohttp
+aiohttp==3.12.15
+ # via browserbase
+ # via httpx-aiohttp
+aiosignal==1.4.0
+ # via aiohttp
annotated-types==0.7.0
# via pydantic
anyio==4.6.2.post1
@@ -17,7 +24,10 @@ anyio==4.6.2.post1
# via httpx
argcomplete==3.5.1
# via nox
+async-timeout==5.0.1
+ # via aiohttp
attrs==24.2.0
+ # via aiohttp
# via outcome
# via trio
certifi==2024.8.30
@@ -39,8 +49,13 @@ exceptiongroup==1.2.2
# via pytest
# via trio
# via trio-websocket
+execnet==2.1.1
+ # via pytest-xdist
filelock==3.16.1
# via virtualenv
+frozenlist==1.7.0
+ # via aiohttp
+ # via aiosignal
greenlet==3.1.1
# via playwright
h11==0.14.0
@@ -48,14 +63,18 @@ h11==0.14.0
# via wsproto
httpcore==1.0.6
# via httpx
-httpx==0.27.2
+httpx==0.28.1
# via browserbase
+ # via httpx-aiohttp
# via respx
+httpx-aiohttp==0.1.8
+ # via browserbase
idna==3.10
# via anyio
# via httpx
# via requests
# via trio
+ # via yarl
importlib-metadata==8.5.0
iniconfig==2.0.0
# via pytest
@@ -63,7 +82,10 @@ markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
-mypy==1.13.0
+multidict==6.6.4
+ # via aiohttp
+ # via yarl
+mypy==1.14.1
mypy-extensions==1.0.0
# via mypy
nodeenv==1.9.1
@@ -74,31 +96,36 @@ outcome==1.3.0.post0
packaging==24.1
# via nox
# via pytest
-platformdirs==4.3.6
+platformdirs==3.11.0
# via virtualenv
playwright==1.48.0
# via pytest-playwright
pluggy==1.5.0
# via pytest
-pydantic==2.9.2
+propcache==0.3.2
+ # via aiohttp
+ # via yarl
+pydantic==2.11.9
# via browserbase
-pydantic-core==2.23.4
+pydantic-core==2.33.2
# via pydantic
pyee==12.0.0
# via playwright
pygments==2.18.0
# via rich
-pyright==1.1.386
+pyright==1.1.399
pysocks==1.7.1
# via urllib3
pytest==8.3.3
# via pytest-asyncio
# via pytest-base-url
# via pytest-playwright
+ # via pytest-xdist
pytest-asyncio==0.24.0
pytest-base-url==2.1.0
# via pytest-playwright
pytest-playwright==0.5.2
+pytest-xdist==3.8.0
python-dateutil==2.9.0.post0
# via time-machine
python-dotenv==1.0.1
@@ -106,7 +133,7 @@ python-slugify==8.0.4
# via pytest-playwright
requests==2.32.3
# via pytest-base-url
-respx==0.21.1
+respx==0.22.0
rich==13.9.3
ruff==0.7.1
selenium==4.25.0
@@ -115,7 +142,6 @@ six==1.16.0
sniffio==1.3.1
# via anyio
# via browserbase
- # via httpx
# via trio
sortedcontainers==2.4.0
# via trio
@@ -132,23 +158,30 @@ trio==0.27.0
trio-websocket==0.11.1
# via selenium
typing-extensions==4.12.2
+ # via aiosignal
# via anyio
# via browserbase
+ # via multidict
# via mypy
# via pydantic
# via pydantic-core
# via pyee
# via pyright
+ # via typing-inspection
+typing-inspection==0.4.1
+ # via pydantic
# via rich
# via selenium
urllib3==2.2.3
# via requests
# via selenium
-virtualenv==20.27.1
+virtualenv==20.24.5
# via nox
websocket-client==1.8.0
# via selenium
wsproto==1.2.0
# via trio-websocket
+yarl==1.20.1
+ # via aiohttp
zipp==3.20.2
# via importlib-metadata
diff --git a/requirements.lock b/requirements.lock
index 4bff3dd0..2495a260 100644
--- a/requirements.lock
+++ b/requirements.lock
@@ -10,11 +10,22 @@
# universal: false
-e file:.
+aiohappyeyeballs==2.6.1
+ # via aiohttp
+aiohttp==3.12.8
+ # via browserbase
+ # via httpx-aiohttp
+aiosignal==1.3.2
+ # via aiohttp
annotated-types==0.6.0
# via pydantic
anyio==4.4.0
# via browserbase
# via httpx
+async-timeout==5.0.1
+ # via aiohttp
+attrs==25.3.0
+ # via aiohttp
certifi==2023.7.22
# via httpcore
# via httpx
@@ -22,25 +33,43 @@ distro==1.8.0
# via browserbase
exceptiongroup==1.2.2
# via anyio
-h11==0.14.0
+frozenlist==1.6.2
+ # via aiohttp
+ # via aiosignal
+h11==0.16.0
# via httpcore
-httpcore==1.0.2
+httpcore==1.0.9
# via httpx
-httpx==0.25.2
+httpx==0.28.1
+ # via browserbase
+ # via httpx-aiohttp
+httpx-aiohttp==0.1.8
# via browserbase
idna==3.4
# via anyio
# via httpx
-pydantic==2.9.2
+ # via yarl
+multidict==6.4.4
+ # via aiohttp
+ # via yarl
+propcache==0.3.1
+ # via aiohttp
+ # via yarl
+pydantic==2.11.9
# via browserbase
-pydantic-core==2.23.4
+pydantic-core==2.33.2
# via pydantic
sniffio==1.3.0
# via anyio
# via browserbase
- # via httpx
typing-extensions==4.12.2
# via anyio
# via browserbase
+ # via multidict
# via pydantic
# via pydantic-core
+ # via typing-inspection
+typing-inspection==0.4.1
+ # via pydantic
+yarl==1.20.0
+ # via aiohttp
diff --git a/scripts/bootstrap b/scripts/bootstrap
index 8c5c60eb..b430fee3 100755
--- a/scripts/bootstrap
+++ b/scripts/bootstrap
@@ -4,10 +4,18 @@ set -e
cd "$(dirname "$0")/.."
-if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ]; then
+if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ] && [ -t 0 ]; then
brew bundle check >/dev/null 2>&1 || {
- echo "==> Installing Homebrew dependencies…"
- brew bundle
+ echo -n "==> Install Homebrew dependencies? (y/N): "
+ read -r response
+ case "$response" in
+ [yY][eE][sS]|[yY])
+ brew bundle
+ ;;
+ *)
+ ;;
+ esac
+ echo
}
fi
diff --git a/scripts/lint b/scripts/lint
index a74a1988..feccbdde 100755
--- a/scripts/lint
+++ b/scripts/lint
@@ -9,4 +9,3 @@ rye run lint
echo "==> Making sure it imports"
rye run python -c 'import browserbase'
-
diff --git a/scripts/mock b/scripts/mock
index d2814ae6..0b28f6ea 100755
--- a/scripts/mock
+++ b/scripts/mock
@@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}"
# Run prism mock on the given spec
if [ "$1" == "--daemon" ]; then
- npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log &
+ npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log &
# Wait for server to come online
echo -n "Waiting for server"
@@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then
echo
else
- npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL"
+ npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL"
fi
diff --git a/scripts/test b/scripts/test
index 4fa5698b..dbeda2d2 100755
--- a/scripts/test
+++ b/scripts/test
@@ -43,7 +43,7 @@ elif ! prism_is_running ; then
echo -e "To run the server, pass in the path or url of your OpenAPI"
echo -e "spec to the prism command:"
echo
- echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}"
+ echo -e " \$ ${YELLOW}npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock path/to/your.openapi.yml${NC}"
echo
exit 1
@@ -52,6 +52,8 @@ else
echo
fi
+export DEFER_PYDANTIC_BUILD=false
+
echo "==> Running tests"
rye run pytest "$@"
diff --git a/scripts/utils/ruffen-docs.py b/scripts/utils/ruffen-docs.py
index 37b3d94f..0cf2bd2f 100644
--- a/scripts/utils/ruffen-docs.py
+++ b/scripts/utils/ruffen-docs.py
@@ -47,7 +47,7 @@ def _md_match(match: Match[str]) -> str:
with _collect_error(match):
code = format_code_block(code)
code = textwrap.indent(code, match["indent"])
- return f'{match["before"]}{code}{match["after"]}'
+ return f"{match['before']}{code}{match['after']}"
def _pycon_match(match: Match[str]) -> str:
code = ""
@@ -97,7 +97,7 @@ def finish_fragment() -> None:
def _md_pycon_match(match: Match[str]) -> str:
code = _pycon_match(match)
code = textwrap.indent(code, match["indent"])
- return f'{match["before"]}{code}{match["after"]}'
+ return f"{match['before']}{code}{match['after']}"
src = MD_RE.sub(_md_match, src)
src = MD_PYCON_RE.sub(_md_pycon_match, src)
diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh
new file mode 100755
index 00000000..4fa57664
--- /dev/null
+++ b/scripts/utils/upload-artifact.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+set -exuo pipefail
+
+FILENAME=$(basename dist/*.whl)
+
+RESPONSE=$(curl -X POST "$URL?filename=$FILENAME" \
+ -H "Authorization: Bearer $AUTH" \
+ -H "Content-Type: application/json")
+
+SIGNED_URL=$(echo "$RESPONSE" | jq -r '.url')
+
+if [[ "$SIGNED_URL" == "null" ]]; then
+ echo -e "\033[31mFailed to get signed URL.\033[0m"
+ exit 1
+fi
+
+UPLOAD_RESPONSE=$(curl -v -X PUT \
+ -H "Content-Type: binary/octet-stream" \
+ --data-binary "@dist/$FILENAME" "$SIGNED_URL" 2>&1)
+
+if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then
+ echo -e "\033[32mUploaded build to Stainless storage.\033[0m"
+ echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/browserbase-python/$SHA/$FILENAME'\033[0m"
+else
+ echo -e "\033[31mFailed to upload artifact.\033[0m"
+ exit 1
+fi
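A rough Python equivalent of the new upload script, useful for seeing the two-step flow (request a signed URL, then PUT the wheel) outside of bash. It assumes the same URL, AUTH, and SHA environment variables and that `requests` is installed; the endpoint behaviour is inferred only from the script above.

import glob
import os
from pathlib import Path

import requests

wheel = Path(glob.glob("dist/*.whl")[0])

# Step 1: ask the artifact service for a signed upload URL for this filename.
resp = requests.post(
    os.environ["URL"],
    params={"filename": wheel.name},
    headers={"Authorization": f"Bearer {os.environ['AUTH']}", "Content-Type": "application/json"},
)
signed_url = resp.json().get("url")
if not signed_url:
    raise SystemExit("Failed to get signed URL.")

# Step 2: PUT the wheel bytes to the signed URL.
upload = requests.put(
    signed_url,
    data=wheel.read_bytes(),
    headers={"Content-Type": "binary/octet-stream"},
)
upload.raise_for_status()
print(f"Installation: pip install 'https://pkg.stainless.com/s/browserbase-python/{os.environ['SHA']}/{wheel.name}'")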
diff --git a/src/browserbase/__init__.py b/src/browserbase/__init__.py
index 4b1d2804..a7356c1e 100644
--- a/src/browserbase/__init__.py
+++ b/src/browserbase/__init__.py
@@ -1,7 +1,9 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+import typing as _t
+
from . import types
-from ._types import NOT_GIVEN, NoneType, NotGiven, Transport, ProxiesTypes
+from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes, omit, not_given
from ._utils import file_from_path
from ._client import (
Client,
@@ -34,7 +36,7 @@
UnprocessableEntityError,
APIResponseValidationError,
)
-from ._base_client import DefaultHttpxClient, DefaultAsyncHttpxClient
+from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient
from ._utils._logs import setup_logging as _setup_logging
__all__ = [
@@ -46,6 +48,9 @@
"ProxiesTypes",
"NotGiven",
"NOT_GIVEN",
+ "not_given",
+ "Omit",
+ "omit",
"BrowserbaseError",
"APIError",
"APIStatusError",
@@ -75,8 +80,12 @@
"DEFAULT_CONNECTION_LIMITS",
"DefaultHttpxClient",
"DefaultAsyncHttpxClient",
+ "DefaultAioHttpClient",
]
+if not _t.TYPE_CHECKING:
+ from ._utils._resources_proxy import resources as resources
+
_setup_logging()
# Update the __module__ attribute for exported symbols so that
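The new lowercase sentinels exported here are the preferred spelling of `NOT_GIVEN`/`Omit` going forward. A minimal sketch of how they read in user code; the wrapper function is hypothetical, and only `bb.sessions.retrieve(...)` with a `timeout` option is taken from the SDK's standard method signature. `omit` plays the analogous role for dropping a value, such as a default header, that would otherwise be sent.

from __future__ import annotations

import os

import httpx

from browserbase import Browserbase, NotGiven, not_given

bb = Browserbase(api_key=os.environ["BROWSERBASE_API_KEY"])


def retrieve_session(session_id: str, timeout: float | httpx.Timeout | NotGiven = not_given):
    # Forwarding the sentinel untouched lets the SDK apply its own default timeout
    # only when the caller supplied nothing, without conflating that with None.
    return bb.sessions.retrieve(id=session_id, timeout=timeout)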
diff --git a/src/browserbase/_base_client.py b/src/browserbase/_base_client.py
index f17e8d2b..2485e4e6 100644
--- a/src/browserbase/_base_client.py
+++ b/src/browserbase/_base_client.py
@@ -9,7 +9,6 @@
import inspect
import logging
import platform
-import warnings
import email.utils
from types import TracebackType
from random import random
@@ -36,14 +35,13 @@
import httpx
import distro
import pydantic
-from httpx import URL, Limits
+from httpx import URL
from pydantic import PrivateAttr
from . import _exceptions
from ._qs import Querystring
from ._files import to_httpx_files, async_to_httpx_files
from ._types import (
- NOT_GIVEN,
Body,
Omit,
Query,
@@ -51,19 +49,17 @@
Timeout,
NotGiven,
ResponseT,
- Transport,
AnyMapping,
PostParser,
- ProxiesTypes,
RequestFiles,
HttpxSendArgs,
- AsyncTransport,
RequestOptions,
HttpxRequestFiles,
ModelBuilderProtocol,
+ not_given,
)
from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping
-from ._compat import model_copy, model_dump
+from ._compat import PYDANTIC_V1, model_copy, model_dump
from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
from ._response import (
APIResponse,
@@ -102,7 +98,11 @@
_AsyncStreamT = TypeVar("_AsyncStreamT", bound=AsyncStream[Any])
if TYPE_CHECKING:
- from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT
+ from httpx._config import (
+ DEFAULT_TIMEOUT_CONFIG, # pyright: ignore[reportPrivateImportUsage]
+ )
+
+ HTTPX_DEFAULT_TIMEOUT = DEFAULT_TIMEOUT_CONFIG
else:
try:
from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT
@@ -119,6 +119,7 @@ class PageInfo:
url: URL | NotGiven
params: Query | NotGiven
+ json: Body | NotGiven
@overload
def __init__(
@@ -134,19 +135,30 @@ def __init__(
params: Query,
) -> None: ...
+ @overload
def __init__(
self,
*,
- url: URL | NotGiven = NOT_GIVEN,
- params: Query | NotGiven = NOT_GIVEN,
+ json: Body,
+ ) -> None: ...
+
+ def __init__(
+ self,
+ *,
+ url: URL | NotGiven = not_given,
+ json: Body | NotGiven = not_given,
+ params: Query | NotGiven = not_given,
) -> None:
self.url = url
+ self.json = json
self.params = params
@override
def __repr__(self) -> str:
if self.url:
return f"{self.__class__.__name__}(url={self.url})"
+ if self.json:
+ return f"{self.__class__.__name__}(json={self.json})"
return f"{self.__class__.__name__}(params={self.params})"
@@ -195,6 +207,19 @@ def _info_to_options(self, info: PageInfo) -> FinalRequestOptions:
options.url = str(url)
return options
+ if not isinstance(info.json, NotGiven):
+ if not is_mapping(info.json):
+ raise TypeError("Pagination is only supported with mappings")
+
+ if not options.json_data:
+ options.json_data = {**info.json}
+ else:
+ if not is_mapping(options.json_data):
+ raise TypeError("Pagination is only supported with mappings")
+
+ options.json_data = {**options.json_data, **info.json}
+ return options
+
raise ValueError("Unexpected PageInfo state")
@@ -207,6 +232,9 @@ def _set_private_attributes(
model: Type[_T],
options: FinalRequestOptions,
) -> None:
+ if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None:
+ self.__pydantic_private__ = {}
+
self._model = model
self._client = client
self._options = options
@@ -292,6 +320,9 @@ def _set_private_attributes(
client: AsyncAPIClient,
options: FinalRequestOptions,
) -> None:
+ if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None:
+ self.__pydantic_private__ = {}
+
self._model = model
self._client = client
self._options = options
@@ -331,9 +362,6 @@ class BaseClient(Generic[_HttpxClientT, _DefaultStreamT]):
_base_url: URL
max_retries: int
timeout: Union[float, Timeout, None]
- _limits: httpx.Limits
- _proxies: ProxiesTypes | None
- _transport: Transport | AsyncTransport | None
_strict_response_validation: bool
_idempotency_header: str | None
_default_stream_cls: type[_DefaultStreamT] | None = None
@@ -346,9 +374,6 @@ def __init__(
_strict_response_validation: bool,
max_retries: int = DEFAULT_MAX_RETRIES,
timeout: float | Timeout | None = DEFAULT_TIMEOUT,
- limits: httpx.Limits,
- transport: Transport | AsyncTransport | None,
- proxies: ProxiesTypes | None,
custom_headers: Mapping[str, str] | None = None,
custom_query: Mapping[str, object] | None = None,
) -> None:
@@ -356,9 +381,6 @@ def __init__(
self._base_url = self._enforce_trailing_slash(URL(base_url))
self.max_retries = max_retries
self.timeout = timeout
- self._limits = limits
- self._proxies = proxies
- self._transport = transport
self._custom_headers = custom_headers or {}
self._custom_query = custom_query or {}
self._strict_response_validation = _strict_response_validation
@@ -415,13 +437,20 @@ def _build_headers(self, options: FinalRequestOptions, *, retries_taken: int = 0
headers = httpx.Headers(headers_dict)
idempotency_header = self._idempotency_header
- if idempotency_header and options.method.lower() != "get" and idempotency_header not in headers:
- headers[idempotency_header] = options.idempotency_key or self._idempotency_key()
+ if idempotency_header and options.idempotency_key and idempotency_header not in headers:
+ headers[idempotency_header] = options.idempotency_key
- # Don't set the retry count header if it was already set or removed by the caller. We check
+ # Don't set these headers if they were already set or removed by the caller. We check
# `custom_headers`, which can contain `Omit()`, instead of `headers` to account for the removal case.
- if "x-stainless-retry-count" not in (header.lower() for header in custom_headers):
+ lower_custom_headers = [header.lower() for header in custom_headers]
+ if "x-stainless-retry-count" not in lower_custom_headers:
headers["x-stainless-retry-count"] = str(retries_taken)
+ if "x-stainless-read-timeout" not in lower_custom_headers:
+ timeout = self.timeout if isinstance(options.timeout, NotGiven) else options.timeout
+ if isinstance(timeout, Timeout):
+ timeout = timeout.read
+ if timeout is not None:
+ headers["x-stainless-read-timeout"] = str(timeout)
return headers
@@ -500,6 +529,18 @@ def _build_request(
# work around https://github.com/encode/httpx/discussions/2880
kwargs["extensions"] = {"sni_hostname": prepared_url.host.replace("_", "-")}
+ is_body_allowed = options.method.lower() != "get"
+
+ if is_body_allowed:
+ if isinstance(json_data, bytes):
+ kwargs["content"] = json_data
+ else:
+ kwargs["json"] = json_data if is_given(json_data) else None
+ kwargs["files"] = files
+ else:
+ headers.pop("Content-Type", None)
+ kwargs.pop("data", None)
+
# TODO: report this error to httpx
return self._client.build_request( # pyright: ignore[reportUnknownMemberType]
headers=headers,
@@ -511,8 +552,6 @@ def _build_request(
# so that passing a `TypedDict` doesn't cause an error.
# https://github.com/microsoft/pyright/issues/3526#event-6715453066
params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None,
- json=json_data,
- files=files,
**kwargs,
)
@@ -556,7 +595,7 @@ def _maybe_override_cast_to(self, cast_to: type[ResponseT], options: FinalReques
# we internally support defining a temporary header to override the
# default `cast_to` type for use with `.with_raw_response` and `.with_streaming_response`
# see _response.py for implementation details
- override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, NOT_GIVEN)
+ override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, not_given)
if is_given(override_cast_to):
options.headers = headers
return cast(Type[ResponseT], override_cast_to)
@@ -767,6 +806,9 @@ def __init__(self, **kwargs: Any) -> None:
class SyncHttpxClientWrapper(DefaultHttpxClient):
def __del__(self) -> None:
+ if self.is_closed:
+ return
+
try:
self.close()
except Exception:
@@ -783,44 +825,12 @@ def __init__(
version: str,
base_url: str | URL,
max_retries: int = DEFAULT_MAX_RETRIES,
- timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
- transport: Transport | None = None,
- proxies: ProxiesTypes | None = None,
- limits: Limits | None = None,
+ timeout: float | Timeout | None | NotGiven = not_given,
http_client: httpx.Client | None = None,
custom_headers: Mapping[str, str] | None = None,
custom_query: Mapping[str, object] | None = None,
_strict_response_validation: bool,
) -> None:
- if limits is not None:
- warnings.warn(
- "The `connection_pool_limits` argument is deprecated. The `http_client` argument should be passed instead",
- category=DeprecationWarning,
- stacklevel=3,
- )
- if http_client is not None:
- raise ValueError("The `http_client` argument is mutually exclusive with `connection_pool_limits`")
- else:
- limits = DEFAULT_CONNECTION_LIMITS
-
- if transport is not None:
- warnings.warn(
- "The `transport` argument is deprecated. The `http_client` argument should be passed instead",
- category=DeprecationWarning,
- stacklevel=3,
- )
- if http_client is not None:
- raise ValueError("The `http_client` argument is mutually exclusive with `transport`")
-
- if proxies is not None:
- warnings.warn(
- "The `proxies` argument is deprecated. The `http_client` argument should be passed instead",
- category=DeprecationWarning,
- stacklevel=3,
- )
- if http_client is not None:
- raise ValueError("The `http_client` argument is mutually exclusive with `proxies`")
-
if not is_given(timeout):
# if the user passed in a custom http client with a non-default
# timeout set then we use that timeout.
@@ -841,12 +851,9 @@ def __init__(
super().__init__(
version=version,
- limits=limits,
# cast to a valid type because mypy doesn't understand our type narrowing
timeout=cast(Timeout, timeout),
- proxies=proxies,
base_url=base_url,
- transport=transport,
max_retries=max_retries,
custom_query=custom_query,
custom_headers=custom_headers,
@@ -856,10 +863,6 @@ def __init__(
base_url=base_url,
# cast to a valid type because mypy doesn't understand our type narrowing
timeout=cast(Timeout, timeout),
- proxies=proxies,
- transport=transport,
- limits=limits,
- follow_redirects=True,
)
def is_closed(self) -> bool:
@@ -909,7 +912,6 @@ def request(
self,
cast_to: Type[ResponseT],
options: FinalRequestOptions,
- remaining_retries: Optional[int] = None,
*,
stream: Literal[True],
stream_cls: Type[_StreamT],
@@ -920,7 +922,6 @@ def request(
self,
cast_to: Type[ResponseT],
options: FinalRequestOptions,
- remaining_retries: Optional[int] = None,
*,
stream: Literal[False] = False,
) -> ResponseT: ...
@@ -930,7 +931,6 @@ def request(
self,
cast_to: Type[ResponseT],
options: FinalRequestOptions,
- remaining_retries: Optional[int] = None,
*,
stream: bool = False,
stream_cls: Type[_StreamT] | None = None,
@@ -940,121 +940,112 @@ def request(
self,
cast_to: Type[ResponseT],
options: FinalRequestOptions,
- remaining_retries: Optional[int] = None,
*,
stream: bool = False,
stream_cls: type[_StreamT] | None = None,
) -> ResponseT | _StreamT:
- if remaining_retries is not None:
- retries_taken = options.get_max_retries(self.max_retries) - remaining_retries
- else:
- retries_taken = 0
-
- return self._request(
- cast_to=cast_to,
- options=options,
- stream=stream,
- stream_cls=stream_cls,
- retries_taken=retries_taken,
- )
+ cast_to = self._maybe_override_cast_to(cast_to, options)
- def _request(
- self,
- *,
- cast_to: Type[ResponseT],
- options: FinalRequestOptions,
- retries_taken: int,
- stream: bool,
- stream_cls: type[_StreamT] | None,
- ) -> ResponseT | _StreamT:
# create a copy of the options we were given so that if the
# options are mutated later & we then retry, the retries are
# given the original options
input_options = model_copy(options)
+ if input_options.idempotency_key is None and input_options.method.lower() != "get":
+ # ensure the idempotency key is reused between requests
+ input_options.idempotency_key = self._idempotency_key()
- cast_to = self._maybe_override_cast_to(cast_to, options)
- options = self._prepare_options(options)
+ response: httpx.Response | None = None
+ max_retries = input_options.get_max_retries(self.max_retries)
- remaining_retries = options.get_max_retries(self.max_retries) - retries_taken
- request = self._build_request(options, retries_taken=retries_taken)
- self._prepare_request(request)
+ retries_taken = 0
+ for retries_taken in range(max_retries + 1):
+ options = model_copy(input_options)
+ options = self._prepare_options(options)
- kwargs: HttpxSendArgs = {}
- if self.custom_auth is not None:
- kwargs["auth"] = self.custom_auth
+ remaining_retries = max_retries - retries_taken
+ request = self._build_request(options, retries_taken=retries_taken)
+ self._prepare_request(request)
- log.debug("Sending HTTP Request: %s %s", request.method, request.url)
+ kwargs: HttpxSendArgs = {}
+ if self.custom_auth is not None:
+ kwargs["auth"] = self.custom_auth
- try:
- response = self._client.send(
- request,
- stream=stream or self._should_stream_response_body(request=request),
- **kwargs,
- )
- except httpx.TimeoutException as err:
- log.debug("Encountered httpx.TimeoutException", exc_info=True)
-
- if remaining_retries > 0:
- return self._retry_request(
- input_options,
- cast_to,
- retries_taken=retries_taken,
- stream=stream,
- stream_cls=stream_cls,
- response_headers=None,
- )
+ if options.follow_redirects is not None:
+ kwargs["follow_redirects"] = options.follow_redirects
- log.debug("Raising timeout error")
- raise APITimeoutError(request=request) from err
- except Exception as err:
- log.debug("Encountered Exception", exc_info=True)
+ log.debug("Sending HTTP Request: %s %s", request.method, request.url)
- if remaining_retries > 0:
- return self._retry_request(
- input_options,
- cast_to,
- retries_taken=retries_taken,
- stream=stream,
- stream_cls=stream_cls,
- response_headers=None,
+ response = None
+ try:
+ response = self._client.send(
+ request,
+ stream=stream or self._should_stream_response_body(request=request),
+ **kwargs,
)
+ except httpx.TimeoutException as err:
+ log.debug("Encountered httpx.TimeoutException", exc_info=True)
+
+ if remaining_retries > 0:
+ self._sleep_for_retry(
+ retries_taken=retries_taken,
+ max_retries=max_retries,
+ options=input_options,
+ response=None,
+ )
+ continue
+
+ log.debug("Raising timeout error")
+ raise APITimeoutError(request=request) from err
+ except Exception as err:
+ log.debug("Encountered Exception", exc_info=True)
+
+ if remaining_retries > 0:
+ self._sleep_for_retry(
+ retries_taken=retries_taken,
+ max_retries=max_retries,
+ options=input_options,
+ response=None,
+ )
+ continue
+
+ log.debug("Raising connection error")
+ raise APIConnectionError(request=request) from err
+
+ log.debug(
+ 'HTTP Response: %s %s "%i %s" %s',
+ request.method,
+ request.url,
+ response.status_code,
+ response.reason_phrase,
+ response.headers,
+ )
- log.debug("Raising connection error")
- raise APIConnectionError(request=request) from err
-
- log.debug(
- 'HTTP Response: %s %s "%i %s" %s',
- request.method,
- request.url,
- response.status_code,
- response.reason_phrase,
- response.headers,
- )
+ try:
+ response.raise_for_status()
+ except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code
+ log.debug("Encountered httpx.HTTPStatusError", exc_info=True)
+
+ if remaining_retries > 0 and self._should_retry(err.response):
+ err.response.close()
+ self._sleep_for_retry(
+ retries_taken=retries_taken,
+ max_retries=max_retries,
+ options=input_options,
+ response=response,
+ )
+ continue
- try:
- response.raise_for_status()
- except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code
- log.debug("Encountered httpx.HTTPStatusError", exc_info=True)
-
- if remaining_retries > 0 and self._should_retry(err.response):
- err.response.close()
- return self._retry_request(
- input_options,
- cast_to,
- retries_taken=retries_taken,
- response_headers=err.response.headers,
- stream=stream,
- stream_cls=stream_cls,
- )
+ # If the response is streamed then we need to explicitly read the response
+ # to completion before attempting to access the response text.
+ if not err.response.is_closed:
+ err.response.read()
- # If the response is streamed then we need to explicitly read the response
- # to completion before attempting to access the response text.
- if not err.response.is_closed:
- err.response.read()
+ log.debug("Re-raising status error")
+ raise self._make_status_error_from_response(err.response) from None
- log.debug("Re-raising status error")
- raise self._make_status_error_from_response(err.response) from None
+ break
+ assert response is not None, "could not resolve response (should never happen)"
return self._process_response(
cast_to=cast_to,
options=options,
@@ -1064,37 +1055,20 @@ def _request(
retries_taken=retries_taken,
)
- def _retry_request(
- self,
- options: FinalRequestOptions,
- cast_to: Type[ResponseT],
- *,
- retries_taken: int,
- response_headers: httpx.Headers | None,
- stream: bool,
- stream_cls: type[_StreamT] | None,
- ) -> ResponseT | _StreamT:
- remaining_retries = options.get_max_retries(self.max_retries) - retries_taken
+ def _sleep_for_retry(
+ self, *, retries_taken: int, max_retries: int, options: FinalRequestOptions, response: httpx.Response | None
+ ) -> None:
+ remaining_retries = max_retries - retries_taken
if remaining_retries == 1:
log.debug("1 retry left")
else:
log.debug("%i retries left", remaining_retries)
- timeout = self._calculate_retry_timeout(remaining_retries, options, response_headers)
+ timeout = self._calculate_retry_timeout(remaining_retries, options, response.headers if response else None)
log.info("Retrying request to %s in %f seconds", options.url, timeout)
- # In a synchronous context we are blocking the entire thread. Up to the library user to run the client in a
- # different thread if necessary.
time.sleep(timeout)
- return self._request(
- options=options,
- cast_to=cast_to,
- retries_taken=retries_taken + 1,
- stream=stream,
- stream_cls=stream_cls,
- )
-
def _process_response(
self,
*,
@@ -1107,7 +1081,14 @@ def _process_response(
) -> ResponseT:
origin = get_origin(cast_to) or cast_to
- if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse):
+ if (
+ inspect.isclass(origin)
+ and issubclass(origin, BaseAPIResponse)
+ # we only want to actually return the custom BaseAPIResponse class if we're
+ # returning the raw response, or if we're not streaming SSE, as if we're streaming
+ # SSE then `cast_to` doesn't actively reflect the type we need to parse into
+ and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER)))
+ ):
if not issubclass(origin, APIResponse):
raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}")
@@ -1318,6 +1299,24 @@ def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
+try:
+ import httpx_aiohttp
+except ImportError:
+
+ class _DefaultAioHttpClient(httpx.AsyncClient):
+ def __init__(self, **_kwargs: Any) -> None:
+ raise RuntimeError("To use the aiohttp client you must have installed the package with the `aiohttp` extra")
+else:
+
+ class _DefaultAioHttpClient(httpx_aiohttp.HttpxAiohttpClient): # type: ignore
+ def __init__(self, **kwargs: Any) -> None:
+ kwargs.setdefault("timeout", DEFAULT_TIMEOUT)
+ kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS)
+ kwargs.setdefault("follow_redirects", True)
+
+ super().__init__(**kwargs)
+
+
if TYPE_CHECKING:
DefaultAsyncHttpxClient = httpx.AsyncClient
"""An alias to `httpx.AsyncClient` that provides the same defaults that this SDK
@@ -1326,12 +1325,19 @@ def __init__(self, **kwargs: Any) -> None:
This is useful because overriding the `http_client` with your own instance of
`httpx.AsyncClient` will result in httpx's defaults being used, not ours.
"""
+
+ DefaultAioHttpClient = httpx.AsyncClient
+ """An alias to `httpx.AsyncClient` that changes the default HTTP transport to `aiohttp`."""
else:
DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient
+ DefaultAioHttpClient = _DefaultAioHttpClient
class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient):
def __del__(self) -> None:
+ if self.is_closed:
+ return
+
try:
# TODO(someday): support non asyncio runtimes here
asyncio.get_running_loop().create_task(self.aclose())
@@ -1350,43 +1356,11 @@ def __init__(
base_url: str | URL,
_strict_response_validation: bool,
max_retries: int = DEFAULT_MAX_RETRIES,
- timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
- transport: AsyncTransport | None = None,
- proxies: ProxiesTypes | None = None,
- limits: Limits | None = None,
+ timeout: float | Timeout | None | NotGiven = not_given,
http_client: httpx.AsyncClient | None = None,
custom_headers: Mapping[str, str] | None = None,
custom_query: Mapping[str, object] | None = None,
) -> None:
- if limits is not None:
- warnings.warn(
- "The `connection_pool_limits` argument is deprecated. The `http_client` argument should be passed instead",
- category=DeprecationWarning,
- stacklevel=3,
- )
- if http_client is not None:
- raise ValueError("The `http_client` argument is mutually exclusive with `connection_pool_limits`")
- else:
- limits = DEFAULT_CONNECTION_LIMITS
-
- if transport is not None:
- warnings.warn(
- "The `transport` argument is deprecated. The `http_client` argument should be passed instead",
- category=DeprecationWarning,
- stacklevel=3,
- )
- if http_client is not None:
- raise ValueError("The `http_client` argument is mutually exclusive with `transport`")
-
- if proxies is not None:
- warnings.warn(
- "The `proxies` argument is deprecated. The `http_client` argument should be passed instead",
- category=DeprecationWarning,
- stacklevel=3,
- )
- if http_client is not None:
- raise ValueError("The `http_client` argument is mutually exclusive with `proxies`")
-
if not is_given(timeout):
# if the user passed in a custom http client with a non-default
# timeout set then we use that timeout.
@@ -1408,11 +1382,8 @@ def __init__(
super().__init__(
version=version,
base_url=base_url,
- limits=limits,
# cast to a valid type because mypy doesn't understand our type narrowing
timeout=cast(Timeout, timeout),
- proxies=proxies,
- transport=transport,
max_retries=max_retries,
custom_query=custom_query,
custom_headers=custom_headers,
@@ -1422,10 +1393,6 @@ def __init__(
base_url=base_url,
# cast to a valid type because mypy doesn't understand our type narrowing
timeout=cast(Timeout, timeout),
- proxies=proxies,
- transport=transport,
- limits=limits,
- follow_redirects=True,
)
def is_closed(self) -> bool:
@@ -1474,7 +1441,6 @@ async def request(
options: FinalRequestOptions,
*,
stream: Literal[False] = False,
- remaining_retries: Optional[int] = None,
) -> ResponseT: ...
@overload
@@ -1485,7 +1451,6 @@ async def request(
*,
stream: Literal[True],
stream_cls: type[_AsyncStreamT],
- remaining_retries: Optional[int] = None,
) -> _AsyncStreamT: ...
@overload
@@ -1496,7 +1461,6 @@ async def request(
*,
stream: bool,
stream_cls: type[_AsyncStreamT] | None = None,
- remaining_retries: Optional[int] = None,
) -> ResponseT | _AsyncStreamT: ...
async def request(
@@ -1506,116 +1470,114 @@ async def request(
*,
stream: bool = False,
stream_cls: type[_AsyncStreamT] | None = None,
- remaining_retries: Optional[int] = None,
- ) -> ResponseT | _AsyncStreamT:
- if remaining_retries is not None:
- retries_taken = options.get_max_retries(self.max_retries) - remaining_retries
- else:
- retries_taken = 0
-
- return await self._request(
- cast_to=cast_to,
- options=options,
- stream=stream,
- stream_cls=stream_cls,
- retries_taken=retries_taken,
- )
-
- async def _request(
- self,
- cast_to: Type[ResponseT],
- options: FinalRequestOptions,
- *,
- stream: bool,
- stream_cls: type[_AsyncStreamT] | None,
- retries_taken: int,
) -> ResponseT | _AsyncStreamT:
if self._platform is None:
# `get_platform` can make blocking IO calls so we
# execute it earlier while we are in an async context
self._platform = await asyncify(get_platform)()
+ cast_to = self._maybe_override_cast_to(cast_to, options)
+
# create a copy of the options we were given so that if the
# options are mutated later & we then retry, the retries are
# given the original options
input_options = model_copy(options)
+ if input_options.idempotency_key is None and input_options.method.lower() != "get":
+ # ensure the idempotency key is reused between requests
+ input_options.idempotency_key = self._idempotency_key()
- cast_to = self._maybe_override_cast_to(cast_to, options)
- options = await self._prepare_options(options)
+ response: httpx.Response | None = None
+ max_retries = input_options.get_max_retries(self.max_retries)
- remaining_retries = options.get_max_retries(self.max_retries) - retries_taken
- request = self._build_request(options, retries_taken=retries_taken)
- await self._prepare_request(request)
+ retries_taken = 0
+ for retries_taken in range(max_retries + 1):
+ options = model_copy(input_options)
+ options = await self._prepare_options(options)
- kwargs: HttpxSendArgs = {}
- if self.custom_auth is not None:
- kwargs["auth"] = self.custom_auth
+ remaining_retries = max_retries - retries_taken
+ request = self._build_request(options, retries_taken=retries_taken)
+ await self._prepare_request(request)
- try:
- response = await self._client.send(
- request,
- stream=stream or self._should_stream_response_body(request=request),
- **kwargs,
- )
- except httpx.TimeoutException as err:
- log.debug("Encountered httpx.TimeoutException", exc_info=True)
+ kwargs: HttpxSendArgs = {}
+ if self.custom_auth is not None:
+ kwargs["auth"] = self.custom_auth
- if remaining_retries > 0:
- return await self._retry_request(
- input_options,
- cast_to,
- retries_taken=retries_taken,
- stream=stream,
- stream_cls=stream_cls,
- response_headers=None,
- )
+ if options.follow_redirects is not None:
+ kwargs["follow_redirects"] = options.follow_redirects
- log.debug("Raising timeout error")
- raise APITimeoutError(request=request) from err
- except Exception as err:
- log.debug("Encountered Exception", exc_info=True)
+ log.debug("Sending HTTP Request: %s %s", request.method, request.url)
- if remaining_retries > 0:
- return await self._retry_request(
- input_options,
- cast_to,
- retries_taken=retries_taken,
- stream=stream,
- stream_cls=stream_cls,
- response_headers=None,
+ response = None
+ try:
+ response = await self._client.send(
+ request,
+ stream=stream or self._should_stream_response_body(request=request),
+ **kwargs,
)
+ except httpx.TimeoutException as err:
+ log.debug("Encountered httpx.TimeoutException", exc_info=True)
+
+ if remaining_retries > 0:
+ await self._sleep_for_retry(
+ retries_taken=retries_taken,
+ max_retries=max_retries,
+ options=input_options,
+ response=None,
+ )
+ continue
+
+ log.debug("Raising timeout error")
+ raise APITimeoutError(request=request) from err
+ except Exception as err:
+ log.debug("Encountered Exception", exc_info=True)
+
+ if remaining_retries > 0:
+ await self._sleep_for_retry(
+ retries_taken=retries_taken,
+ max_retries=max_retries,
+ options=input_options,
+ response=None,
+ )
+ continue
+
+ log.debug("Raising connection error")
+ raise APIConnectionError(request=request) from err
+
+ log.debug(
+ 'HTTP Response: %s %s "%i %s" %s',
+ request.method,
+ request.url,
+ response.status_code,
+ response.reason_phrase,
+ response.headers,
+ )
- log.debug("Raising connection error")
- raise APIConnectionError(request=request) from err
-
- log.debug(
- 'HTTP Request: %s %s "%i %s"', request.method, request.url, response.status_code, response.reason_phrase
- )
+ try:
+ response.raise_for_status()
+ except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code
+ log.debug("Encountered httpx.HTTPStatusError", exc_info=True)
+
+ if remaining_retries > 0 and self._should_retry(err.response):
+ await err.response.aclose()
+ await self._sleep_for_retry(
+ retries_taken=retries_taken,
+ max_retries=max_retries,
+ options=input_options,
+ response=response,
+ )
+ continue
- try:
- response.raise_for_status()
- except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code
- log.debug("Encountered httpx.HTTPStatusError", exc_info=True)
-
- if remaining_retries > 0 and self._should_retry(err.response):
- await err.response.aclose()
- return await self._retry_request(
- input_options,
- cast_to,
- retries_taken=retries_taken,
- response_headers=err.response.headers,
- stream=stream,
- stream_cls=stream_cls,
- )
+ # If the response is streamed then we need to explicitly read the response
+ # to completion before attempting to access the response text.
+ if not err.response.is_closed:
+ await err.response.aread()
- # If the response is streamed then we need to explicitly read the response
- # to completion before attempting to access the response text.
- if not err.response.is_closed:
- await err.response.aread()
+ log.debug("Re-raising status error")
+ raise self._make_status_error_from_response(err.response) from None
- log.debug("Re-raising status error")
- raise self._make_status_error_from_response(err.response) from None
+ break
+ assert response is not None, "could not resolve response (should never happen)"
return await self._process_response(
cast_to=cast_to,
options=options,
@@ -1625,35 +1587,20 @@ async def _request(
retries_taken=retries_taken,
)
- async def _retry_request(
- self,
- options: FinalRequestOptions,
- cast_to: Type[ResponseT],
- *,
- retries_taken: int,
- response_headers: httpx.Headers | None,
- stream: bool,
- stream_cls: type[_AsyncStreamT] | None,
- ) -> ResponseT | _AsyncStreamT:
- remaining_retries = options.get_max_retries(self.max_retries) - retries_taken
+ async def _sleep_for_retry(
+ self, *, retries_taken: int, max_retries: int, options: FinalRequestOptions, response: httpx.Response | None
+ ) -> None:
+ remaining_retries = max_retries - retries_taken
if remaining_retries == 1:
log.debug("1 retry left")
else:
log.debug("%i retries left", remaining_retries)
- timeout = self._calculate_retry_timeout(remaining_retries, options, response_headers)
+ timeout = self._calculate_retry_timeout(remaining_retries, options, response.headers if response else None)
log.info("Retrying request to %s in %f seconds", options.url, timeout)
await anyio.sleep(timeout)
- return await self._request(
- options=options,
- cast_to=cast_to,
- retries_taken=retries_taken + 1,
- stream=stream,
- stream_cls=stream_cls,
- )
-
async def _process_response(
self,
*,
@@ -1666,7 +1613,14 @@ async def _process_response(
) -> ResponseT:
origin = get_origin(cast_to) or cast_to
- if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse):
+ if (
+ inspect.isclass(origin)
+ and issubclass(origin, BaseAPIResponse)
+ # we only want to actually return the custom BaseAPIResponse class if we're
+ # returning the raw response, or if we're not streaming SSE, as if we're streaming
+ # SSE then `cast_to` doesn't actively reflect the type we need to parse into
+ and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER)))
+ ):
if not issubclass(origin, AsyncAPIResponse):
raise TypeError(f"API Response types must subclass {AsyncAPIResponse}; Received {origin}")
@@ -1864,8 +1818,8 @@ def make_request_options(
extra_query: Query | None = None,
extra_body: Body | None = None,
idempotency_key: str | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- post_parser: PostParser | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ post_parser: PostParser | NotGiven = not_given,
) -> RequestOptions:
"""Create a dict of type RequestOptions without keys of NotGiven values."""
options: RequestOptions = {}
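End-to-end, the new aiohttp transport is opt-in from user code roughly as follows. A minimal sketch, assuming the package was installed with the `aiohttp` extra declared in pyproject.toml and that `BROWSERBASE_API_KEY` and `BROWSERBASE_PROJECT_ID` are set; without the extra, `DefaultAioHttpClient()` raises the RuntimeError defined above.

import asyncio
import os

from browserbase import AsyncBrowserbase, DefaultAioHttpClient


async def main() -> None:
    # The aiohttp-backed httpx client is passed in like any other custom http_client.
    async with AsyncBrowserbase(http_client=DefaultAioHttpClient()) as client:
        session = await client.sessions.create(project_id=os.environ["BROWSERBASE_PROJECT_ID"])
        print(session.id)


asyncio.run(main())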
diff --git a/src/browserbase/_client.py b/src/browserbase/_client.py
index b3ed32f6..8b54a5bb 100644
--- a/src/browserbase/_client.py
+++ b/src/browserbase/_client.py
@@ -3,27 +3,25 @@
from __future__ import annotations
import os
-from typing import Any, Union, Mapping
+from typing import Any, Mapping
from typing_extensions import Self, override
import httpx
-from . import resources, _exceptions
+from . import _exceptions
from ._qs import Querystring
from ._types import (
- NOT_GIVEN,
Omit,
Timeout,
NotGiven,
Transport,
ProxiesTypes,
RequestOptions,
+ not_given,
)
-from ._utils import (
- is_given,
- get_async_library,
-)
+from ._utils import is_given, get_async_library
from ._version import __version__
+from .resources import contexts, projects, extensions
from ._streaming import Stream as Stream, AsyncStream as AsyncStream
from ._exceptions import APIStatusError, BrowserbaseError
from ._base_client import (
@@ -31,13 +29,13 @@
SyncAPIClient,
AsyncAPIClient,
)
+from .resources.sessions import sessions
__all__ = [
"Timeout",
"Transport",
"ProxiesTypes",
"RequestOptions",
- "resources",
"Browserbase",
"AsyncBrowserbase",
"Client",
@@ -46,10 +44,10 @@
class Browserbase(SyncAPIClient):
- contexts: resources.ContextsResource
- extensions: resources.ExtensionsResource
- projects: resources.ProjectsResource
- sessions: resources.SessionsResource
+ contexts: contexts.ContextsResource
+ extensions: extensions.ExtensionsResource
+ projects: projects.ProjectsResource
+ sessions: sessions.SessionsResource
with_raw_response: BrowserbaseWithRawResponse
with_streaming_response: BrowserbaseWithStreamedResponse
@@ -61,7 +59,7 @@ def __init__(
*,
api_key: str | None = None,
base_url: str | httpx.URL | None = None,
- timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
+ timeout: float | Timeout | None | NotGiven = not_given,
max_retries: int = DEFAULT_MAX_RETRIES,
default_headers: Mapping[str, str] | None = None,
default_query: Mapping[str, object] | None = None,
@@ -107,10 +105,10 @@ def __init__(
_strict_response_validation=_strict_response_validation,
)
- self.contexts = resources.ContextsResource(self)
- self.extensions = resources.ExtensionsResource(self)
- self.projects = resources.ProjectsResource(self)
- self.sessions = resources.SessionsResource(self)
+ self.contexts = contexts.ContextsResource(self)
+ self.extensions = extensions.ExtensionsResource(self)
+ self.projects = projects.ProjectsResource(self)
+ self.sessions = sessions.SessionsResource(self)
self.with_raw_response = BrowserbaseWithRawResponse(self)
self.with_streaming_response = BrowserbaseWithStreamedResponse(self)
@@ -139,9 +137,9 @@ def copy(
*,
api_key: str | None = None,
base_url: str | httpx.URL | None = None,
- timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | Timeout | None | NotGiven = not_given,
http_client: httpx.Client | None = None,
- max_retries: int | NotGiven = NOT_GIVEN,
+ max_retries: int | NotGiven = not_given,
default_headers: Mapping[str, str] | None = None,
set_default_headers: Mapping[str, str] | None = None,
default_query: Mapping[str, object] | None = None,
@@ -220,10 +218,10 @@ def _make_status_error(
class AsyncBrowserbase(AsyncAPIClient):
- contexts: resources.AsyncContextsResource
- extensions: resources.AsyncExtensionsResource
- projects: resources.AsyncProjectsResource
- sessions: resources.AsyncSessionsResource
+ contexts: contexts.AsyncContextsResource
+ extensions: extensions.AsyncExtensionsResource
+ projects: projects.AsyncProjectsResource
+ sessions: sessions.AsyncSessionsResource
with_raw_response: AsyncBrowserbaseWithRawResponse
with_streaming_response: AsyncBrowserbaseWithStreamedResponse
@@ -235,7 +233,7 @@ def __init__(
*,
api_key: str | None = None,
base_url: str | httpx.URL | None = None,
- timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
+ timeout: float | Timeout | None | NotGiven = not_given,
max_retries: int = DEFAULT_MAX_RETRIES,
default_headers: Mapping[str, str] | None = None,
default_query: Mapping[str, object] | None = None,
@@ -253,7 +251,7 @@ def __init__(
# part of our public interface in the future.
_strict_response_validation: bool = False,
) -> None:
- """Construct a new async Browserbase client instance.
+ """Construct a new async AsyncBrowserbase client instance.
This automatically infers the `api_key` argument from the `BROWSERBASE_API_KEY` environment variable if it is not provided.
"""
@@ -281,10 +279,10 @@ def __init__(
_strict_response_validation=_strict_response_validation,
)
- self.contexts = resources.AsyncContextsResource(self)
- self.extensions = resources.AsyncExtensionsResource(self)
- self.projects = resources.AsyncProjectsResource(self)
- self.sessions = resources.AsyncSessionsResource(self)
+ self.contexts = contexts.AsyncContextsResource(self)
+ self.extensions = extensions.AsyncExtensionsResource(self)
+ self.projects = projects.AsyncProjectsResource(self)
+ self.sessions = sessions.AsyncSessionsResource(self)
self.with_raw_response = AsyncBrowserbaseWithRawResponse(self)
self.with_streaming_response = AsyncBrowserbaseWithStreamedResponse(self)
@@ -313,9 +311,9 @@ def copy(
*,
api_key: str | None = None,
base_url: str | httpx.URL | None = None,
- timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | Timeout | None | NotGiven = not_given,
http_client: httpx.AsyncClient | None = None,
- max_retries: int | NotGiven = NOT_GIVEN,
+ max_retries: int | NotGiven = not_given,
default_headers: Mapping[str, str] | None = None,
set_default_headers: Mapping[str, str] | None = None,
default_query: Mapping[str, object] | None = None,
@@ -395,34 +393,34 @@ def _make_status_error(
class BrowserbaseWithRawResponse:
def __init__(self, client: Browserbase) -> None:
- self.contexts = resources.ContextsResourceWithRawResponse(client.contexts)
- self.extensions = resources.ExtensionsResourceWithRawResponse(client.extensions)
- self.projects = resources.ProjectsResourceWithRawResponse(client.projects)
- self.sessions = resources.SessionsResourceWithRawResponse(client.sessions)
+ self.contexts = contexts.ContextsResourceWithRawResponse(client.contexts)
+ self.extensions = extensions.ExtensionsResourceWithRawResponse(client.extensions)
+ self.projects = projects.ProjectsResourceWithRawResponse(client.projects)
+ self.sessions = sessions.SessionsResourceWithRawResponse(client.sessions)
class AsyncBrowserbaseWithRawResponse:
def __init__(self, client: AsyncBrowserbase) -> None:
- self.contexts = resources.AsyncContextsResourceWithRawResponse(client.contexts)
- self.extensions = resources.AsyncExtensionsResourceWithRawResponse(client.extensions)
- self.projects = resources.AsyncProjectsResourceWithRawResponse(client.projects)
- self.sessions = resources.AsyncSessionsResourceWithRawResponse(client.sessions)
+ self.contexts = contexts.AsyncContextsResourceWithRawResponse(client.contexts)
+ self.extensions = extensions.AsyncExtensionsResourceWithRawResponse(client.extensions)
+ self.projects = projects.AsyncProjectsResourceWithRawResponse(client.projects)
+ self.sessions = sessions.AsyncSessionsResourceWithRawResponse(client.sessions)
class BrowserbaseWithStreamedResponse:
def __init__(self, client: Browserbase) -> None:
- self.contexts = resources.ContextsResourceWithStreamingResponse(client.contexts)
- self.extensions = resources.ExtensionsResourceWithStreamingResponse(client.extensions)
- self.projects = resources.ProjectsResourceWithStreamingResponse(client.projects)
- self.sessions = resources.SessionsResourceWithStreamingResponse(client.sessions)
+ self.contexts = contexts.ContextsResourceWithStreamingResponse(client.contexts)
+ self.extensions = extensions.ExtensionsResourceWithStreamingResponse(client.extensions)
+ self.projects = projects.ProjectsResourceWithStreamingResponse(client.projects)
+ self.sessions = sessions.SessionsResourceWithStreamingResponse(client.sessions)
class AsyncBrowserbaseWithStreamedResponse:
def __init__(self, client: AsyncBrowserbase) -> None:
- self.contexts = resources.AsyncContextsResourceWithStreamingResponse(client.contexts)
- self.extensions = resources.AsyncExtensionsResourceWithStreamingResponse(client.extensions)
- self.projects = resources.AsyncProjectsResourceWithStreamingResponse(client.projects)
- self.sessions = resources.AsyncSessionsResourceWithStreamingResponse(client.sessions)
+ self.contexts = contexts.AsyncContextsResourceWithStreamingResponse(client.contexts)
+ self.extensions = extensions.AsyncExtensionsResourceWithStreamingResponse(client.extensions)
+ self.projects = projects.AsyncProjectsResourceWithStreamingResponse(client.projects)
+ self.sessions = sessions.AsyncSessionsResourceWithStreamingResponse(client.sessions)
Client = Browserbase
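From the user's side the client surface is unchanged by the import shuffle above. A minimal sketch of the access patterns it wires up, assuming `BROWSERBASE_API_KEY` and `BROWSERBASE_PROJECT_ID` are set; the `x-request-id` header name is illustrative only.

import os

from browserbase import Browserbase

bb = Browserbase()  # api_key inferred from the BROWSERBASE_API_KEY environment variable
project_id = os.environ["BROWSERBASE_PROJECT_ID"]

project = bb.projects.retrieve(project_id)                 # parsed Pydantic model
raw = bb.with_raw_response.projects.retrieve(project_id)   # wrapper over the httpx response
print(raw.headers.get("x-request-id"), raw.parse().id)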
diff --git a/src/browserbase/_compat.py b/src/browserbase/_compat.py
index d89920d9..bdef67f0 100644
--- a/src/browserbase/_compat.py
+++ b/src/browserbase/_compat.py
@@ -2,7 +2,7 @@
from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
from datetime import date, datetime
-from typing_extensions import Self
+from typing_extensions import Self, Literal
import pydantic
from pydantic.fields import FieldInfo
@@ -12,14 +12,13 @@
_T = TypeVar("_T")
_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)
-# --------------- Pydantic v2 compatibility ---------------
+# --------------- Pydantic v2, v3 compatibility ---------------
# Pyright incorrectly reports some of our functions as overriding a method when they don't
# pyright: reportIncompatibleMethodOverride=false
-PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
+PYDANTIC_V1 = pydantic.VERSION.startswith("1.")
-# v1 re-exports
if TYPE_CHECKING:
def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001
@@ -44,90 +43,92 @@ def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001
...
else:
- if PYDANTIC_V2:
- from pydantic.v1.typing import (
+ # v1 re-exports
+ if PYDANTIC_V1:
+ from pydantic.typing import (
get_args as get_args,
is_union as is_union,
get_origin as get_origin,
is_typeddict as is_typeddict,
is_literal_type as is_literal_type,
)
- from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
+ from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
else:
- from pydantic.typing import (
+ from ._utils import (
get_args as get_args,
is_union as is_union,
get_origin as get_origin,
+ parse_date as parse_date,
is_typeddict as is_typeddict,
+ parse_datetime as parse_datetime,
is_literal_type as is_literal_type,
)
- from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
# refactored config
if TYPE_CHECKING:
from pydantic import ConfigDict as ConfigDict
else:
- if PYDANTIC_V2:
- from pydantic import ConfigDict
- else:
+ if PYDANTIC_V1:
# TODO: provide an error message here?
ConfigDict = None
+ else:
+ from pydantic import ConfigDict as ConfigDict
# renamed methods / properties
def parse_obj(model: type[_ModelT], value: object) -> _ModelT:
- if PYDANTIC_V2:
- return model.model_validate(value)
- else:
+ if PYDANTIC_V1:
return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+ else:
+ return model.model_validate(value)
def field_is_required(field: FieldInfo) -> bool:
- if PYDANTIC_V2:
- return field.is_required()
- return field.required # type: ignore
+ if PYDANTIC_V1:
+ return field.required # type: ignore
+ return field.is_required()
def field_get_default(field: FieldInfo) -> Any:
value = field.get_default()
- if PYDANTIC_V2:
- from pydantic_core import PydanticUndefined
-
- if value == PydanticUndefined:
- return None
+ if PYDANTIC_V1:
return value
+ from pydantic_core import PydanticUndefined
+
+ if value == PydanticUndefined:
+ return None
return value
def field_outer_type(field: FieldInfo) -> Any:
- if PYDANTIC_V2:
- return field.annotation
- return field.outer_type_ # type: ignore
+ if PYDANTIC_V1:
+ return field.outer_type_ # type: ignore
+ return field.annotation
def get_model_config(model: type[pydantic.BaseModel]) -> Any:
- if PYDANTIC_V2:
- return model.model_config
- return model.__config__ # type: ignore
+ if PYDANTIC_V1:
+ return model.__config__ # type: ignore
+ return model.model_config
def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]:
- if PYDANTIC_V2:
- return model.model_fields
- return model.__fields__ # type: ignore
+ if PYDANTIC_V1:
+ return model.__fields__ # type: ignore
+ return model.model_fields
def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT:
- if PYDANTIC_V2:
- return model.model_copy(deep=deep)
- return model.copy(deep=deep) # type: ignore
+ if PYDANTIC_V1:
+ return model.copy(deep=deep) # type: ignore
+ return model.model_copy(deep=deep)
def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
- if PYDANTIC_V2:
- return model.model_dump_json(indent=indent)
- return model.json(indent=indent) # type: ignore
+ if PYDANTIC_V1:
+ return model.json(indent=indent) # type: ignore
+ return model.model_dump_json(indent=indent)
def model_dump(
@@ -137,13 +138,16 @@ def model_dump(
exclude_unset: bool = False,
exclude_defaults: bool = False,
warnings: bool = True,
+ mode: Literal["json", "python"] = "python",
) -> dict[str, Any]:
- if PYDANTIC_V2:
+ if (not PYDANTIC_V1) or hasattr(model, "model_dump"):
return model.model_dump(
+ mode=mode,
exclude=exclude,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
- warnings=warnings,
+ # warnings are not supported in Pydantic v1
+ warnings=True if PYDANTIC_V1 else warnings,
)
return cast(
"dict[str, Any]",
@@ -156,9 +160,9 @@ def model_dump(
def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
- if PYDANTIC_V2:
- return model.model_validate(data)
- return model.parse_obj(data) # pyright: ignore[reportDeprecated]
+ if PYDANTIC_V1:
+ return model.parse_obj(data) # pyright: ignore[reportDeprecated]
+ return model.model_validate(data)
# generic models
@@ -167,17 +171,16 @@ def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
class GenericModel(pydantic.BaseModel): ...
else:
- if PYDANTIC_V2:
+ if PYDANTIC_V1:
+ import pydantic.generics
+
+ class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
+ else:
# there no longer needs to be a distinction in v2 but
# we still have to create our own subclass to avoid
# inconsistent MRO ordering errors
class GenericModel(pydantic.BaseModel): ...
- else:
- import pydantic.generics
-
- class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
-
# cached properties
if TYPE_CHECKING:
@@ -211,9 +214,6 @@ def __set_name__(self, owner: type[Any], name: str) -> None: ...
# __set__ is not defined at runtime, but @cached_property is designed to be settable
def __set__(self, instance: object, value: _T) -> None: ...
else:
- try:
- from functools import cached_property as cached_property
- except ImportError:
- from cached_property import cached_property as cached_property
+ from functools import cached_property as cached_property
typed_cached_property = cached_property
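A minimal usage sketch (not part of the diff) of the `mode` parameter the shim above now forwards, assuming Pydantic v2 is installed: with `mode="json"` datetimes come back as ISO strings, while `mode="python"` keeps native objects.
```py
from datetime import datetime, timezone

import pydantic

from browserbase._compat import model_dump


class Example(pydantic.BaseModel):
    created_at: datetime


row = Example(created_at=datetime(2024, 1, 1, tzinfo=timezone.utc))
print(model_dump(row, mode="python"))  # {'created_at': datetime.datetime(2024, 1, 1, ...)}
print(model_dump(row, mode="json"))    # {'created_at': '2024-01-01T00:00:00Z'} (exact ISO form may vary)
```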
diff --git a/src/browserbase/_constants.py b/src/browserbase/_constants.py
index a2ac3b6f..6ddf2c71 100644
--- a/src/browserbase/_constants.py
+++ b/src/browserbase/_constants.py
@@ -6,7 +6,7 @@
OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to"
# default timeout is 1 minute
-DEFAULT_TIMEOUT = httpx.Timeout(timeout=60.0, connect=5.0)
+DEFAULT_TIMEOUT = httpx.Timeout(timeout=60, connect=5.0)
DEFAULT_MAX_RETRIES = 2
DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20)
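For context, a hedged sketch of how callers typically override this default per client; the `api_key` and `timeout` constructor arguments are assumed from the SDK's usual surface rather than shown in this diff.
```py
import httpx

from browserbase import Browserbase

# hypothetical credentials, for illustration only
client = Browserbase(
    api_key="bb_example_key",
    timeout=httpx.Timeout(timeout=120, connect=5.0),  # overrides DEFAULT_TIMEOUT above
)
```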
diff --git a/src/browserbase/_files.py b/src/browserbase/_files.py
index 715cc207..ff951be7 100644
--- a/src/browserbase/_files.py
+++ b/src/browserbase/_files.py
@@ -34,7 +34,7 @@ def assert_is_file_content(obj: object, *, key: str | None = None) -> None:
if not is_file_content(obj):
prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`"
raise RuntimeError(
- f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead."
+ f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead. See https://github.com/browserbase/sdk-python/tree/main#file-uploads"
) from None
@@ -69,12 +69,12 @@ def _transform_file(file: FileTypes) -> HttpxFileTypes:
return file
if is_tuple_t(file):
- return (file[0], _read_file_content(file[1]), *file[2:])
+ return (file[0], read_file_content(file[1]), *file[2:])
raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple")
-def _read_file_content(file: FileContent) -> HttpxFileContent:
+def read_file_content(file: FileContent) -> HttpxFileContent:
if isinstance(file, os.PathLike):
return pathlib.Path(file).read_bytes()
return file
@@ -111,12 +111,12 @@ async def _async_transform_file(file: FileTypes) -> HttpxFileTypes:
return file
if is_tuple_t(file):
- return (file[0], await _async_read_file_content(file[1]), *file[2:])
+ return (file[0], await async_read_file_content(file[1]), *file[2:])
raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple")
-async def _async_read_file_content(file: FileContent) -> HttpxFileContent:
+async def async_read_file_content(file: FileContent) -> HttpxFileContent:
if isinstance(file, os.PathLike):
return await anyio.Path(file).read_bytes()
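A small sketch of the now-public helpers (the file path below is hypothetical):
```py
import pathlib

from browserbase._files import read_file_content

data = read_file_content(pathlib.Path("extension.zip"))  # PathLike input is read into bytes
raw = read_file_content(b"already bytes")                # any other FileContent is returned unchanged
```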
diff --git a/src/browserbase/_models.py b/src/browserbase/_models.py
index 42551b76..6a3cd1d2 100644
--- a/src/browserbase/_models.py
+++ b/src/browserbase/_models.py
@@ -2,9 +2,10 @@
import os
import inspect
-from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast
+from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast
from datetime import date, datetime
from typing_extensions import (
+ List,
Unpack,
Literal,
ClassVar,
@@ -19,7 +20,6 @@
)
import pydantic
-import pydantic.generics
from pydantic.fields import FieldInfo
from ._types import (
@@ -37,6 +37,7 @@
PropertyInfo,
is_list,
is_given,
+ json_safe,
lru_cache,
is_mapping,
parse_date,
@@ -45,10 +46,11 @@
strip_not_given,
extract_type_arg,
is_annotated_type,
+ is_type_alias_type,
strip_annotated_type,
)
from ._compat import (
- PYDANTIC_V2,
+ PYDANTIC_V1,
ConfigDict,
GenericModel as BaseGenericModel,
get_args,
@@ -63,7 +65,7 @@
from ._constants import RAW_RESPONSE_HEADER
if TYPE_CHECKING:
- from pydantic_core.core_schema import ModelField, LiteralSchema, ModelFieldsSchema
+ from pydantic_core.core_schema import ModelField, ModelSchema, LiteralSchema, ModelFieldsSchema
__all__ = ["BaseModel", "GenericModel"]
@@ -79,11 +81,7 @@ class _ConfigProtocol(Protocol):
class BaseModel(pydantic.BaseModel):
- if PYDANTIC_V2:
- model_config: ClassVar[ConfigDict] = ConfigDict(
- extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
- )
- else:
+ if PYDANTIC_V1:
@property
@override
@@ -93,6 +91,10 @@ def model_fields_set(self) -> set[str]:
class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated]
extra: Any = pydantic.Extra.allow # type: ignore
+ else:
+ model_config: ClassVar[ConfigDict] = ConfigDict(
+ extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
+ )
def to_dict(
self,
@@ -170,21 +172,21 @@ def to_json(
@override
def __str__(self) -> str:
# mypy complains about an invalid self arg
- return f'{self.__repr_name__()}({self.__repr_str__(", ")})' # type: ignore[misc]
+ return f"{self.__repr_name__()}({self.__repr_str__(', ')})" # type: ignore[misc]
# Override the 'construct' method in a way that supports recursive parsing without validation.
# Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836.
@classmethod
@override
def construct( # pyright: ignore[reportIncompatibleMethodOverride]
- cls: Type[ModelT],
+ __cls: Type[ModelT],
_fields_set: set[str] | None = None,
**values: object,
) -> ModelT:
- m = cls.__new__(cls)
+ m = __cls.__new__(__cls)
fields_values: dict[str, object] = {}
- config = get_model_config(cls)
+ config = get_model_config(__cls)
populate_by_name = (
config.allow_population_by_field_name
if isinstance(config, _ConfigProtocol)
@@ -194,7 +196,7 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride]
if _fields_set is None:
_fields_set = set()
- model_fields = get_model_fields(cls)
+ model_fields = get_model_fields(__cls)
for name, field in model_fields.items():
key = field.alias
if key is None or (key not in values and populate_by_name):
@@ -206,28 +208,32 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride]
else:
fields_values[name] = field_get_default(field)
+ extra_field_type = _get_extra_fields_type(__cls)
+
_extra = {}
for key, value in values.items():
if key not in model_fields:
- if PYDANTIC_V2:
- _extra[key] = value
- else:
+ parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value
+
+ if PYDANTIC_V1:
_fields_set.add(key)
- fields_values[key] = value
+ fields_values[key] = parsed
+ else:
+ _extra[key] = parsed
object.__setattr__(m, "__dict__", fields_values)
- if PYDANTIC_V2:
- # these properties are copied from Pydantic's `model_construct()` method
- object.__setattr__(m, "__pydantic_private__", None)
- object.__setattr__(m, "__pydantic_extra__", _extra)
- object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
- else:
+ if PYDANTIC_V1:
# init_private_attributes() does not exist in v2
m._init_private_attributes() # type: ignore
# copied from Pydantic v1's `construct()` method
object.__setattr__(m, "__fields_set__", _fields_set)
+ else:
+ # these properties are copied from Pydantic's `model_construct()` method
+ object.__setattr__(m, "__pydantic_private__", None)
+ object.__setattr__(m, "__pydantic_extra__", _extra)
+ object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
return m
@@ -237,7 +243,7 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride]
# although not in practice
model_construct = construct
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
# we define aliases for some of the new pydantic v2 methods so
# that we can just document these methods without having to specify
# a specific pydantic version as some users may not know which
@@ -250,7 +256,7 @@ def model_dump(
mode: Literal["json", "python"] | str = "python",
include: IncEx | None = None,
exclude: IncEx | None = None,
- by_alias: bool = False,
+ by_alias: bool | None = None,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
@@ -258,6 +264,7 @@ def model_dump(
warnings: bool | Literal["none", "warn", "error"] = True,
context: dict[str, Any] | None = None,
serialize_as_any: bool = False,
+ fallback: Callable[[Any], Any] | None = None,
) -> dict[str, Any]:
"""Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump
@@ -279,8 +286,8 @@ def model_dump(
Returns:
A dictionary representation of the model.
"""
- if mode != "python":
- raise ValueError("mode is only supported in Pydantic v2")
+ if mode not in {"json", "python"}:
+ raise ValueError("mode must be either 'json' or 'python'")
if round_trip != False:
raise ValueError("round_trip is only supported in Pydantic v2")
if warnings != True:
@@ -289,15 +296,19 @@ def model_dump(
raise ValueError("context is only supported in Pydantic v2")
if serialize_as_any != False:
raise ValueError("serialize_as_any is only supported in Pydantic v2")
- return super().dict( # pyright: ignore[reportDeprecated]
+ if fallback is not None:
+ raise ValueError("fallback is only supported in Pydantic v2")
+ dumped = super().dict( # pyright: ignore[reportDeprecated]
include=include,
exclude=exclude,
- by_alias=by_alias,
+ by_alias=by_alias if by_alias is not None else False,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
+ return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped
+
@override
def model_dump_json(
self,
@@ -305,13 +316,14 @@ def model_dump_json(
indent: int | None = None,
include: IncEx | None = None,
exclude: IncEx | None = None,
- by_alias: bool = False,
+ by_alias: bool | None = None,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
round_trip: bool = False,
warnings: bool | Literal["none", "warn", "error"] = True,
context: dict[str, Any] | None = None,
+ fallback: Callable[[Any], Any] | None = None,
serialize_as_any: bool = False,
) -> str:
"""Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json
@@ -340,11 +352,13 @@ def model_dump_json(
raise ValueError("context is only supported in Pydantic v2")
if serialize_as_any != False:
raise ValueError("serialize_as_any is only supported in Pydantic v2")
+ if fallback is not None:
+ raise ValueError("fallback is only supported in Pydantic v2")
return super().json( # type: ignore[reportDeprecated]
indent=indent,
include=include,
exclude=exclude,
- by_alias=by_alias,
+ by_alias=by_alias if by_alias is not None else False,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
@@ -355,15 +369,32 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
if value is None:
return field_get_default(field)
- if PYDANTIC_V2:
- type_ = field.annotation
- else:
+ if PYDANTIC_V1:
type_ = cast(type, field.outer_type_) # type: ignore
+ else:
+ type_ = field.annotation # type: ignore
if type_ is None:
raise RuntimeError(f"Unexpected field type is None for {key}")
- return construct_type(value=value, type_=type_)
+ return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None))
+
+
+def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None:
+ if PYDANTIC_V1:
+ # TODO
+ return None
+
+ schema = cls.__pydantic_core_schema__
+ if schema["type"] == "model":
+ fields = schema["schema"]
+ if fields["type"] == "model-fields":
+ extras = fields.get("extras_schema")
+ if extras and "cls" in extras:
+ # mypy can't narrow the type
+ return extras["cls"] # type: ignore[no-any-return]
+
+ return None
def is_basemodel(type_: type) -> bool:
@@ -417,18 +448,28 @@ def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T:
return cast(_T, construct_type(value=value, type_=type_))
-def construct_type(*, value: object, type_: object) -> object:
+def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]] = None) -> object:
"""Loose coercion to the expected type with construction of nested values.
If the given value does not match the expected type then it is returned as-is.
"""
+
+ # store a reference to the original type we were given before we extract any inner
+ # types so that we can properly resolve forward references in `TypeAliasType` annotations
+ original_type = None
+
# we allow `object` as the input type because otherwise, passing things like
# `Literal['value']` will be reported as a type error by type checkers
type_ = cast("type[object]", type_)
+ if is_type_alias_type(type_):
+ original_type = type_ # type: ignore[unreachable]
+ type_ = type_.__value__ # type: ignore[unreachable]
# unwrap `Annotated[T, ...]` -> `T`
- if is_annotated_type(type_):
- meta: tuple[Any, ...] = get_args(type_)[1:]
+ if metadata is not None and len(metadata) > 0:
+ meta: tuple[Any, ...] = tuple(metadata)
+ elif is_annotated_type(type_):
+ meta = get_args(type_)[1:]
type_ = extract_type_arg(type_, 0)
else:
meta = tuple()
@@ -440,7 +481,7 @@ def construct_type(*, value: object, type_: object) -> object:
if is_union(origin):
try:
- return validate_type(type_=cast("type[object]", type_), value=value)
+ return validate_type(type_=cast("type[object]", original_type or type_), value=value)
except Exception:
pass
@@ -482,7 +523,11 @@ def construct_type(*, value: object, type_: object) -> object:
_, items_type = get_args(type_) # Dict[_, items_type]
return {key: construct_type(value=item, type_=items_type) for key, item in value.items()}
- if not is_literal_type(type_) and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)):
+ if (
+ not is_literal_type(type_)
+ and inspect.isclass(origin)
+ and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel))
+ ):
if is_list(value):
return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value]
@@ -589,30 +634,30 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any,
for variant in get_args(union):
variant = strip_annotated_type(variant)
if is_basemodel_type(variant):
- if PYDANTIC_V2:
- field = _extract_field_schema_pv2(variant, discriminator_field_name)
- if not field:
+ if PYDANTIC_V1:
+ field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+ if not field_info:
continue
# Note: if one variant defines an alias then they all should
- discriminator_alias = field.get("serialization_alias")
-
- field_schema = field["schema"]
+ discriminator_alias = field_info.alias
- if field_schema["type"] == "literal":
- for entry in cast("LiteralSchema", field_schema)["expected"]:
+ if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
+ for entry in get_args(annotation):
if isinstance(entry, str):
mapping[entry] = variant
else:
- field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
- if not field_info:
+ field = _extract_field_schema_pv2(variant, discriminator_field_name)
+ if not field:
continue
# Note: if one variant defines an alias then they all should
- discriminator_alias = field_info.alias
+ discriminator_alias = field.get("serialization_alias")
- if field_info.annotation and is_literal_type(field_info.annotation):
- for entry in get_args(field_info.annotation):
+ field_schema = field["schema"]
+
+ if field_schema["type"] == "literal":
+ for entry in cast("LiteralSchema", field_schema)["expected"]:
if isinstance(entry, str):
mapping[entry] = variant
@@ -630,15 +675,18 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any,
def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None:
schema = model.__pydantic_core_schema__
+ if schema["type"] == "definitions":
+ schema = schema["schema"]
+
if schema["type"] != "model":
return None
+ schema = cast("ModelSchema", schema)
fields_schema = schema["schema"]
if fields_schema["type"] != "model-fields":
return None
fields_schema = cast("ModelFieldsSchema", fields_schema)
-
field = fields_schema["fields"].get(field_name)
if not field:
return None
@@ -662,7 +710,7 @@ def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None:
setattr(typ, "__pydantic_config__", config) # noqa: B010
-# our use of subclasssing here causes weirdness for type checkers,
+# our use of subclassing here causes weirdness for type checkers,
# so we just pretend that we don't subclass
if TYPE_CHECKING:
GenericModel = BaseModel
@@ -672,7 +720,7 @@ class GenericModel(BaseGenericModel, BaseModel):
pass
-if PYDANTIC_V2:
+if not PYDANTIC_V1:
from pydantic import TypeAdapter as _TypeAdapter
_CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
@@ -719,6 +767,7 @@ class FinalRequestOptionsInput(TypedDict, total=False):
idempotency_key: str
json_data: Body
extra_json: AnyMapping
+ follow_redirects: bool
@final
@@ -732,18 +781,19 @@ class FinalRequestOptions(pydantic.BaseModel):
files: Union[HttpxRequestFiles, None] = None
idempotency_key: Union[str, None] = None
post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven()
+ follow_redirects: Union[bool, None] = None
# It should be noted that we cannot use `json` here as that would override
# a BaseModel method in an incompatible fashion.
json_data: Union[Body, None] = None
extra_json: Union[AnyMapping, None] = None
- if PYDANTIC_V2:
- model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
- else:
+ if PYDANTIC_V1:
class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated]
arbitrary_types_allowed: bool = True
+ else:
+ model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
def get_max_retries(self, max_retries: int) -> int:
if isinstance(self.max_retries, NotGiven):
@@ -776,9 +826,9 @@ def construct( # type: ignore
key: strip_not_given(value)
for key, value in values.items()
}
- if PYDANTIC_V2:
- return super().model_construct(_fields_set, **kwargs)
- return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated]
+ if PYDANTIC_V1:
+ return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated]
+ return super().model_construct(_fields_set, **kwargs)
if not TYPE_CHECKING:
# type checkers incorrectly complain about this assignment
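An illustrative sketch (the model is made up) of the behaviour `construct()` preserves across both Pydantic majors: fields the schema does not declare are kept rather than rejected, landing in `__pydantic_extra__` on v2.
```py
from browserbase import BaseModel


class ExampleModel(BaseModel):
    id: str


m = ExampleModel.construct(id="abc123", region="us-west-2")
print(m.id)      # 'abc123'
print(m.region)  # 'us-west-2' – unknown field preserved without validation
```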
diff --git a/src/browserbase/_qs.py b/src/browserbase/_qs.py
index 274320ca..ada6fd3f 100644
--- a/src/browserbase/_qs.py
+++ b/src/browserbase/_qs.py
@@ -4,7 +4,7 @@
from urllib.parse import parse_qs, urlencode
from typing_extensions import Literal, get_args
-from ._types import NOT_GIVEN, NotGiven, NotGivenOr
+from ._types import NotGiven, not_given
from ._utils import flatten
_T = TypeVar("_T")
@@ -41,8 +41,8 @@ def stringify(
self,
params: Params,
*,
- array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
- nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+ array_format: ArrayFormat | NotGiven = not_given,
+ nested_format: NestedFormat | NotGiven = not_given,
) -> str:
return urlencode(
self.stringify_items(
@@ -56,8 +56,8 @@ def stringify_items(
self,
params: Params,
*,
- array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
- nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+ array_format: ArrayFormat | NotGiven = not_given,
+ nested_format: NestedFormat | NotGiven = not_given,
) -> list[tuple[str, str]]:
opts = Options(
qs=self,
@@ -143,8 +143,8 @@ def __init__(
self,
qs: Querystring = _qs,
*,
- array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
- nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+ array_format: ArrayFormat | NotGiven = not_given,
+ nested_format: NestedFormat | NotGiven = not_given,
) -> None:
self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format
self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format
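A quick sketch of the helper these signature changes apply to (values are illustrative):
```py
from browserbase._qs import Querystring

qs = Querystring()
print(qs.stringify({"status": ["RUNNING", "COMPLETED"]}, array_format="comma"))
# status=RUNNING%2CCOMPLETED
print(qs.stringify({"filter": {"region": "us-west-2"}}, nested_format="brackets"))
# filter%5Bregion%5D=us-west-2
```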
diff --git a/src/browserbase/_response.py b/src/browserbase/_response.py
index 81ae0828..5f8d0f48 100644
--- a/src/browserbase/_response.py
+++ b/src/browserbase/_response.py
@@ -25,7 +25,7 @@
import pydantic
from ._types import NoneType
-from ._utils import is_given, extract_type_arg, is_annotated_type, extract_type_var_from_base
+from ._utils import is_given, extract_type_arg, is_annotated_type, is_type_alias_type, extract_type_var_from_base
from ._models import BaseModel, is_basemodel
from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER
from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type
@@ -126,9 +126,17 @@ def __repr__(self) -> str:
)
def _parse(self, *, to: type[_T] | None = None) -> R | _T:
+ cast_to = to if to is not None else self._cast_to
+
+ # unwrap `TypeAlias('Name', T)` -> `T`
+ if is_type_alias_type(cast_to):
+ cast_to = cast_to.__value__ # type: ignore[unreachable]
+
# unwrap `Annotated[T, ...]` -> `T`
- if to and is_annotated_type(to):
- to = extract_type_arg(to, 0)
+ if cast_to and is_annotated_type(cast_to):
+ cast_to = extract_type_arg(cast_to, 0)
+
+ origin = get_origin(cast_to) or cast_to
if self._is_sse_stream:
if to:
@@ -164,18 +172,12 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
return cast(
R,
stream_cls(
- cast_to=self._cast_to,
+ cast_to=cast_to,
response=self.http_response,
client=cast(Any, self._client),
),
)
- cast_to = to if to is not None else self._cast_to
-
- # unwrap `Annotated[T, ...]` -> `T`
- if is_annotated_type(cast_to):
- cast_to = extract_type_arg(cast_to, 0)
-
if cast_to is NoneType:
return cast(R, None)
@@ -195,8 +197,6 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
if cast_to == bool:
return cast(R, response.text.lower() == "true")
- origin = get_origin(cast_to) or cast_to
-
if origin == APIResponse:
raise RuntimeError("Unexpected state - cast_to is `APIResponse`")
@@ -210,7 +210,13 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`")
return cast(R, response)
- if inspect.isclass(origin) and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel):
+ if (
+ inspect.isclass(
+ origin # pyright: ignore[reportUnknownArgumentType]
+ )
+ and not issubclass(origin, BaseModel)
+ and issubclass(origin, pydantic.BaseModel)
+ ):
raise TypeError(
"Pydantic models must subclass our base model type, e.g. `from browserbase import BaseModel`"
)
@@ -229,7 +235,7 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
# split is required to handle cases where additional information is included
# in the response, e.g. application/json; charset=utf-8
content_type, *_ = response.headers.get("content-type", "*").split(";")
- if content_type != "application/json":
+ if not content_type.endswith("json"):
if is_basemodel(cast_to):
try:
data = response.json()
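A tiny illustration of why the relaxed content-type check above matters: vendor-specific JSON media types now reach the JSON parsing path.
```py
content_type, *_ = "application/vnd.api+json; charset=utf-8".split(";")
print(content_type == "application/json")  # False – the old equality check would skip JSON parsing
print(content_type.endswith("json"))       # True  – the new suffix check accepts it
```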
diff --git a/src/browserbase/_types.py b/src/browserbase/_types.py
index 1691090d..f86be54d 100644
--- a/src/browserbase/_types.py
+++ b/src/browserbase/_types.py
@@ -13,10 +13,21 @@
Mapping,
TypeVar,
Callable,
+ Iterator,
Optional,
Sequence,
)
-from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable
+from typing_extensions import (
+ Set,
+ Literal,
+ Protocol,
+ TypeAlias,
+ TypedDict,
+ SupportsIndex,
+ overload,
+ override,
+ runtime_checkable,
+)
import httpx
import pydantic
@@ -100,23 +111,27 @@ class RequestOptions(TypedDict, total=False):
params: Query
extra_json: AnyMapping
idempotency_key: str
+ follow_redirects: bool
# Sentinel class used until PEP 0661 is accepted
class NotGiven:
"""
- A sentinel singleton class used to distinguish omitted keyword arguments
- from those passed in with the value None (which may have different behavior).
+ For parameters with a meaningful None value, we need to distinguish between
+ the user explicitly passing None, and the user not passing the parameter at
+ all.
+
+ User code shouldn't need to use not_given directly.
For example:
```py
- def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...
+ def create(timeout: Timeout | None | NotGiven = not_given): ...
- get(timeout=1) # 1s timeout
- get(timeout=None) # No timeout
- get() # Default timeout behavior, which may not be statically known at the method definition.
+ create(timeout=1) # 1s timeout
+ create(timeout=None) # No timeout
+ create() # Default timeout behavior
```
"""
@@ -128,13 +143,14 @@ def __repr__(self) -> str:
return "NOT_GIVEN"
-NotGivenOr = Union[_T, NotGiven]
+not_given = NotGiven()
+# for backwards compatibility:
NOT_GIVEN = NotGiven()
class Omit:
- """In certain situations you need to be able to represent a case where a default value has
- to be explicitly removed and `None` is not an appropriate substitute, for example:
+ """
+ To explicitly omit something from being sent in a request, use `omit`.
```py
# as the default `Content-Type` header is `application/json` that will be sent
@@ -144,8 +160,8 @@ class Omit:
# to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
client.post(..., headers={"Content-Type": "multipart/form-data"})
- # instead you can remove the default `application/json` header by passing Omit
- client.post(..., headers={"Content-Type": Omit()})
+ # instead you can remove the default `application/json` header by passing omit
+ client.post(..., headers={"Content-Type": omit})
```
"""
@@ -153,6 +169,9 @@ def __bool__(self) -> Literal[False]:
return False
+omit = Omit()
+
+
@runtime_checkable
class ModelBuilderProtocol(Protocol):
@classmethod
@@ -192,10 +211,8 @@ def get(self, __key: str) -> str | None: ...
StrBytesIntFloat = Union[str, bytes, int, float]
# Note: copied from Pydantic
-# https://github.com/pydantic/pydantic/blob/32ea570bf96e84234d2992e1ddf40ab8a565925a/pydantic/main.py#L49
-IncEx: TypeAlias = Union[
- Set[int], Set[str], Mapping[int, Union["IncEx", Literal[True]]], Mapping[str, Union["IncEx", Literal[True]]]
-]
+# https://github.com/pydantic/pydantic/blob/6f31f8f68ef011f84357330186f603ff295312fd/pydantic/main.py#L79
+IncEx: TypeAlias = Union[Set[int], Set[str], Mapping[int, Union["IncEx", bool]], Mapping[str, Union["IncEx", bool]]]
PostParser = Callable[[Any], Any]
@@ -217,3 +234,27 @@ class _GenericAlias(Protocol):
class HttpxSendArgs(TypedDict, total=False):
auth: httpx.Auth
+ follow_redirects: bool
+
+
+_T_co = TypeVar("_T_co", covariant=True)
+
+
+if TYPE_CHECKING:
+ # This works because str.__contains__ does not accept object (either in typeshed or at runtime)
+ # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285
+ class SequenceNotStr(Protocol[_T_co]):
+ @overload
+ def __getitem__(self, index: SupportsIndex, /) -> _T_co: ...
+ @overload
+ def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ...
+ def __contains__(self, value: object, /) -> bool: ...
+ def __len__(self) -> int: ...
+ def __iter__(self) -> Iterator[_T_co]: ...
+ def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ...
+ def count(self, value: Any, /) -> int: ...
+ def __reversed__(self) -> Iterator[_T_co]: ...
+else:
+ # just point this to a normal `Sequence` at runtime to avoid having to special case
+ # deserializing our custom sequence type
+ SequenceNotStr = Sequence
diff --git a/src/browserbase/_utils/__init__.py b/src/browserbase/_utils/__init__.py
index 3efe66c8..dc64e29a 100644
--- a/src/browserbase/_utils/__init__.py
+++ b/src/browserbase/_utils/__init__.py
@@ -6,10 +6,10 @@
is_list as is_list,
is_given as is_given,
is_tuple as is_tuple,
+ json_safe as json_safe,
lru_cache as lru_cache,
is_mapping as is_mapping,
is_tuple_t as is_tuple_t,
- parse_date as parse_date,
is_iterable as is_iterable,
is_sequence as is_sequence,
coerce_float as coerce_float,
@@ -22,7 +22,6 @@
coerce_boolean as coerce_boolean,
coerce_integer as coerce_integer,
file_from_path as file_from_path,
- parse_datetime as parse_datetime,
strip_not_given as strip_not_given,
deepcopy_minimal as deepcopy_minimal,
get_async_library as get_async_library,
@@ -31,13 +30,22 @@
maybe_coerce_boolean as maybe_coerce_boolean,
maybe_coerce_integer as maybe_coerce_integer,
)
+from ._compat import (
+ get_args as get_args,
+ is_union as is_union,
+ get_origin as get_origin,
+ is_typeddict as is_typeddict,
+ is_literal_type as is_literal_type,
+)
from ._typing import (
is_list_type as is_list_type,
is_union_type as is_union_type,
extract_type_arg as extract_type_arg,
is_iterable_type as is_iterable_type,
is_required_type as is_required_type,
+ is_sequence_type as is_sequence_type,
is_annotated_type as is_annotated_type,
+ is_type_alias_type as is_type_alias_type,
strip_annotated_type as strip_annotated_type,
extract_type_var_from_base as extract_type_var_from_base,
)
@@ -53,3 +61,4 @@
function_has_argument as function_has_argument,
assert_signatures_in_sync as assert_signatures_in_sync,
)
+from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
diff --git a/src/browserbase/_utils/_compat.py b/src/browserbase/_utils/_compat.py
new file mode 100644
index 00000000..dd703233
--- /dev/null
+++ b/src/browserbase/_utils/_compat.py
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+import sys
+import typing_extensions
+from typing import Any, Type, Union, Literal, Optional
+from datetime import date, datetime
+from typing_extensions import get_args as _get_args, get_origin as _get_origin
+
+from .._types import StrBytesIntFloat
+from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime
+
+_LITERAL_TYPES = {Literal, typing_extensions.Literal}
+
+
+def get_args(tp: type[Any]) -> tuple[Any, ...]:
+ return _get_args(tp)
+
+
+def get_origin(tp: type[Any]) -> type[Any] | None:
+ return _get_origin(tp)
+
+
+def is_union(tp: Optional[Type[Any]]) -> bool:
+ if sys.version_info < (3, 10):
+ return tp is Union # type: ignore[comparison-overlap]
+ else:
+ import types
+
+ return tp is Union or tp is types.UnionType
+
+
+def is_typeddict(tp: Type[Any]) -> bool:
+ return typing_extensions.is_typeddict(tp)
+
+
+def is_literal_type(tp: Type[Any]) -> bool:
+ return get_origin(tp) in _LITERAL_TYPES
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+ return _parse_date(value)
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+ return _parse_datetime(value)
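A short sketch exercising the vendored helpers via their `browserbase._utils` re-exports:
```py
from typing import Union, Literal, get_origin

from browserbase._utils import is_union, is_literal_type

print(is_literal_type(Literal["a", "b"]))     # True
print(is_union(get_origin(Union[int, str])))  # True – the check expects the origin, not the annotation
```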
diff --git a/src/browserbase/_utils/_datetime_parse.py b/src/browserbase/_utils/_datetime_parse.py
new file mode 100644
index 00000000..7cb9d9e6
--- /dev/null
+++ b/src/browserbase/_utils/_datetime_parse.py
@@ -0,0 +1,136 @@
+"""
+This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
+without the Pydantic v1 specific errors.
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Dict, Union, Optional
+from datetime import date, datetime, timezone, timedelta
+
+from .._types import StrBytesIntFloat
+
+date_expr = r"(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})"
+time_expr = (
+ r"(?P\d{1,2}):(?P\d{1,2})"
+ r"(?::(?P\d{1,2})(?:\.(?P\d{1,6})\d{0,6})?)?"
+ r"(?PZ|[+-]\d{2}(?::?\d{2})?)?$"
+)
+
+date_re = re.compile(f"{date_expr}$")
+datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
+
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+
+
+def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+ if isinstance(value, (int, float)):
+ return value
+ try:
+ return float(value)
+ except ValueError:
+ return None
+ except TypeError:
+ raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None
+
+
+def _from_unix_seconds(seconds: Union[int, float]) -> datetime:
+ if seconds > MAX_NUMBER:
+ return datetime.max
+ elif seconds < -MAX_NUMBER:
+ return datetime.min
+
+ while abs(seconds) > MS_WATERSHED:
+ seconds /= 1000
+ dt = EPOCH + timedelta(seconds=seconds)
+ return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]:
+ if value == "Z":
+ return timezone.utc
+ elif value is not None:
+ offset_mins = int(value[-2:]) if len(value) > 3 else 0
+ offset = 60 * int(value[1:3]) + offset_mins
+ if value[0] == "-":
+ offset = -offset
+ return timezone(timedelta(minutes=offset))
+ else:
+ return None
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+ """
+ Parse a datetime/int/float/string and return a datetime.datetime.
+
+ This function supports time zone offsets. When the input contains one,
+ the output uses a timezone with a fixed offset from UTC.
+
+ Raise ValueError if the input is well formatted but not a valid datetime.
+ Raise ValueError if the input isn't well formatted.
+ """
+ if isinstance(value, datetime):
+ return value
+
+ number = _get_numeric(value, "datetime")
+ if number is not None:
+ return _from_unix_seconds(number)
+
+ if isinstance(value, bytes):
+ value = value.decode()
+
+ assert not isinstance(value, (float, int))
+
+ match = datetime_re.match(value)
+ if match is None:
+ raise ValueError("invalid datetime format")
+
+ kw = match.groupdict()
+ if kw["microsecond"]:
+ kw["microsecond"] = kw["microsecond"].ljust(6, "0")
+
+ tzinfo = _parse_timezone(kw.pop("tzinfo"))
+ kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+ kw_["tzinfo"] = tzinfo
+
+ return datetime(**kw_) # type: ignore
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+ """
+ Parse a date/int/float/string and return a datetime.date.
+
+ Raise ValueError if the input is well formatted but not a valid date.
+ Raise ValueError if the input isn't well formatted.
+ """
+ if isinstance(value, date):
+ if isinstance(value, datetime):
+ return value.date()
+ else:
+ return value
+
+ number = _get_numeric(value, "date")
+ if number is not None:
+ return _from_unix_seconds(number).date()
+
+ if isinstance(value, bytes):
+ value = value.decode()
+
+ assert not isinstance(value, (float, int))
+ match = date_re.match(value)
+ if match is None:
+ raise ValueError("invalid date format")
+
+ kw = {k: int(v) for k, v in match.groupdict().items()}
+
+ try:
+ return date(**kw)
+ except ValueError:
+ raise ValueError("invalid date format") from None
diff --git a/src/browserbase/_utils/_proxy.py b/src/browserbase/_utils/_proxy.py
index ffd883e9..0f239a33 100644
--- a/src/browserbase/_utils/_proxy.py
+++ b/src/browserbase/_utils/_proxy.py
@@ -46,7 +46,10 @@ def __dir__(self) -> Iterable[str]:
@property # type: ignore
@override
def __class__(self) -> type: # pyright: ignore
- proxied = self.__get_proxied__()
+ try:
+ proxied = self.__get_proxied__()
+ except Exception:
+ return type(self)
if issubclass(type(proxied), LazyProxy):
return type(proxied)
return proxied.__class__
diff --git a/src/browserbase/_utils/_resources_proxy.py b/src/browserbase/_utils/_resources_proxy.py
new file mode 100644
index 00000000..3901271d
--- /dev/null
+++ b/src/browserbase/_utils/_resources_proxy.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from typing import Any
+from typing_extensions import override
+
+from ._proxy import LazyProxy
+
+
+class ResourcesProxy(LazyProxy[Any]):
+ """A proxy for the `browserbase.resources` module.
+
+ This is used so that we can lazily import `browserbase.resources` only when
+ needed *and* so that users can just import `browserbase` and reference `browserbase.resources`
+ """
+
+ @override
+ def __load__(self) -> Any:
+ import importlib
+
+ mod = importlib.import_module("browserbase.resources")
+ return mod
+
+
+resources = ResourcesProxy().__as_proxied__()
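A usage sketch, assuming the package re-exports this proxy as `browserbase.resources` (as the docstring above implies); the underlying module is only imported on first attribute access.
```py
import browserbase

print(browserbase.resources)  # resolves lazily: browserbase.resources loads here, not at package import time
```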
diff --git a/src/browserbase/_utils/_sync.py b/src/browserbase/_utils/_sync.py
index d0d81033..ad7ec71b 100644
--- a/src/browserbase/_utils/_sync.py
+++ b/src/browserbase/_utils/_sync.py
@@ -1,56 +1,77 @@
from __future__ import annotations
+import sys
+import asyncio
import functools
-from typing import TypeVar, Callable, Awaitable
+import contextvars
+from typing import Any, TypeVar, Callable, Awaitable
from typing_extensions import ParamSpec
import anyio
+import sniffio
import anyio.to_thread
-from ._reflection import function_has_argument
-
T_Retval = TypeVar("T_Retval")
T_ParamSpec = ParamSpec("T_ParamSpec")
-# copied from `asyncer`, https://github.com/tiangolo/asyncer
-def asyncify(
- function: Callable[T_ParamSpec, T_Retval],
- *,
- cancellable: bool = False,
- limiter: anyio.CapacityLimiter | None = None,
-) -> Callable[T_ParamSpec, Awaitable[T_Retval]]:
+if sys.version_info >= (3, 9):
+ _asyncio_to_thread = asyncio.to_thread
+else:
+ # backport of https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread
+ # for Python 3.8 support
+ async def _asyncio_to_thread(
+ func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
+ ) -> Any:
+ """Asynchronously run function *func* in a separate thread.
+
+ Any *args and **kwargs supplied for this function are directly passed
+ to *func*. Also, the current :class:`contextvars.Context` is propagated,
+ allowing context variables from the main thread to be accessed in the
+ separate thread.
+
+ Returns a coroutine that can be awaited to get the eventual result of *func*.
+ """
+ loop = asyncio.events.get_running_loop()
+ ctx = contextvars.copy_context()
+ func_call = functools.partial(ctx.run, func, *args, **kwargs)
+ return await loop.run_in_executor(None, func_call)
+
+
+async def to_thread(
+ func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
+) -> T_Retval:
+ if sniffio.current_async_library() == "asyncio":
+ return await _asyncio_to_thread(func, *args, **kwargs)
+
+ return await anyio.to_thread.run_sync(
+ functools.partial(func, *args, **kwargs),
+ )
+
+
+# inspired by `asyncer`, https://github.com/tiangolo/asyncer
+def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]:
"""
Take a blocking function and create an async one that receives the same
- positional and keyword arguments, and that when called, calls the original function
- in a worker thread using `anyio.to_thread.run_sync()`. Internally,
- `asyncer.asyncify()` uses the same `anyio.to_thread.run_sync()`, but it supports
- keyword arguments additional to positional arguments and it adds better support for
- autocompletion and inline errors for the arguments of the function called and the
- return value.
-
- If the `cancellable` option is enabled and the task waiting for its completion is
- cancelled, the thread will still run its course but its return value (or any raised
- exception) will be ignored.
+ positional and keyword arguments. For python version 3.9 and above, it uses
+ asyncio.to_thread to run the function in a separate thread. For python version
+ 3.8, it uses a locally defined copy of the asyncio.to_thread function, which was
+ introduced in python 3.9.
- Use it like this:
+ Usage:
- ```Python
- def do_work(arg1, arg2, kwarg1="", kwarg2="") -> str:
- # Do work
- return "Some result"
+ ```python
+ def blocking_func(arg1, arg2, kwarg1=None):
+ # blocking code
+ return result
- result = await to_thread.asyncify(do_work)("spam", "ham", kwarg1="a", kwarg2="b")
- print(result)
+ result = asyncify(blocking_function)(arg1, arg2, kwarg1=value1)
```
## Arguments
`function`: a blocking regular callable (e.g. a function)
- `cancellable`: `True` to allow cancellation of the operation
- `limiter`: capacity limiter to use to limit the total amount of threads running
- (if omitted, the default limiter is used)
## Return
@@ -60,22 +81,6 @@ def do_work(arg1, arg2, kwarg1="", kwarg2="") -> str:
"""
async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval:
- partial_f = functools.partial(function, *args, **kwargs)
-
- # In `v4.1.0` anyio added the `abandon_on_cancel` argument and deprecated the old
- # `cancellable` argument, so we need to use the new `abandon_on_cancel` to avoid
- # surfacing deprecation warnings.
- if function_has_argument(anyio.to_thread.run_sync, "abandon_on_cancel"):
- return await anyio.to_thread.run_sync(
- partial_f,
- abandon_on_cancel=cancellable,
- limiter=limiter,
- )
-
- return await anyio.to_thread.run_sync(
- partial_f,
- cancellable=cancellable,
- limiter=limiter,
- )
+ return await to_thread(function, *args, **kwargs)
return wrapper
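A usage sketch for the simplified helper (the blocking function is made up):
```py
import time
import asyncio

from browserbase._utils._sync import asyncify


def blocking_sum(a: int, b: int) -> int:
    time.sleep(0.1)  # stand-in for blocking I/O
    return a + b


async def main() -> None:
    result = await asyncify(blocking_sum)(1, 2)  # runs in a worker thread via asyncio.to_thread
    print(result)  # 3


asyncio.run(main())
```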
diff --git a/src/browserbase/_utils/_transform.py b/src/browserbase/_utils/_transform.py
index 47e262a5..52075492 100644
--- a/src/browserbase/_utils/_transform.py
+++ b/src/browserbase/_utils/_transform.py
@@ -5,27 +5,31 @@
import pathlib
from typing import Any, Mapping, TypeVar, cast
from datetime import date, datetime
-from typing_extensions import Literal, get_args, override, get_type_hints
+from typing_extensions import Literal, get_args, override, get_type_hints as _get_type_hints
import anyio
import pydantic
from ._utils import (
is_list,
+ is_given,
+ lru_cache,
is_mapping,
is_iterable,
+ is_sequence,
)
from .._files import is_base64_file_input
+from ._compat import get_origin, is_typeddict
from ._typing import (
is_list_type,
is_union_type,
extract_type_arg,
is_iterable_type,
is_required_type,
+ is_sequence_type,
is_annotated_type,
strip_annotated_type,
)
-from .._compat import model_dump, is_typeddict
_T = TypeVar("_T")
@@ -108,6 +112,7 @@ class Params(TypedDict, total=False):
return cast(_T, transformed)
+@lru_cache(maxsize=8096)
def _get_annotated_type(type_: type) -> type | None:
"""If the given type is an `Annotated` type then it is returned, if not `None` is returned.
@@ -126,7 +131,7 @@ def _get_annotated_type(type_: type) -> type | None:
def _maybe_transform_key(key: str, type_: type) -> str:
"""Transform the given `data` based on the annotations provided in `type_`.
- Note: this function only looks at `Annotated` types that contain `PropertInfo` metadata.
+ Note: this function only looks at `Annotated` types that contain `PropertyInfo` metadata.
"""
annotated_type = _get_annotated_type(type_)
if annotated_type is None:
@@ -142,6 +147,10 @@ def _maybe_transform_key(key: str, type_: type) -> str:
return key
+def _no_transform_needed(annotation: type) -> bool:
+ return annotation == float or annotation == int
+
+
def _transform_recursive(
data: object,
*,
@@ -160,20 +169,43 @@ def _transform_recursive(
Defaults to the same value as the `annotation` argument.
"""
+ from .._compat import model_dump
+
if inner_type is None:
inner_type = annotation
stripped_type = strip_annotated_type(inner_type)
+ origin = get_origin(stripped_type) or stripped_type
if is_typeddict(stripped_type) and is_mapping(data):
return _transform_typeddict(data, stripped_type)
+ if origin == dict and is_mapping(data):
+ items_type = get_args(stripped_type)[1]
+ return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()}
+
if (
# List[T]
(is_list_type(stripped_type) and is_list(data))
# Iterable[T]
or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
+ # Sequence[T]
+ or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str))
):
+ # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
+ # intended as an iterable, so we don't transform it.
+ if isinstance(data, dict):
+ return cast(object, data)
+
inner_type = extract_type_arg(stripped_type, 0)
+ if _no_transform_needed(inner_type):
+ # for some types there is no need to transform anything, so we can get a small
+ # perf boost from skipping that work.
+ #
+ # but we still need to convert to a list to ensure the data is json-serializable
+ if is_list(data):
+ return data
+ return list(data)
+
return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]
if is_union_type(stripped_type):
@@ -186,7 +218,7 @@ def _transform_recursive(
return data
if isinstance(data, pydantic.BaseModel):
- return model_dump(data, exclude_unset=True)
+ return model_dump(data, exclude_unset=True, mode="json")
annotated_type = _get_annotated_type(annotation)
if annotated_type is None:
@@ -235,6 +267,11 @@ def _transform_typeddict(
result: dict[str, object] = {}
annotations = get_type_hints(expected_type, include_extras=True)
for key, value in data.items():
+ if not is_given(value):
+ # we don't need to include omitted values here as they'll
+ # be stripped out before the request is sent anyway
+ continue
+
type_ = annotations.get(key)
if type_ is None:
# we do not have a type annotation for this field, leave it as is
@@ -298,20 +335,43 @@ async def _async_transform_recursive(
Defaults to the same value as the `annotation` argument.
"""
+ from .._compat import model_dump
+
if inner_type is None:
inner_type = annotation
stripped_type = strip_annotated_type(inner_type)
+ origin = get_origin(stripped_type) or stripped_type
if is_typeddict(stripped_type) and is_mapping(data):
return await _async_transform_typeddict(data, stripped_type)
+ if origin == dict and is_mapping(data):
+ items_type = get_args(stripped_type)[1]
+ return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()}
+
if (
# List[T]
(is_list_type(stripped_type) and is_list(data))
# Iterable[T]
or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
+ # Sequence[T]
+ or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str))
):
+ # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
+ # intended as an iterable, so we don't transform it.
+ if isinstance(data, dict):
+ return cast(object, data)
+
inner_type = extract_type_arg(stripped_type, 0)
+ if _no_transform_needed(inner_type):
+ # for some types there is no need to transform anything, so we can get a small
+ # perf boost from skipping that work.
+ #
+ # but we still need to convert to a list to ensure the data is json-serializable
+ if is_list(data):
+ return data
+ return list(data)
+
return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]
if is_union_type(stripped_type):
@@ -324,7 +384,7 @@ async def _async_transform_recursive(
return data
if isinstance(data, pydantic.BaseModel):
- return model_dump(data, exclude_unset=True)
+ return model_dump(data, exclude_unset=True, mode="json")
annotated_type = _get_annotated_type(annotation)
if annotated_type is None:
@@ -373,6 +433,11 @@ async def _async_transform_typeddict(
result: dict[str, object] = {}
annotations = get_type_hints(expected_type, include_extras=True)
for key, value in data.items():
+ if not is_given(value):
+ # we don't need to include omitted values here as they'll
+ # be stripped out before the request is sent anyway
+ continue
+
type_ = annotations.get(key)
if type_ is None:
# we do not have a type annotation for this field, leave it as is
@@ -380,3 +445,13 @@ async def _async_transform_typeddict(
else:
result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_)
return result
+
+
+@lru_cache(maxsize=8096)
+def get_type_hints(
+ obj: Any,
+ globalns: dict[str, Any] | None = None,
+ localns: Mapping[str, Any] | None = None,
+ include_extras: bool = False,
+) -> dict[str, Any]:
+ return _get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras)
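A hedged sketch of the transform machinery these changes feed into: keys annotated with `PropertyInfo(alias=...)` are renamed and datetimes become JSON-safe strings (the params class below is made up).
```py
from datetime import datetime, timezone
from typing_extensions import Annotated, TypedDict

from browserbase._utils import PropertyInfo, maybe_transform


class ExampleParams(TypedDict, total=False):
    expires_at: Annotated[datetime, PropertyInfo(alias="expiresAt", format="iso8601")]


print(maybe_transform({"expires_at": datetime(2024, 1, 1, tzinfo=timezone.utc)}, ExampleParams))
# {'expiresAt': '2024-01-01T00:00:00+00:00'}
```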
diff --git a/src/browserbase/_utils/_typing.py b/src/browserbase/_utils/_typing.py
index c036991f..193109f3 100644
--- a/src/browserbase/_utils/_typing.py
+++ b/src/browserbase/_utils/_typing.py
@@ -1,11 +1,21 @@
from __future__ import annotations
+import sys
+import typing
+import typing_extensions
from typing import Any, TypeVar, Iterable, cast
from collections import abc as _c_abc
-from typing_extensions import Required, Annotated, get_args, get_origin
-
+from typing_extensions import (
+ TypeIs,
+ Required,
+ Annotated,
+ get_args,
+ get_origin,
+)
+
+from ._utils import lru_cache
from .._types import InheritsGeneric
-from .._compat import is_union as _is_union
+from ._compat import is_union as _is_union
def is_annotated_type(typ: type) -> bool:
@@ -16,6 +26,11 @@ def is_list_type(typ: type) -> bool:
return (get_origin(typ) or typ) == list
+def is_sequence_type(typ: type) -> bool:
+ origin = get_origin(typ) or typ
+ return origin == typing_extensions.Sequence or origin == typing.Sequence or origin == _c_abc.Sequence
+
+
def is_iterable_type(typ: type) -> bool:
"""If the given type is `typing.Iterable[T]`"""
origin = get_origin(typ) or typ
@@ -36,7 +51,28 @@ def is_typevar(typ: type) -> bool:
return type(typ) == TypeVar # type: ignore
+_TYPE_ALIAS_TYPES: tuple[type[typing_extensions.TypeAliasType], ...] = (typing_extensions.TypeAliasType,)
+if sys.version_info >= (3, 12):
+ _TYPE_ALIAS_TYPES = (*_TYPE_ALIAS_TYPES, typing.TypeAliasType)
+
+
+def is_type_alias_type(tp: Any, /) -> TypeIs[typing_extensions.TypeAliasType]:
+ """Return whether the provided argument is an instance of `TypeAliasType`.
+
+ ```python
+ type Int = int
+ is_type_alias_type(Int)
+ # > True
+ Str = TypeAliasType("Str", str)
+ is_type_alias_type(Str)
+ # > True
+ ```
+ """
+ return isinstance(tp, _TYPE_ALIAS_TYPES)
+
+
# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]]
+@lru_cache(maxsize=8096)
def strip_annotated_type(typ: type) -> type:
if is_required_type(typ) or is_annotated_type(typ):
return strip_annotated_type(cast(type, get_args(typ)[0]))
@@ -79,7 +115,7 @@ class MyResponse(Foo[_T]):
```
"""
cls = cast(object, get_origin(typ) or typ)
- if cls in generic_bases:
+ if cls in generic_bases: # pyright: ignore[reportUnnecessaryContains]
# we're given the class directly
return extract_type_arg(typ, index)
diff --git a/src/browserbase/_utils/_utils.py b/src/browserbase/_utils/_utils.py
index 0bba17ca..50d59269 100644
--- a/src/browserbase/_utils/_utils.py
+++ b/src/browserbase/_utils/_utils.py
@@ -16,12 +16,12 @@
overload,
)
from pathlib import Path
+from datetime import date, datetime
from typing_extensions import TypeGuard
import sniffio
-from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike
-from .._compat import parse_date as parse_date, parse_datetime as parse_datetime
+from .._types import Omit, NotGiven, FileTypes, HeadersLike
_T = TypeVar("_T")
_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])
@@ -63,7 +63,7 @@ def _extract_items(
try:
key = path[index]
except IndexError:
- if isinstance(obj, NotGiven):
+ if not is_given(obj):
# no value was provided - we can safely ignore
return []
@@ -71,8 +71,16 @@ def _extract_items(
from .._files import assert_is_file_content
# We have exhausted the path, return the entry we found.
- assert_is_file_content(obj, key=flattened_key)
assert flattened_key is not None
+
+ if is_list(obj):
+ files: list[tuple[str, FileTypes]] = []
+ for entry in obj:
+ assert_is_file_content(entry, key=flattened_key + "[]" if flattened_key else "")
+ files.append((flattened_key + "[]", cast(FileTypes, entry)))
+ return files
+
+ assert_is_file_content(obj, key=flattened_key)
return [(flattened_key, cast(FileTypes, obj))]
index += 1
@@ -118,8 +126,8 @@ def _extract_items(
return []
-def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]:
- return not isinstance(obj, NotGiven)
+def is_given(obj: _T | NotGiven | Omit) -> TypeGuard[_T]:
+ return not isinstance(obj, NotGiven) and not isinstance(obj, Omit)
# Type safe methods for narrowing types with TypeVars.
@@ -395,3 +403,19 @@ def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]:
maxsize=maxsize,
)
return cast(Any, wrapper) # type: ignore[no-any-return]
+
+
+def json_safe(data: object) -> object:
+ """Translates a mapping / sequence recursively in the same fashion
+ as `pydantic` v2's `model_dump(mode="json")`.
+ """
+ if is_mapping(data):
+ return {json_safe(key): json_safe(value) for key, value in data.items()}
+
+ if is_iterable(data) and not isinstance(data, (str, bytes, bytearray)):
+ return [json_safe(item) for item in data]
+
+ if isinstance(data, (datetime, date)):
+ return data.isoformat()
+
+ return data
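Two behavioural additions in `_utils.py` above are worth calling out: `_extract_items` now expands a list of file values into repeated `key[]` multipart entries, and the new `json_safe` helper recursively walks mappings and sequences, converting `date`/`datetime` values to ISO-8601 strings in the same spirit as pydantic v2's `model_dump(mode="json")`. A rough standalone sketch of the `json_safe` behaviour, using plain `collections.abc` checks in place of the SDK's `is_mapping`/`is_iterable` guards:

```python
from collections.abc import Iterable, Mapping
from datetime import date, datetime


def json_safe(data: object) -> object:
    # Recurse into mappings first, then other iterables, then coerce dates.
    if isinstance(data, Mapping):
        return {json_safe(key): json_safe(value) for key, value in data.items()}
    if isinstance(data, Iterable) and not isinstance(data, (str, bytes, bytearray)):
        return [json_safe(item) for item in data]
    if isinstance(data, (datetime, date)):
        return data.isoformat()
    return data


print(json_safe({"when": datetime(2024, 1, 2, 3, 4, 5), "tags": ("a", "b")}))
# {'when': '2024-01-02T03:04:05', 'tags': ['a', 'b']}
```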
diff --git a/src/browserbase/_version.py b/src/browserbase/_version.py
index 1f27c648..6fa8f70b 100644
--- a/src/browserbase/_version.py
+++ b/src/browserbase/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
__title__ = "browserbase"
-__version__ = "1.0.0" # x-release-please-version
+__version__ = "1.5.0-alpha.1" # x-release-please-version
diff --git a/src/browserbase/resources/contexts.py b/src/browserbase/resources/contexts.py
index 806cb012..d2bb4167 100644
--- a/src/browserbase/resources/contexts.py
+++ b/src/browserbase/resources/contexts.py
@@ -5,11 +5,8 @@
import httpx
from ..types import context_create_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from .._utils import (
- maybe_transform,
- async_maybe_transform,
-)
+from .._types import Body, Query, Headers, NotGiven, not_given
+from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import (
@@ -19,9 +16,9 @@
async_to_streamed_response_wrapper,
)
from .._base_client import make_request_options
-from ..types.context import Context
from ..types.context_create_response import ContextCreateResponse
from ..types.context_update_response import ContextUpdateResponse
+from ..types.context_retrieve_response import ContextRetrieveResponse
__all__ = ["ContextsResource", "AsyncContextsResource"]
@@ -30,7 +27,7 @@ class ContextsResource(SyncAPIResource):
@cached_property
def with_raw_response(self) -> ContextsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -55,7 +52,7 @@ def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ContextCreateResponse:
"""Create a Context
@@ -91,10 +88,10 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Context:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ContextRetrieveResponse:
"""
- Context
+ Get a Context
Args:
extra_headers: Send extra headers
@@ -112,7 +109,7 @@ def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Context,
+ cast_to=ContextRetrieveResponse,
)
def update(
@@ -124,10 +121,10 @@ def update(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ContextUpdateResponse:
"""
- Update Context
+ Update a Context
Args:
extra_headers: Send extra headers
@@ -153,7 +150,7 @@ class AsyncContextsResource(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncContextsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -178,7 +175,7 @@ async def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ContextCreateResponse:
"""Create a Context
@@ -214,10 +211,10 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Context:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ContextRetrieveResponse:
"""
- Context
+ Get a Context
Args:
extra_headers: Send extra headers
@@ -235,7 +232,7 @@ async def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Context,
+ cast_to=ContextRetrieveResponse,
)
async def update(
@@ -247,10 +244,10 @@ async def update(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ContextUpdateResponse:
"""
- Update Context
+ Update a Context
Args:
extra_headers: Send extra headers
diff --git a/src/browserbase/resources/extensions.py b/src/browserbase/resources/extensions.py
index dc6c0ac7..21d06e70 100644
--- a/src/browserbase/resources/extensions.py
+++ b/src/browserbase/resources/extensions.py
@@ -7,13 +7,8 @@
import httpx
from ..types import extension_create_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, FileTypes
-from .._utils import (
- extract_files,
- maybe_transform,
- deepcopy_minimal,
- async_maybe_transform,
-)
+from .._types import Body, Query, Headers, NoneType, NotGiven, FileTypes, not_given
+from .._utils import extract_files, maybe_transform, deepcopy_minimal, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import (
@@ -23,7 +18,8 @@
async_to_streamed_response_wrapper,
)
from .._base_client import make_request_options
-from ..types.extension import Extension
+from ..types.extension_create_response import ExtensionCreateResponse
+from ..types.extension_retrieve_response import ExtensionRetrieveResponse
__all__ = ["ExtensionsResource", "AsyncExtensionsResource"]
@@ -32,7 +28,7 @@ class ExtensionsResource(SyncAPIResource):
@cached_property
def with_raw_response(self) -> ExtensionsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -57,8 +53,8 @@ def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Extension:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ExtensionCreateResponse:
"""
Upload an Extension
@@ -84,7 +80,7 @@ def create(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Extension,
+ cast_to=ExtensionCreateResponse,
)
def retrieve(
@@ -96,10 +92,10 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Extension:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ExtensionRetrieveResponse:
"""
- Extension
+ Get an Extension
Args:
extra_headers: Send extra headers
@@ -117,7 +113,7 @@ def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Extension,
+ cast_to=ExtensionRetrieveResponse,
)
def delete(
@@ -129,10 +125,10 @@ def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
- Delete Extension
+ Delete an Extension
Args:
extra_headers: Send extra headers
@@ -159,7 +155,7 @@ class AsyncExtensionsResource(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncExtensionsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -184,8 +180,8 @@ async def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Extension:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ExtensionCreateResponse:
"""
Upload an Extension
@@ -211,7 +207,7 @@ async def create(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Extension,
+ cast_to=ExtensionCreateResponse,
)
async def retrieve(
@@ -223,10 +219,10 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Extension:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ExtensionRetrieveResponse:
"""
- Extension
+ Get an Extension
Args:
extra_headers: Send extra headers
@@ -244,7 +240,7 @@ async def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Extension,
+ cast_to=ExtensionRetrieveResponse,
)
async def delete(
@@ -256,10 +252,10 @@ async def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
- Delete Extension
+ Delete an Extension
Args:
extra_headers: Send extra headers
diff --git a/src/browserbase/resources/projects.py b/src/browserbase/resources/projects.py
index f8b1936a..62c28afa 100644
--- a/src/browserbase/resources/projects.py
+++ b/src/browserbase/resources/projects.py
@@ -4,7 +4,7 @@
import httpx
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Query, Headers, NotGiven, not_given
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import (
@@ -14,9 +14,9 @@
async_to_streamed_response_wrapper,
)
from .._base_client import make_request_options
-from ..types.project import Project
-from ..types.project_usage import ProjectUsage
from ..types.project_list_response import ProjectListResponse
+from ..types.project_usage_response import ProjectUsageResponse
+from ..types.project_retrieve_response import ProjectRetrieveResponse
__all__ = ["ProjectsResource", "AsyncProjectsResource"]
@@ -25,7 +25,7 @@ class ProjectsResource(SyncAPIResource):
@cached_property
def with_raw_response(self) -> ProjectsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -50,10 +50,10 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Project:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ProjectRetrieveResponse:
"""
- Project
+ Get a Project
Args:
extra_headers: Send extra headers
@@ -71,7 +71,7 @@ def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Project,
+ cast_to=ProjectRetrieveResponse,
)
def list(
@@ -82,9 +82,9 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ProjectListResponse:
- """List all projects"""
+ """List Projects"""
return self._get(
"/v1/projects",
options=make_request_options(
@@ -102,10 +102,10 @@ def usage(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> ProjectUsage:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ProjectUsageResponse:
"""
- Project Usage
+ Get Project Usage
Args:
extra_headers: Send extra headers
@@ -123,7 +123,7 @@ def usage(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=ProjectUsage,
+ cast_to=ProjectUsageResponse,
)
@@ -131,7 +131,7 @@ class AsyncProjectsResource(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncProjectsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -156,10 +156,10 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Project:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ProjectRetrieveResponse:
"""
- Project
+ Get a Project
Args:
extra_headers: Send extra headers
@@ -177,7 +177,7 @@ async def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Project,
+ cast_to=ProjectRetrieveResponse,
)
async def list(
@@ -188,9 +188,9 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ProjectListResponse:
- """List all projects"""
+ """List Projects"""
return await self._get(
"/v1/projects",
options=make_request_options(
@@ -208,10 +208,10 @@ async def usage(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> ProjectUsage:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ProjectUsageResponse:
"""
- Project Usage
+ Get Project Usage
Args:
extra_headers: Send extra headers
@@ -229,7 +229,7 @@ async def usage(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=ProjectUsage,
+ cast_to=ProjectUsageResponse,
)
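With the renames above, `projects.retrieve` now returns `ProjectRetrieveResponse` (which also gains a `concurrency` field in the model diffs further down) and `projects.usage` returns `ProjectUsageResponse`. A hedged usage sketch, assuming the package's `Browserbase` client entry point and that the project ID is passed positionally as in the other generated retrieve methods; the ID value is a placeholder:

```python
import os

from browserbase import Browserbase

client = Browserbase(api_key=os.environ["BROWSERBASE_API_KEY"])

project_id = "your-project-id"  # placeholder
project = client.projects.retrieve(project_id)
print(project.concurrency, project.default_timeout)

usage = client.projects.usage(project_id)
print(usage.browser_minutes, usage.proxy_bytes)
```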
diff --git a/src/browserbase/resources/sessions/downloads.py b/src/browserbase/resources/sessions/downloads.py
index 461163b0..6195c30b 100644
--- a/src/browserbase/resources/sessions/downloads.py
+++ b/src/browserbase/resources/sessions/downloads.py
@@ -4,7 +4,7 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Query, Headers, NotGiven, not_given
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
@@ -26,7 +26,7 @@ class DownloadsResource(SyncAPIResource):
@cached_property
def with_raw_response(self) -> DownloadsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -51,7 +51,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> BinaryAPIResponse:
"""
Session Downloads
@@ -81,7 +81,7 @@ class AsyncDownloadsResource(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncDownloadsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -106,7 +106,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncBinaryAPIResponse:
"""
Session Downloads
diff --git a/src/browserbase/resources/sessions/logs.py b/src/browserbase/resources/sessions/logs.py
index 07fb5818..b1c90f52 100644
--- a/src/browserbase/resources/sessions/logs.py
+++ b/src/browserbase/resources/sessions/logs.py
@@ -4,7 +4,7 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Query, Headers, NotGiven, not_given
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
@@ -23,7 +23,7 @@ class LogsResource(SyncAPIResource):
@cached_property
def with_raw_response(self) -> LogsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -48,7 +48,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> LogListResponse:
"""
Session Logs
@@ -77,7 +77,7 @@ class AsyncLogsResource(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncLogsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -102,7 +102,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> LogListResponse:
"""
Session Logs
diff --git a/src/browserbase/resources/sessions/recording.py b/src/browserbase/resources/sessions/recording.py
index b216fd9b..789087a8 100644
--- a/src/browserbase/resources/sessions/recording.py
+++ b/src/browserbase/resources/sessions/recording.py
@@ -4,7 +4,7 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Query, Headers, NotGiven, not_given
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
@@ -23,7 +23,7 @@ class RecordingResource(SyncAPIResource):
@cached_property
def with_raw_response(self) -> RecordingResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -48,7 +48,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> RecordingRetrieveResponse:
"""
Session Recording
@@ -77,7 +77,7 @@ class AsyncRecordingResource(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncRecordingResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -102,7 +102,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> RecordingRetrieveResponse:
"""
Session Recording
diff --git a/src/browserbase/resources/sessions/sessions.py b/src/browserbase/resources/sessions/sessions.py
index fc4cac3c..5e58bbbb 100644
--- a/src/browserbase/resources/sessions/sessions.py
+++ b/src/browserbase/resources/sessions/sessions.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Literal
import httpx
@@ -24,11 +24,8 @@
UploadsResourceWithStreamingResponse,
AsyncUploadsResourceWithStreamingResponse,
)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from ..._utils import (
- maybe_transform,
- async_maybe_transform,
-)
+from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
+from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from .downloads import (
DownloadsResource,
@@ -54,10 +51,11 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.session import Session
-from ...types.session_live_urls import SessionLiveURLs
from ...types.session_list_response import SessionListResponse
+from ...types.session_debug_response import SessionDebugResponse
from ...types.session_create_response import SessionCreateResponse
+from ...types.session_update_response import SessionUpdateResponse
+from ...types.session_retrieve_response import SessionRetrieveResponse
__all__ = ["SessionsResource", "AsyncSessionsResource"]
@@ -82,7 +80,7 @@ def uploads(self) -> UploadsResource:
@cached_property
def with_raw_response(self) -> SessionsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -102,18 +100,20 @@ def create(
self,
*,
project_id: str,
- browser_settings: session_create_params.BrowserSettings | NotGiven = NOT_GIVEN,
- extension_id: str | NotGiven = NOT_GIVEN,
- keep_alive: bool | NotGiven = NOT_GIVEN,
- proxies: Union[bool, Iterable[session_create_params.ProxiesUnionMember1]] | NotGiven = NOT_GIVEN,
- region: Literal["us-west-2", "us-east-1", "eu-central-1", "ap-southeast-1"] | NotGiven = NOT_GIVEN,
- api_timeout: int | NotGiven = NOT_GIVEN,
+ browser_settings: session_create_params.BrowserSettings | Omit = omit,
+ extension_id: str | Omit = omit,
+ keep_alive: bool | Omit = omit,
+ proxies: Union[Iterable[session_create_params.ProxiesUnionMember0], bool] | Omit = omit,
+ proxy_settings: session_create_params.ProxySettings | Omit = omit,
+ region: Literal["us-west-2", "us-east-1", "eu-central-1", "ap-southeast-1"] | Omit = omit,
+ api_timeout: int | Omit = omit,
+ user_metadata: Dict[str, object] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SessionCreateResponse:
"""Create a Session
@@ -126,17 +126,22 @@ def create(
extension_id: The uploaded Extension ID. See
[Upload Extension](/reference/api/upload-an-extension).
- keep_alive: Set to true to keep the session alive even after disconnections. This is
- available on the Startup plan only.
+ keep_alive: Set to true to keep the session alive even after disconnections. Available on
+ the Hobby Plan and above.
proxies: Proxy configuration. Can be true for default proxy, or an array of proxy
configurations.
+ proxy_settings: [NOT IN DOCS] Supplementary proxy settings. Optional.
+
region: The region where the Session should run.
api_timeout: Duration in seconds after which the session will automatically end. Defaults to
the Project's `defaultTimeout`.
+ user_metadata: Arbitrary user metadata to attach to the session. To learn more about user
+ metadata, see [User Metadata](/features/sessions#user-metadata).
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -154,8 +159,10 @@ def create(
"extension_id": extension_id,
"keep_alive": keep_alive,
"proxies": proxies,
+ "proxy_settings": proxy_settings,
"region": region,
- "timeout": api_timeout,
+ "api_timeout": api_timeout,
+ "user_metadata": user_metadata,
},
session_create_params.SessionCreateParams,
),
@@ -174,10 +181,10 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Session:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> SessionRetrieveResponse:
"""
- Session
+ Get a Session
Args:
extra_headers: Send extra headers
@@ -195,7 +202,7 @@ def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Session,
+ cast_to=SessionRetrieveResponse,
)
def update(
@@ -209,9 +216,9 @@ def update(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Session:
- """Update Session
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> SessionUpdateResponse:
+ """Update a Session
Args:
project_id: The Project ID.
@@ -244,24 +251,30 @@ def update(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Session,
+ cast_to=SessionUpdateResponse,
)
def list(
self,
*,
- status: Literal["RUNNING", "ERROR", "TIMED_OUT", "COMPLETED"] | NotGiven = NOT_GIVEN,
+ q: str | Omit = omit,
+ status: Literal["RUNNING", "ERROR", "TIMED_OUT", "COMPLETED"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SessionListResponse:
- """
- List Sessions
+ """List Sessions
Args:
+ q: Query sessions by user metadata.
+
+ See
+ [Querying Sessions by User Metadata](/features/sessions#querying-sessions-by-user-metadata)
+ for the schema of this query.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -277,7 +290,13 @@ def list(
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
- query=maybe_transform({"status": status}, session_list_params.SessionListParams),
+ query=maybe_transform(
+ {
+ "q": q,
+ "status": status,
+ },
+ session_list_params.SessionListParams,
+ ),
),
cast_to=SessionListResponse,
)
@@ -291,8 +310,8 @@ def debug(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> SessionLiveURLs:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> SessionDebugResponse:
"""
Session Live URLs
@@ -312,7 +331,7 @@ def debug(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=SessionLiveURLs,
+ cast_to=SessionDebugResponse,
)
@@ -336,7 +355,7 @@ def uploads(self) -> AsyncUploadsResource:
@cached_property
def with_raw_response(self) -> AsyncSessionsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -356,18 +375,20 @@ async def create(
self,
*,
project_id: str,
- browser_settings: session_create_params.BrowserSettings | NotGiven = NOT_GIVEN,
- extension_id: str | NotGiven = NOT_GIVEN,
- keep_alive: bool | NotGiven = NOT_GIVEN,
- proxies: Union[bool, Iterable[session_create_params.ProxiesUnionMember1]] | NotGiven = NOT_GIVEN,
- region: Literal["us-west-2", "us-east-1", "eu-central-1", "ap-southeast-1"] | NotGiven = NOT_GIVEN,
- api_timeout: int | NotGiven = NOT_GIVEN,
+ browser_settings: session_create_params.BrowserSettings | Omit = omit,
+ extension_id: str | Omit = omit,
+ keep_alive: bool | Omit = omit,
+ proxies: Union[Iterable[session_create_params.ProxiesUnionMember0], bool] | Omit = omit,
+ proxy_settings: session_create_params.ProxySettings | Omit = omit,
+ region: Literal["us-west-2", "us-east-1", "eu-central-1", "ap-southeast-1"] | Omit = omit,
+ api_timeout: int | Omit = omit,
+ user_metadata: Dict[str, object] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SessionCreateResponse:
"""Create a Session
@@ -380,17 +401,22 @@ async def create(
extension_id: The uploaded Extension ID. See
[Upload Extension](/reference/api/upload-an-extension).
- keep_alive: Set to true to keep the session alive even after disconnections. This is
- available on the Startup plan only.
+ keep_alive: Set to true to keep the session alive even after disconnections. Available on
+ the Hobby Plan and above.
proxies: Proxy configuration. Can be true for default proxy, or an array of proxy
configurations.
+ proxy_settings: [NOT IN DOCS] Supplementary proxy settings. Optional.
+
region: The region where the Session should run.
api_timeout: Duration in seconds after which the session will automatically end. Defaults to
the Project's `defaultTimeout`.
+ user_metadata: Arbitrary user metadata to attach to the session. To learn more about user
+ metadata, see [User Metadata](/features/sessions#user-metadata).
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -408,8 +434,10 @@ async def create(
"extension_id": extension_id,
"keep_alive": keep_alive,
"proxies": proxies,
+ "proxy_settings": proxy_settings,
"region": region,
- "timeout": api_timeout,
+ "api_timeout": api_timeout,
+ "user_metadata": user_metadata,
},
session_create_params.SessionCreateParams,
),
@@ -428,10 +456,10 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Session:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> SessionRetrieveResponse:
"""
- Session
+ Get a Session
Args:
extra_headers: Send extra headers
@@ -449,7 +477,7 @@ async def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Session,
+ cast_to=SessionRetrieveResponse,
)
async def update(
@@ -463,9 +491,9 @@ async def update(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> Session:
- """Update Session
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> SessionUpdateResponse:
+ """Update a Session
Args:
project_id: The Project ID.
@@ -498,24 +526,30 @@ async def update(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=Session,
+ cast_to=SessionUpdateResponse,
)
async def list(
self,
*,
- status: Literal["RUNNING", "ERROR", "TIMED_OUT", "COMPLETED"] | NotGiven = NOT_GIVEN,
+ q: str | Omit = omit,
+ status: Literal["RUNNING", "ERROR", "TIMED_OUT", "COMPLETED"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SessionListResponse:
- """
- List Sessions
+ """List Sessions
Args:
+ q: Query sessions by user metadata.
+
+ See
+ [Querying Sessions by User Metadata](/features/sessions#querying-sessions-by-user-metadata)
+ for the schema of this query.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -531,7 +565,13 @@ async def list(
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
- query=await async_maybe_transform({"status": status}, session_list_params.SessionListParams),
+ query=await async_maybe_transform(
+ {
+ "q": q,
+ "status": status,
+ },
+ session_list_params.SessionListParams,
+ ),
),
cast_to=SessionListResponse,
)
@@ -545,8 +585,8 @@ async def debug(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> SessionLiveURLs:
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> SessionDebugResponse:
"""
Session Live URLs
@@ -566,7 +606,7 @@ async def debug(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=SessionLiveURLs,
+ cast_to=SessionDebugResponse,
)
@@ -715,4 +755,4 @@ def recording(self) -> AsyncRecordingResourceWithStreamingResponse:
@cached_property
def uploads(self) -> AsyncUploadsResourceWithStreamingResponse:
- return AsyncUploadsResourceWithStreamingResponse(self._sessions.uploads)
\ No newline at end of file
+ return AsyncUploadsResourceWithStreamingResponse(self._sessions.uploads)
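The sessions resource gains the most user-visible surface in this diff: `user_metadata` and `proxy_settings` on `create`, a corrected `api_timeout` body key, and a `q` filter on `list` for querying by user metadata. A hedged sketch of how a caller might exercise the new parameters, assuming the `Browserbase` client entry point; the `q` string below is only a placeholder, since its exact schema lives at the Querying Sessions by User Metadata link referenced in the docstrings:

```python
import os

from browserbase import Browserbase

client = Browserbase(api_key=os.environ["BROWSERBASE_API_KEY"])

session = client.sessions.create(
    project_id=os.environ["BROWSERBASE_PROJECT_ID"],
    keep_alive=True,                    # now documented as available on the Hobby Plan and above
    user_metadata={"job": "crawl-42"},  # arbitrary metadata attached to the session
)
print(session.id, session.user_metadata)

# Later, filter sessions by that metadata (placeholder query string).
running = client.sessions.list(q='user_metadata["job"]:"crawl-42"', status="RUNNING")
print(len(running))
```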
diff --git a/src/browserbase/resources/sessions/uploads.py b/src/browserbase/resources/sessions/uploads.py
index e985e4d9..aba72b64 100644
--- a/src/browserbase/resources/sessions/uploads.py
+++ b/src/browserbase/resources/sessions/uploads.py
@@ -6,13 +6,8 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes
-from ..._utils import (
- extract_files,
- maybe_transform,
- deepcopy_minimal,
- async_maybe_transform,
-)
+from ..._types import Body, Query, Headers, NotGiven, FileTypes, not_given
+from ..._utils import extract_files, maybe_transform, deepcopy_minimal, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
@@ -32,7 +27,7 @@ class UploadsResource(SyncAPIResource):
@cached_property
def with_raw_response(self) -> UploadsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -58,7 +53,7 @@ def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> UploadCreateResponse:
"""
Create Session Uploads
@@ -95,7 +90,7 @@ class AsyncUploadsResource(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncUploadsResourceWithRawResponse:
"""
- This property can be used as a prefix for any HTTP method call to return the
+ This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/browserbase/sdk-python#accessing-raw-response-data-eg-headers
@@ -121,7 +116,7 @@ async def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> UploadCreateResponse:
"""
Create Session Uploads
diff --git a/src/browserbase/types/__init__.py b/src/browserbase/types/__init__.py
index ebc243db..20e2f905 100644
--- a/src/browserbase/types/__init__.py
+++ b/src/browserbase/types/__init__.py
@@ -2,19 +2,21 @@
from __future__ import annotations
-from .context import Context as Context
-from .project import Project as Project
-from .session import Session as Session
-from .extension import Extension as Extension
-from .project_usage import ProjectUsage as ProjectUsage
-from .session_live_urls import SessionLiveURLs as SessionLiveURLs
from .session_list_params import SessionListParams as SessionListParams
from .context_create_params import ContextCreateParams as ContextCreateParams
from .project_list_response import ProjectListResponse as ProjectListResponse
from .session_create_params import SessionCreateParams as SessionCreateParams
from .session_list_response import SessionListResponse as SessionListResponse
from .session_update_params import SessionUpdateParams as SessionUpdateParams
+from .project_usage_response import ProjectUsageResponse as ProjectUsageResponse
+from .session_debug_response import SessionDebugResponse as SessionDebugResponse
from .context_create_response import ContextCreateResponse as ContextCreateResponse
from .context_update_response import ContextUpdateResponse as ContextUpdateResponse
from .extension_create_params import ExtensionCreateParams as ExtensionCreateParams
from .session_create_response import SessionCreateResponse as SessionCreateResponse
+from .session_update_response import SessionUpdateResponse as SessionUpdateResponse
+from .context_retrieve_response import ContextRetrieveResponse as ContextRetrieveResponse
+from .extension_create_response import ExtensionCreateResponse as ExtensionCreateResponse
+from .project_retrieve_response import ProjectRetrieveResponse as ProjectRetrieveResponse
+from .session_retrieve_response import SessionRetrieveResponse as SessionRetrieveResponse
+from .extension_retrieve_response import ExtensionRetrieveResponse as ExtensionRetrieveResponse
diff --git a/src/browserbase/types/context_create_response.py b/src/browserbase/types/context_create_response.py
index c168596e..8e2f7aa3 100644
--- a/src/browserbase/types/context_create_response.py
+++ b/src/browserbase/types/context_create_response.py
@@ -1,6 +1,5 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
from pydantic import Field as FieldInfo
from .._models import BaseModel
diff --git a/src/browserbase/types/context.py b/src/browserbase/types/context_retrieve_response.py
similarity index 84%
rename from src/browserbase/types/context.py
rename to src/browserbase/types/context_retrieve_response.py
index cb5c32fd..c2cd6925 100644
--- a/src/browserbase/types/context.py
+++ b/src/browserbase/types/context_retrieve_response.py
@@ -6,10 +6,10 @@
from .._models import BaseModel
-__all__ = ["Context"]
+__all__ = ["ContextRetrieveResponse"]
-class Context(BaseModel):
+class ContextRetrieveResponse(BaseModel):
id: str
created_at: datetime = FieldInfo(alias="createdAt")
diff --git a/src/browserbase/types/context_update_response.py b/src/browserbase/types/context_update_response.py
index d07e50e7..7e16c624 100644
--- a/src/browserbase/types/context_update_response.py
+++ b/src/browserbase/types/context_update_response.py
@@ -1,6 +1,5 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
from pydantic import Field as FieldInfo
from .._models import BaseModel
diff --git a/src/browserbase/types/extension.py b/src/browserbase/types/extension_create_response.py
similarity index 85%
rename from src/browserbase/types/extension.py
rename to src/browserbase/types/extension_create_response.py
index 94582c34..d2b74f41 100644
--- a/src/browserbase/types/extension.py
+++ b/src/browserbase/types/extension_create_response.py
@@ -6,10 +6,10 @@
from .._models import BaseModel
-__all__ = ["Extension"]
+__all__ = ["ExtensionCreateResponse"]
-class Extension(BaseModel):
+class ExtensionCreateResponse(BaseModel):
id: str
created_at: datetime = FieldInfo(alias="createdAt")
diff --git a/src/browserbase/types/extension_retrieve_response.py b/src/browserbase/types/extension_retrieve_response.py
new file mode 100644
index 00000000..c786348e
--- /dev/null
+++ b/src/browserbase/types/extension_retrieve_response.py
@@ -0,0 +1,22 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from datetime import datetime
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+
+__all__ = ["ExtensionRetrieveResponse"]
+
+
+class ExtensionRetrieveResponse(BaseModel):
+ id: str
+
+ created_at: datetime = FieldInfo(alias="createdAt")
+
+ file_name: str = FieldInfo(alias="fileName")
+
+ project_id: str = FieldInfo(alias="projectId")
+ """The Project ID linked to the uploaded Extension."""
+
+ updated_at: datetime = FieldInfo(alias="updatedAt")
diff --git a/src/browserbase/types/project_list_response.py b/src/browserbase/types/project_list_response.py
index 2d05a236..e364b520 100644
--- a/src/browserbase/types/project_list_response.py
+++ b/src/browserbase/types/project_list_response.py
@@ -1,10 +1,31 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
+from datetime import datetime
from typing_extensions import TypeAlias
-from .project import Project
+from pydantic import Field as FieldInfo
-__all__ = ["ProjectListResponse"]
+from .._models import BaseModel
-ProjectListResponse: TypeAlias = List[Project]
+__all__ = ["ProjectListResponse", "ProjectListResponseItem"]
+
+
+class ProjectListResponseItem(BaseModel):
+ id: str
+
+ concurrency: int
+ """The maximum number of sessions that this project can run concurrently."""
+
+ created_at: datetime = FieldInfo(alias="createdAt")
+
+ default_timeout: int = FieldInfo(alias="defaultTimeout")
+
+ name: str
+
+ owner_id: str = FieldInfo(alias="ownerId")
+
+ updated_at: datetime = FieldInfo(alias="updatedAt")
+
+
+ProjectListResponse: TypeAlias = List[ProjectListResponseItem]
diff --git a/src/browserbase/types/project.py b/src/browserbase/types/project_retrieve_response.py
similarity index 70%
rename from src/browserbase/types/project.py
rename to src/browserbase/types/project_retrieve_response.py
index afbcef63..78126679 100644
--- a/src/browserbase/types/project.py
+++ b/src/browserbase/types/project_retrieve_response.py
@@ -6,12 +6,15 @@
from .._models import BaseModel
-__all__ = ["Project"]
+__all__ = ["ProjectRetrieveResponse"]
-class Project(BaseModel):
+class ProjectRetrieveResponse(BaseModel):
id: str
+ concurrency: int
+ """The maximum number of sessions that this project can run concurrently."""
+
created_at: datetime = FieldInfo(alias="createdAt")
default_timeout: int = FieldInfo(alias="defaultTimeout")
diff --git a/src/browserbase/types/project_usage.py b/src/browserbase/types/project_usage_response.py
similarity index 78%
rename from src/browserbase/types/project_usage.py
rename to src/browserbase/types/project_usage_response.py
index f68cc2da..b52fccfe 100644
--- a/src/browserbase/types/project_usage.py
+++ b/src/browserbase/types/project_usage_response.py
@@ -1,14 +1,13 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
from pydantic import Field as FieldInfo
from .._models import BaseModel
-__all__ = ["ProjectUsage"]
+__all__ = ["ProjectUsageResponse"]
-class ProjectUsage(BaseModel):
+class ProjectUsageResponse(BaseModel):
browser_minutes: int = FieldInfo(alias="browserMinutes")
proxy_bytes: int = FieldInfo(alias="proxyBytes")
diff --git a/src/browserbase/types/session_create_params.py b/src/browserbase/types/session_create_params.py
index bd643b38..7fafe448 100644
--- a/src/browserbase/types/session_create_params.py
+++ b/src/browserbase/types/session_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Union, Iterable
+from typing import Dict, List, Union, Iterable
from typing_extensions import Literal, Required, Annotated, TypeAlias, TypedDict
+from .._types import SequenceNotStr
from .._utils import PropertyInfo
__all__ = [
@@ -14,10 +15,11 @@
"BrowserSettingsFingerprint",
"BrowserSettingsFingerprintScreen",
"BrowserSettingsViewport",
- "ProxiesUnionMember1",
- "ProxiesUnionMember1BrowserbaseProxyConfig",
- "ProxiesUnionMember1BrowserbaseProxyConfigGeolocation",
- "ProxiesUnionMember1ExternalProxyConfig",
+ "ProxiesUnionMember0",
+ "ProxiesUnionMember0UnionMember0",
+ "ProxiesUnionMember0UnionMember0Geolocation",
+ "ProxiesUnionMember0UnionMember1",
+ "ProxySettings",
]
@@ -39,15 +41,18 @@ class SessionCreateParams(TypedDict, total=False):
keep_alive: Annotated[bool, PropertyInfo(alias="keepAlive")]
"""Set to true to keep the session alive even after disconnections.
- This is available on the Startup plan only.
+ Available on the Hobby Plan and above.
"""
- proxies: Union[bool, Iterable[ProxiesUnionMember1]]
+ proxies: Union[Iterable[ProxiesUnionMember0], bool]
"""Proxy configuration.
Can be true for default proxy, or an array of proxy configurations.
"""
+ proxy_settings: Annotated[ProxySettings, PropertyInfo(alias="proxySettings")]
+ """[NOT IN DOCS] Supplementary proxy settings. Optional."""
+
region: Literal["us-west-2", "us-east-1", "eu-central-1", "ap-southeast-1"]
"""The region where the Session should run."""
@@ -57,6 +62,13 @@ class SessionCreateParams(TypedDict, total=False):
Defaults to the Project's `defaultTimeout`.
"""
+ user_metadata: Annotated[Dict[str, object], PropertyInfo(alias="userMetadata")]
+ """Arbitrary user metadata to attach to the session.
+
+ To learn more about user metadata, see
+ [User Metadata](/features/sessions#user-metadata).
+ """
+
class BrowserSettingsContext(TypedDict, total=False):
id: Required[str]
@@ -81,35 +93,44 @@ class BrowserSettingsFingerprint(TypedDict, total=False):
devices: List[Literal["desktop", "mobile"]]
- http_version: Annotated[Literal[1, 2], PropertyInfo(alias="httpVersion")]
+ http_version: Annotated[Literal["1", "2"], PropertyInfo(alias="httpVersion")]
- locales: List[str]
- """
- Full list of locales is available
- [here](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Language).
- """
+ locales: SequenceNotStr[str]
operating_systems: Annotated[
List[Literal["android", "ios", "linux", "macos", "windows"]], PropertyInfo(alias="operatingSystems")
]
- """
- Note: `operatingSystems` set to `ios` or `android` requires `devices` to include
- `"mobile"`.
- """
screen: BrowserSettingsFingerprintScreen
class BrowserSettingsViewport(TypedDict, total=False):
height: int
+ """The height of the browser."""
width: int
+ """The width of the browser."""
class BrowserSettings(TypedDict, total=False):
+ advanced_stealth: Annotated[bool, PropertyInfo(alias="advancedStealth")]
+ """Advanced Browser Stealth Mode"""
+
block_ads: Annotated[bool, PropertyInfo(alias="blockAds")]
"""Enable or disable ad blocking in the browser. Defaults to `false`."""
+ captcha_image_selector: Annotated[str, PropertyInfo(alias="captchaImageSelector")]
+ """Custom selector for captcha image.
+
+ See [Custom Captcha Solving](/features/stealth-mode#custom-captcha-solving)
+ """
+
+ captcha_input_selector: Annotated[str, PropertyInfo(alias="captchaInputSelector")]
+ """Custom selector for captcha input.
+
+ See [Custom Captcha Solving](/features/stealth-mode#custom-captcha-solving)
+ """
+
context: BrowserSettingsContext
extension_id: Annotated[str, PropertyInfo(alias="extensionId")]
@@ -121,12 +142,18 @@ class BrowserSettings(TypedDict, total=False):
fingerprint: BrowserSettingsFingerprint
"""
See usage examples
- [in the Stealth Mode page](/features/stealth-mode#fingerprinting).
+ [on the Stealth Mode page](/features/stealth-mode#fingerprinting)
"""
log_session: Annotated[bool, PropertyInfo(alias="logSession")]
"""Enable or disable session logging. Defaults to `true`."""
+ os: Literal["windows", "mac", "linux", "mobile", "tablet"]
+ """Operating system for stealth mode.
+
+ Valid values: windows, mac, linux, mobile, tablet
+ """
+
record_session: Annotated[bool, PropertyInfo(alias="recordSession")]
"""Enable or disable session recording. Defaults to `true`."""
@@ -136,7 +163,7 @@ class BrowserSettings(TypedDict, total=False):
viewport: BrowserSettingsViewport
-class ProxiesUnionMember1BrowserbaseProxyConfigGeolocation(TypedDict, total=False):
+class ProxiesUnionMember0UnionMember0Geolocation(TypedDict, total=False):
country: Required[str]
"""Country code in ISO 3166-1 alpha-2 format"""
@@ -147,7 +174,7 @@ class ProxiesUnionMember1BrowserbaseProxyConfigGeolocation(TypedDict, total=Fals
"""US state code (2 characters). Must also specify US as the country. Optional."""
-class ProxiesUnionMember1BrowserbaseProxyConfig(TypedDict, total=False):
+class ProxiesUnionMember0UnionMember0(TypedDict, total=False):
type: Required[Literal["browserbase"]]
"""Type of proxy.
@@ -160,11 +187,11 @@ class ProxiesUnionMember1BrowserbaseProxyConfig(TypedDict, total=False):
If omitted, defaults to all domains. Optional.
"""
- geolocation: ProxiesUnionMember1BrowserbaseProxyConfigGeolocation
- """Configuration for geolocation"""
+ geolocation: ProxiesUnionMember0UnionMember0Geolocation
+ """Geographic location for the proxy. Optional."""
-class ProxiesUnionMember1ExternalProxyConfig(TypedDict, total=False):
+class ProxiesUnionMember0UnionMember1(TypedDict, total=False):
server: Required[str]
"""Server URL for external proxy. Required."""
@@ -184,6 +211,9 @@ class ProxiesUnionMember1ExternalProxyConfig(TypedDict, total=False):
"""Username for external proxy authentication. Optional."""
-ProxiesUnionMember1: TypeAlias = Union[
- ProxiesUnionMember1BrowserbaseProxyConfig, ProxiesUnionMember1ExternalProxyConfig
-]
+ProxiesUnionMember0: TypeAlias = Union[ProxiesUnionMember0UnionMember0, ProxiesUnionMember0UnionMember1]
+
+
+class ProxySettings(TypedDict, total=False):
+ ca_certificates: Required[Annotated[SequenceNotStr[str], PropertyInfo(alias="caCertificates")]]
+ """[NOT IN DOCS] The TLS certificate IDs to trust. Optional."""
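The proxy TypedDicts above are renamed but keep the same two shapes: a Browserbase-managed proxy with an optional `geolocation`, and an external proxy with a required `server`. Because these are TypedDicts, callers pass plain dicts. A hedged sketch of a `proxies` value matching those shapes; the external entry's `type: "external"` literal and importing the params module this way are assumptions not visible in this hunk, and the server URL and country code are placeholders:

```python
from browserbase.types import session_create_params

# One managed proxy pinned to a country, plus one external proxy (assumed shape;
# username/password for the external proxy are optional per the docstrings above).
proxies: list[session_create_params.ProxiesUnionMember0] = [
    {"type": "browserbase", "geolocation": {"country": "US"}},
    {"type": "external", "server": "http://proxy.example.com:8080"},
]

# This value would be passed straight through as the `proxies` argument
# to client.sessions.create(...).
```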
diff --git a/src/browserbase/types/session_create_response.py b/src/browserbase/types/session_create_response.py
index 8c9ae097..b548d50f 100644
--- a/src/browserbase/types/session_create_response.py
+++ b/src/browserbase/types/session_create_response.py
@@ -1,6 +1,6 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Optional
+from typing import Dict, Optional
from datetime import datetime
from typing_extensions import Literal
@@ -55,3 +55,10 @@ class SessionCreateResponse(BaseModel):
memory_usage: Optional[int] = FieldInfo(alias="memoryUsage", default=None)
"""Memory used by the Session"""
+
+ user_metadata: Optional[Dict[str, object]] = FieldInfo(alias="userMetadata", default=None)
+ """Arbitrary user metadata to attach to the session.
+
+ To learn more about user metadata, see
+ [User Metadata](/features/sessions#user-metadata).
+ """
diff --git a/src/browserbase/types/session_live_urls.py b/src/browserbase/types/session_debug_response.py
similarity index 88%
rename from src/browserbase/types/session_live_urls.py
rename to src/browserbase/types/session_debug_response.py
index 3c7ba320..9cee7a77 100644
--- a/src/browserbase/types/session_live_urls.py
+++ b/src/browserbase/types/session_debug_response.py
@@ -6,7 +6,7 @@
from .._models import BaseModel
-__all__ = ["SessionLiveURLs", "Page"]
+__all__ = ["SessionDebugResponse", "Page"]
class Page(BaseModel):
@@ -23,7 +23,7 @@ class Page(BaseModel):
url: str
-class SessionLiveURLs(BaseModel):
+class SessionDebugResponse(BaseModel):
debugger_fullscreen_url: str = FieldInfo(alias="debuggerFullscreenUrl")
debugger_url: str = FieldInfo(alias="debuggerUrl")
diff --git a/src/browserbase/types/session_list_params.py b/src/browserbase/types/session_list_params.py
index 7ba4798c..54b0a05c 100644
--- a/src/browserbase/types/session_list_params.py
+++ b/src/browserbase/types/session_list_params.py
@@ -8,4 +8,12 @@
class SessionListParams(TypedDict, total=False):
+ q: str
+ """Query sessions by user metadata.
+
+ See
+ [Querying Sessions by User Metadata](/features/sessions#querying-sessions-by-user-metadata)
+ for the schema of this query.
+ """
+
status: Literal["RUNNING", "ERROR", "TIMED_OUT", "COMPLETED"]
diff --git a/src/browserbase/types/session_list_response.py b/src/browserbase/types/session_list_response.py
index ca162ddb..4c1bd885 100644
--- a/src/browserbase/types/session_list_response.py
+++ b/src/browserbase/types/session_list_response.py
@@ -1,10 +1,58 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List
-from typing_extensions import TypeAlias
+from typing import Dict, List, Optional
+from datetime import datetime
+from typing_extensions import Literal, TypeAlias
-from .session import Session
+from pydantic import Field as FieldInfo
-__all__ = ["SessionListResponse"]
+from .._models import BaseModel
-SessionListResponse: TypeAlias = List[Session]
+__all__ = ["SessionListResponse", "SessionListResponseItem"]
+
+
+class SessionListResponseItem(BaseModel):
+ id: str
+
+ created_at: datetime = FieldInfo(alias="createdAt")
+
+ expires_at: datetime = FieldInfo(alias="expiresAt")
+
+ keep_alive: bool = FieldInfo(alias="keepAlive")
+ """Indicates if the Session was created to be kept alive upon disconnections"""
+
+ project_id: str = FieldInfo(alias="projectId")
+ """The Project ID linked to the Session."""
+
+ proxy_bytes: int = FieldInfo(alias="proxyBytes")
+ """Bytes used via the [Proxy](/features/stealth-mode#proxies-and-residential-ips)"""
+
+ region: Literal["us-west-2", "us-east-1", "eu-central-1", "ap-southeast-1"]
+ """The region where the Session is running."""
+
+ started_at: datetime = FieldInfo(alias="startedAt")
+
+ status: Literal["RUNNING", "ERROR", "TIMED_OUT", "COMPLETED"]
+
+ updated_at: datetime = FieldInfo(alias="updatedAt")
+
+ avg_cpu_usage: Optional[int] = FieldInfo(alias="avgCpuUsage", default=None)
+ """CPU used by the Session"""
+
+ context_id: Optional[str] = FieldInfo(alias="contextId", default=None)
+ """Optional. The Context linked to the Session."""
+
+ ended_at: Optional[datetime] = FieldInfo(alias="endedAt", default=None)
+
+ memory_usage: Optional[int] = FieldInfo(alias="memoryUsage", default=None)
+ """Memory used by the Session"""
+
+ user_metadata: Optional[Dict[str, object]] = FieldInfo(alias="userMetadata", default=None)
+ """Arbitrary user metadata to attach to the session.
+
+ To learn more about user metadata, see
+ [User Metadata](/features/sessions#user-metadata).
+ """
+
+
+SessionListResponse: TypeAlias = List[SessionListResponseItem]
diff --git a/src/browserbase/types/session_retrieve_response.py b/src/browserbase/types/session_retrieve_response.py
new file mode 100644
index 00000000..a9a4ff28
--- /dev/null
+++ b/src/browserbase/types/session_retrieve_response.py
@@ -0,0 +1,64 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, Optional
+from datetime import datetime
+from typing_extensions import Literal
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+
+__all__ = ["SessionRetrieveResponse"]
+
+
+class SessionRetrieveResponse(BaseModel):
+ id: str
+
+ created_at: datetime = FieldInfo(alias="createdAt")
+
+ expires_at: datetime = FieldInfo(alias="expiresAt")
+
+ keep_alive: bool = FieldInfo(alias="keepAlive")
+ """Indicates if the Session was created to be kept alive upon disconnections"""
+
+ project_id: str = FieldInfo(alias="projectId")
+ """The Project ID linked to the Session."""
+
+ proxy_bytes: int = FieldInfo(alias="proxyBytes")
+ """Bytes used via the [Proxy](/features/stealth-mode#proxies-and-residential-ips)"""
+
+ region: Literal["us-west-2", "us-east-1", "eu-central-1", "ap-southeast-1"]
+ """The region where the Session is running."""
+
+ started_at: datetime = FieldInfo(alias="startedAt")
+
+ status: Literal["RUNNING", "ERROR", "TIMED_OUT", "COMPLETED"]
+
+ updated_at: datetime = FieldInfo(alias="updatedAt")
+
+ avg_cpu_usage: Optional[int] = FieldInfo(alias="avgCpuUsage", default=None)
+ """CPU used by the Session"""
+
+ connect_url: Optional[str] = FieldInfo(alias="connectUrl", default=None)
+ """WebSocket URL to connect to the Session."""
+
+ context_id: Optional[str] = FieldInfo(alias="contextId", default=None)
+ """Optional. The Context linked to the Session."""
+
+ ended_at: Optional[datetime] = FieldInfo(alias="endedAt", default=None)
+
+ memory_usage: Optional[int] = FieldInfo(alias="memoryUsage", default=None)
+ """Memory used by the Session"""
+
+ selenium_remote_url: Optional[str] = FieldInfo(alias="seleniumRemoteUrl", default=None)
+ """HTTP URL to connect to the Session."""
+
+ signing_key: Optional[str] = FieldInfo(alias="signingKey", default=None)
+ """Signing key to use when connecting to the Session via HTTP."""
+
+ user_metadata: Optional[Dict[str, object]] = FieldInfo(alias="userMetadata", default=None)
+ """Arbitrary user metadata to attach to the session.
+
+ To learn more about user metadata, see
+ [User Metadata](/features/sessions#user-metadata).
+ """
diff --git a/src/browserbase/types/session.py b/src/browserbase/types/session_update_response.py
similarity index 80%
rename from src/browserbase/types/session.py
rename to src/browserbase/types/session_update_response.py
index 8bd47f93..67a13711 100644
--- a/src/browserbase/types/session.py
+++ b/src/browserbase/types/session_update_response.py
@@ -1,6 +1,6 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Optional
+from typing import Dict, Optional
from datetime import datetime
from typing_extensions import Literal
@@ -8,10 +8,10 @@
from .._models import BaseModel
-__all__ = ["Session"]
+__all__ = ["SessionUpdateResponse"]
-class Session(BaseModel):
+class SessionUpdateResponse(BaseModel):
id: str
created_at: datetime = FieldInfo(alias="createdAt")
@@ -46,3 +46,10 @@ class Session(BaseModel):
memory_usage: Optional[int] = FieldInfo(alias="memoryUsage", default=None)
"""Memory used by the Session"""
+
+ user_metadata: Optional[Dict[str, object]] = FieldInfo(alias="userMetadata", default=None)
+ """Arbitrary user metadata to attach to the session.
+
+ To learn more about user metadata, see
+ [User Metadata](/features/sessions#user-metadata).
+ """
diff --git a/src/browserbase/types/sessions/__init__.py b/src/browserbase/types/sessions/__init__.py
index 0cef6b19..69d54703 100644
--- a/src/browserbase/types/sessions/__init__.py
+++ b/src/browserbase/types/sessions/__init__.py
@@ -2,9 +2,7 @@
from __future__ import annotations
-from .session_log import SessionLog as SessionLog
from .log_list_response import LogListResponse as LogListResponse
-from .session_recording import SessionRecording as SessionRecording
from .upload_create_params import UploadCreateParams as UploadCreateParams
from .upload_create_response import UploadCreateResponse as UploadCreateResponse
from .recording_retrieve_response import RecordingRetrieveResponse as RecordingRetrieveResponse
diff --git a/src/browserbase/types/sessions/log_list_response.py b/src/browserbase/types/sessions/log_list_response.py
index 2b325a8c..efd848ab 100644
--- a/src/browserbase/types/sessions/log_list_response.py
+++ b/src/browserbase/types/sessions/log_list_response.py
@@ -1,10 +1,50 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List
+from typing import Dict, List, Optional
from typing_extensions import TypeAlias
-from .session_log import SessionLog
+from pydantic import Field as FieldInfo
-__all__ = ["LogListResponse"]
+from ..._models import BaseModel
-LogListResponse: TypeAlias = List[SessionLog]
+__all__ = ["LogListResponse", "LogListResponseItem", "LogListResponseItemRequest", "LogListResponseItemResponse"]
+
+
+class LogListResponseItemRequest(BaseModel):
+ params: Dict[str, object]
+
+ raw_body: str = FieldInfo(alias="rawBody")
+
+ timestamp: Optional[int] = None
+ """milliseconds that have elapsed since the UNIX epoch"""
+
+
+class LogListResponseItemResponse(BaseModel):
+ raw_body: str = FieldInfo(alias="rawBody")
+
+ result: Dict[str, object]
+
+ timestamp: Optional[int] = None
+ """milliseconds that have elapsed since the UNIX epoch"""
+
+
+class LogListResponseItem(BaseModel):
+ method: str
+
+ page_id: int = FieldInfo(alias="pageId")
+
+ session_id: str = FieldInfo(alias="sessionId")
+
+ frame_id: Optional[str] = FieldInfo(alias="frameId", default=None)
+
+ loader_id: Optional[str] = FieldInfo(alias="loaderId", default=None)
+
+ request: Optional[LogListResponseItemRequest] = None
+
+ response: Optional[LogListResponseItemResponse] = None
+
+ timestamp: Optional[int] = None
+ """milliseconds that have elapsed since the UNIX epoch"""
+
+
+LogListResponse: TypeAlias = List[LogListResponseItem]
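A short sketch of consuming the reshaped log items; the `client.sessions.logs.list` path mirrors the sessions/logs tests touched later in this diff, and "session_id" is a placeholder:

    from browserbase import Browserbase

    client = Browserbase()

    logs = client.sessions.logs.list("session_id")  # placeholder session ID
    for log in logs:
        print(log.method, log.page_id, log.timestamp)
        if log.request is not None:
            print("  request params:", log.request.params)
        if log.response is not None:
            print("  response result:", log.response.result)
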
diff --git a/src/browserbase/types/sessions/recording_retrieve_response.py b/src/browserbase/types/sessions/recording_retrieve_response.py
index 951969bb..d3613b8c 100644
--- a/src/browserbase/types/sessions/recording_retrieve_response.py
+++ b/src/browserbase/types/sessions/recording_retrieve_response.py
@@ -1,10 +1,28 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List
+from typing import Dict, List
from typing_extensions import TypeAlias
-from .session_recording import SessionRecording
+from pydantic import Field as FieldInfo
-__all__ = ["RecordingRetrieveResponse"]
+from ..._models import BaseModel
-RecordingRetrieveResponse: TypeAlias = List[SessionRecording]
+__all__ = ["RecordingRetrieveResponse", "RecordingRetrieveResponseItem"]
+
+
+class RecordingRetrieveResponseItem(BaseModel):
+ data: Dict[str, object]
+ """
+ See
+ [rrweb documentation](https://github.com/rrweb-io/rrweb/blob/master/docs/recipes/dive-into-event.md).
+ """
+
+ session_id: str = FieldInfo(alias="sessionId")
+
+ timestamp: int
+ """milliseconds that have elapsed since the UNIX epoch"""
+
+ type: int
+
+
+RecordingRetrieveResponse: TypeAlias = List[RecordingRetrieveResponseItem]
diff --git a/src/browserbase/types/sessions/session_log.py b/src/browserbase/types/sessions/session_log.py
deleted file mode 100644
index d15eb831..00000000
--- a/src/browserbase/types/sessions/session_log.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Dict, Optional
-
-from pydantic import Field as FieldInfo
-
-from ..._models import BaseModel
-
-__all__ = ["SessionLog", "Request", "Response"]
-
-
-class Request(BaseModel):
- params: Dict[str, object]
-
- raw_body: str = FieldInfo(alias="rawBody")
-
- timestamp: int
- """milliseconds that have elapsed since the UNIX epoch"""
-
-
-class Response(BaseModel):
- raw_body: str = FieldInfo(alias="rawBody")
-
- result: Dict[str, object]
-
- timestamp: int
- """milliseconds that have elapsed since the UNIX epoch"""
-
-
-class SessionLog(BaseModel):
- event_id: str = FieldInfo(alias="eventId")
-
- method: str
-
- page_id: int = FieldInfo(alias="pageId")
-
- session_id: str = FieldInfo(alias="sessionId")
-
- timestamp: int
- """milliseconds that have elapsed since the UNIX epoch"""
-
- frame_id: Optional[str] = FieldInfo(alias="frameId", default=None)
-
- loader_id: Optional[str] = FieldInfo(alias="loaderId", default=None)
-
- request: Optional[Request] = None
-
- response: Optional[Response] = None
diff --git a/src/browserbase/types/sessions/session_recording.py b/src/browserbase/types/sessions/session_recording.py
deleted file mode 100644
index d3e0325a..00000000
--- a/src/browserbase/types/sessions/session_recording.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Dict
-
-from pydantic import Field as FieldInfo
-
-from ..._models import BaseModel
-
-__all__ = ["SessionRecording"]
-
-
-class SessionRecording(BaseModel):
- id: str
-
- data: Dict[str, object]
- """
- See
- [rrweb documentation](https://github.com/rrweb-io/rrweb/blob/master/docs/recipes/dive-into-event.md).
- """
-
- session_id: str = FieldInfo(alias="sessionId")
-
- timestamp: int
- """milliseconds that have elapsed since the UNIX epoch"""
-
- type: int
diff --git a/src/browserbase/types/sessions/upload_create_response.py b/src/browserbase/types/sessions/upload_create_response.py
index ceece2cd..abeed017 100644
--- a/src/browserbase/types/sessions/upload_create_response.py
+++ b/src/browserbase/types/sessions/upload_create_response.py
@@ -1,6 +1,5 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
from ..._models import BaseModel
__all__ = ["UploadCreateResponse"]
diff --git a/tests/api_resources/sessions/test_downloads.py b/tests/api_resources/sessions/test_downloads.py
index 825ff786..10e84fdb 100644
--- a/tests/api_resources/sessions/test_downloads.py
+++ b/tests/api_resources/sessions/test_downloads.py
@@ -75,7 +75,9 @@ def test_path_params_list(self, client: Browserbase) -> None:
class TestAsyncDownloads:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
@pytest.mark.respx(base_url=base_url)
diff --git a/tests/api_resources/sessions/test_logs.py b/tests/api_resources/sessions/test_logs.py
index c72002b3..eadde723 100644
--- a/tests/api_resources/sessions/test_logs.py
+++ b/tests/api_resources/sessions/test_logs.py
@@ -57,7 +57,9 @@ def test_path_params_list(self, client: Browserbase) -> None:
class TestAsyncLogs:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_list(self, async_client: AsyncBrowserbase) -> None:
diff --git a/tests/api_resources/sessions/test_recording.py b/tests/api_resources/sessions/test_recording.py
index 0d7a542e..f1e97d07 100644
--- a/tests/api_resources/sessions/test_recording.py
+++ b/tests/api_resources/sessions/test_recording.py
@@ -57,7 +57,9 @@ def test_path_params_retrieve(self, client: Browserbase) -> None:
class TestAsyncRecording:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_retrieve(self, async_client: AsyncBrowserbase) -> None:
diff --git a/tests/api_resources/sessions/test_uploads.py b/tests/api_resources/sessions/test_uploads.py
index f193256c..748b92e7 100644
--- a/tests/api_resources/sessions/test_uploads.py
+++ b/tests/api_resources/sessions/test_uploads.py
@@ -61,7 +61,9 @@ def test_path_params_create(self, client: Browserbase) -> None:
class TestAsyncUploads:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_create(self, async_client: AsyncBrowserbase) -> None:
diff --git a/tests/api_resources/test_contexts.py b/tests/api_resources/test_contexts.py
index e53b7e11..4ad27733 100644
--- a/tests/api_resources/test_contexts.py
+++ b/tests/api_resources/test_contexts.py
@@ -9,7 +9,11 @@
from browserbase import Browserbase, AsyncBrowserbase
from tests.utils import assert_matches_type
-from browserbase.types import Context, ContextCreateResponse, ContextUpdateResponse
+from browserbase.types import (
+ ContextCreateResponse,
+ ContextUpdateResponse,
+ ContextRetrieveResponse,
+)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -53,7 +57,7 @@ def test_method_retrieve(self, client: Browserbase) -> None:
context = client.contexts.retrieve(
"id",
)
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: Browserbase) -> None:
@@ -64,7 +68,7 @@ def test_raw_response_retrieve(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
context = response.parse()
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
@parametrize
def test_streaming_response_retrieve(self, client: Browserbase) -> None:
@@ -75,7 +79,7 @@ def test_streaming_response_retrieve(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
context = response.parse()
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -126,7 +130,9 @@ def test_path_params_update(self, client: Browserbase) -> None:
class TestAsyncContexts:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_create(self, async_client: AsyncBrowserbase) -> None:
@@ -164,7 +170,7 @@ async def test_method_retrieve(self, async_client: AsyncBrowserbase) -> None:
context = await async_client.contexts.retrieve(
"id",
)
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -175,7 +181,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> No
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
context = await response.parse()
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -186,7 +192,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase)
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
context = await response.parse()
- assert_matches_type(Context, context, path=["response"])
+ assert_matches_type(ContextRetrieveResponse, context, path=["response"])
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_extensions.py b/tests/api_resources/test_extensions.py
index b7fec7a5..e32ae9b0 100644
--- a/tests/api_resources/test_extensions.py
+++ b/tests/api_resources/test_extensions.py
@@ -9,7 +9,7 @@
from browserbase import Browserbase, AsyncBrowserbase
from tests.utils import assert_matches_type
-from browserbase.types import Extension
+from browserbase.types import ExtensionCreateResponse, ExtensionRetrieveResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -22,7 +22,7 @@ def test_method_create(self, client: Browserbase) -> None:
extension = client.extensions.create(
file=b"raw file contents",
)
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
@parametrize
def test_raw_response_create(self, client: Browserbase) -> None:
@@ -33,7 +33,7 @@ def test_raw_response_create(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
@parametrize
def test_streaming_response_create(self, client: Browserbase) -> None:
@@ -44,7 +44,7 @@ def test_streaming_response_create(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -53,7 +53,7 @@ def test_method_retrieve(self, client: Browserbase) -> None:
extension = client.extensions.retrieve(
"id",
)
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: Browserbase) -> None:
@@ -64,7 +64,7 @@ def test_raw_response_retrieve(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
@parametrize
def test_streaming_response_retrieve(self, client: Browserbase) -> None:
@@ -75,7 +75,7 @@ def test_streaming_response_retrieve(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -126,14 +126,16 @@ def test_path_params_delete(self, client: Browserbase) -> None:
class TestAsyncExtensions:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_create(self, async_client: AsyncBrowserbase) -> None:
extension = await async_client.extensions.create(
file=b"raw file contents",
)
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
@parametrize
async def test_raw_response_create(self, async_client: AsyncBrowserbase) -> None:
@@ -144,7 +146,7 @@ async def test_raw_response_create(self, async_client: AsyncBrowserbase) -> None
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = await response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
@parametrize
async def test_streaming_response_create(self, async_client: AsyncBrowserbase) -> None:
@@ -155,7 +157,7 @@ async def test_streaming_response_create(self, async_client: AsyncBrowserbase) -
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = await response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionCreateResponse, extension, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -164,7 +166,7 @@ async def test_method_retrieve(self, async_client: AsyncBrowserbase) -> None:
extension = await async_client.extensions.retrieve(
"id",
)
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -175,7 +177,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> No
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = await response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -186,7 +188,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase)
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
extension = await response.parse()
- assert_matches_type(Extension, extension, path=["response"])
+ assert_matches_type(ExtensionRetrieveResponse, extension, path=["response"])
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_projects.py b/tests/api_resources/test_projects.py
index 9e70d034..0d8e3c94 100644
--- a/tests/api_resources/test_projects.py
+++ b/tests/api_resources/test_projects.py
@@ -9,7 +9,7 @@
from browserbase import Browserbase, AsyncBrowserbase
from tests.utils import assert_matches_type
-from browserbase.types import Project, ProjectUsage, ProjectListResponse
+from browserbase.types import ProjectListResponse, ProjectUsageResponse, ProjectRetrieveResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -22,7 +22,7 @@ def test_method_retrieve(self, client: Browserbase) -> None:
project = client.projects.retrieve(
"id",
)
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: Browserbase) -> None:
@@ -33,7 +33,7 @@ def test_raw_response_retrieve(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = response.parse()
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
@parametrize
def test_streaming_response_retrieve(self, client: Browserbase) -> None:
@@ -44,7 +44,7 @@ def test_streaming_response_retrieve(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = response.parse()
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -85,7 +85,7 @@ def test_method_usage(self, client: Browserbase) -> None:
project = client.projects.usage(
"id",
)
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
@parametrize
def test_raw_response_usage(self, client: Browserbase) -> None:
@@ -96,7 +96,7 @@ def test_raw_response_usage(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = response.parse()
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
@parametrize
def test_streaming_response_usage(self, client: Browserbase) -> None:
@@ -107,7 +107,7 @@ def test_streaming_response_usage(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = response.parse()
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -120,14 +120,16 @@ def test_path_params_usage(self, client: Browserbase) -> None:
class TestAsyncProjects:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_retrieve(self, async_client: AsyncBrowserbase) -> None:
project = await async_client.projects.retrieve(
"id",
)
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -138,7 +140,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> No
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = await response.parse()
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -149,7 +151,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase)
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = await response.parse()
- assert_matches_type(Project, project, path=["response"])
+ assert_matches_type(ProjectRetrieveResponse, project, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -190,7 +192,7 @@ async def test_method_usage(self, async_client: AsyncBrowserbase) -> None:
project = await async_client.projects.usage(
"id",
)
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
@parametrize
async def test_raw_response_usage(self, async_client: AsyncBrowserbase) -> None:
@@ -201,7 +203,7 @@ async def test_raw_response_usage(self, async_client: AsyncBrowserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = await response.parse()
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
@parametrize
async def test_streaming_response_usage(self, async_client: AsyncBrowserbase) -> None:
@@ -212,7 +214,7 @@ async def test_streaming_response_usage(self, async_client: AsyncBrowserbase) ->
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
project = await response.parse()
- assert_matches_type(ProjectUsage, project, path=["response"])
+ assert_matches_type(ProjectUsageResponse, project, path=["response"])
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_sessions.py b/tests/api_resources/test_sessions.py
index 8ebd5daf..24da8f0b 100644
--- a/tests/api_resources/test_sessions.py
+++ b/tests/api_resources/test_sessions.py
@@ -10,10 +10,11 @@
from browserbase import Browserbase, AsyncBrowserbase
from tests.utils import assert_matches_type
from browserbase.types import (
- Session,
- SessionLiveURLs,
SessionListResponse,
+ SessionDebugResponse,
SessionCreateResponse,
+ SessionUpdateResponse,
+ SessionRetrieveResponse,
)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -34,18 +35,21 @@ def test_method_create_with_all_params(self, client: Browserbase) -> None:
session = client.sessions.create(
project_id="projectId",
browser_settings={
+ "advanced_stealth": True,
"block_ads": True,
+ "captcha_image_selector": "captchaImageSelector",
+ "captcha_input_selector": "captchaInputSelector",
"context": {
"id": "id",
"persist": True,
},
"extension_id": "extensionId",
"fingerprint": {
- "browsers": ["chrome", "edge", "firefox"],
- "devices": ["desktop", "mobile"],
- "http_version": 1,
- "locales": ["string", "string", "string"],
- "operating_systems": ["android", "ios", "linux"],
+ "browsers": ["chrome"],
+ "devices": ["desktop"],
+ "http_version": "1",
+ "locales": ["string"],
+ "operating_systems": ["android"],
"screen": {
"max_height": 0,
"max_width": 0,
@@ -54,6 +58,7 @@ def test_method_create_with_all_params(self, client: Browserbase) -> None:
},
},
"log_session": True,
+ "os": "windows",
"record_session": True,
"solve_captchas": True,
"viewport": {
@@ -63,9 +68,21 @@ def test_method_create_with_all_params(self, client: Browserbase) -> None:
},
extension_id="extensionId",
keep_alive=True,
- proxies=True,
+ proxies=[
+ {
+ "type": "browserbase",
+ "domain_pattern": "domainPattern",
+ "geolocation": {
+ "country": "xx",
+ "city": "city",
+ "state": "xx",
+ },
+ }
+ ],
+ proxy_settings={"ca_certificates": ["182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e"]},
region="us-west-2",
api_timeout=60,
+ user_metadata={"foo": "bar"},
)
assert_matches_type(SessionCreateResponse, session, path=["response"])
@@ -98,7 +115,7 @@ def test_method_retrieve(self, client: Browserbase) -> None:
session = client.sessions.retrieve(
"id",
)
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionRetrieveResponse, session, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: Browserbase) -> None:
@@ -109,7 +126,7 @@ def test_raw_response_retrieve(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionRetrieveResponse, session, path=["response"])
@parametrize
def test_streaming_response_retrieve(self, client: Browserbase) -> None:
@@ -120,7 +137,7 @@ def test_streaming_response_retrieve(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionRetrieveResponse, session, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -138,7 +155,7 @@ def test_method_update(self, client: Browserbase) -> None:
project_id="projectId",
status="REQUEST_RELEASE",
)
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
@parametrize
def test_raw_response_update(self, client: Browserbase) -> None:
@@ -151,7 +168,7 @@ def test_raw_response_update(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
@parametrize
def test_streaming_response_update(self, client: Browserbase) -> None:
@@ -164,7 +181,7 @@ def test_streaming_response_update(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -185,6 +202,7 @@ def test_method_list(self, client: Browserbase) -> None:
@parametrize
def test_method_list_with_all_params(self, client: Browserbase) -> None:
session = client.sessions.list(
+ q="q",
status="RUNNING",
)
assert_matches_type(SessionListResponse, session, path=["response"])
@@ -214,7 +232,7 @@ def test_method_debug(self, client: Browserbase) -> None:
session = client.sessions.debug(
"id",
)
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
@parametrize
def test_raw_response_debug(self, client: Browserbase) -> None:
@@ -225,7 +243,7 @@ def test_raw_response_debug(self, client: Browserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = response.parse()
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
@parametrize
def test_streaming_response_debug(self, client: Browserbase) -> None:
@@ -236,7 +254,7 @@ def test_streaming_response_debug(self, client: Browserbase) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = response.parse()
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -249,7 +267,9 @@ def test_path_params_debug(self, client: Browserbase) -> None:
class TestAsyncSessions:
- parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
@parametrize
async def test_method_create(self, async_client: AsyncBrowserbase) -> None:
@@ -263,18 +283,21 @@ async def test_method_create_with_all_params(self, async_client: AsyncBrowserbas
session = await async_client.sessions.create(
project_id="projectId",
browser_settings={
+ "advanced_stealth": True,
"block_ads": True,
+ "captcha_image_selector": "captchaImageSelector",
+ "captcha_input_selector": "captchaInputSelector",
"context": {
"id": "id",
"persist": True,
},
"extension_id": "extensionId",
"fingerprint": {
- "browsers": ["chrome", "edge", "firefox"],
- "devices": ["desktop", "mobile"],
- "http_version": 1,
- "locales": ["string", "string", "string"],
- "operating_systems": ["android", "ios", "linux"],
+ "browsers": ["chrome"],
+ "devices": ["desktop"],
+ "http_version": "1",
+ "locales": ["string"],
+ "operating_systems": ["android"],
"screen": {
"max_height": 0,
"max_width": 0,
@@ -283,6 +306,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncBrowserbas
},
},
"log_session": True,
+ "os": "windows",
"record_session": True,
"solve_captchas": True,
"viewport": {
@@ -292,9 +316,21 @@ async def test_method_create_with_all_params(self, async_client: AsyncBrowserbas
},
extension_id="extensionId",
keep_alive=True,
- proxies=True,
+ proxies=[
+ {
+ "type": "browserbase",
+ "domain_pattern": "domainPattern",
+ "geolocation": {
+ "country": "xx",
+ "city": "city",
+ "state": "xx",
+ },
+ }
+ ],
+ proxy_settings={"ca_certificates": ["182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e"]},
region="us-west-2",
api_timeout=60,
+ user_metadata={"foo": "bar"},
)
assert_matches_type(SessionCreateResponse, session, path=["response"])
@@ -327,7 +363,7 @@ async def test_method_retrieve(self, async_client: AsyncBrowserbase) -> None:
session = await async_client.sessions.retrieve(
"id",
)
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionRetrieveResponse, session, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -338,7 +374,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncBrowserbase) -> No
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = await response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionRetrieveResponse, session, path=["response"])
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase) -> None:
@@ -349,7 +385,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncBrowserbase)
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = await response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionRetrieveResponse, session, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -367,7 +403,7 @@ async def test_method_update(self, async_client: AsyncBrowserbase) -> None:
project_id="projectId",
status="REQUEST_RELEASE",
)
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
@parametrize
async def test_raw_response_update(self, async_client: AsyncBrowserbase) -> None:
@@ -380,7 +416,7 @@ async def test_raw_response_update(self, async_client: AsyncBrowserbase) -> None
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = await response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
@parametrize
async def test_streaming_response_update(self, async_client: AsyncBrowserbase) -> None:
@@ -393,7 +429,7 @@ async def test_streaming_response_update(self, async_client: AsyncBrowserbase) -
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = await response.parse()
- assert_matches_type(Session, session, path=["response"])
+ assert_matches_type(SessionUpdateResponse, session, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -414,6 +450,7 @@ async def test_method_list(self, async_client: AsyncBrowserbase) -> None:
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncBrowserbase) -> None:
session = await async_client.sessions.list(
+ q="q",
status="RUNNING",
)
assert_matches_type(SessionListResponse, session, path=["response"])
@@ -443,7 +480,7 @@ async def test_method_debug(self, async_client: AsyncBrowserbase) -> None:
session = await async_client.sessions.debug(
"id",
)
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
@parametrize
async def test_raw_response_debug(self, async_client: AsyncBrowserbase) -> None:
@@ -454,7 +491,7 @@ async def test_raw_response_debug(self, async_client: AsyncBrowserbase) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = await response.parse()
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
@parametrize
async def test_streaming_response_debug(self, async_client: AsyncBrowserbase) -> None:
@@ -465,7 +502,7 @@ async def test_streaming_response_debug(self, async_client: AsyncBrowserbase) ->
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
session = await response.parse()
- assert_matches_type(SessionLiveURLs, session, path=["response"])
+ assert_matches_type(SessionDebugResponse, session, path=["response"])
assert cast(Any, response.is_closed) is True
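Outside the test suite, the new create() shape exercised above would look roughly like this; the project ID, geolocation values, and metadata are placeholders:

    from browserbase import Browserbase

    client = Browserbase()

    session = client.sessions.create(
        project_id="your_project_id",  # placeholder
        proxies=[
            {
                "type": "browserbase",
                # illustrative geolocation values
                "geolocation": {"country": "US", "city": "NEW_YORK", "state": "NY"},
            }
        ],
        user_metadata={"order_id": "123"},  # arbitrary, queryable via sessions.list(q=...)
    )
    print(session.id)
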
diff --git a/tests/conftest.py b/tests/conftest.py
index 15ddbcad..7fc31c49 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,16 +1,20 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
from __future__ import annotations
import os
import logging
from typing import TYPE_CHECKING, Iterator, AsyncIterator
+import httpx
import pytest
from pytest_asyncio import is_async_test
-from browserbase import Browserbase, AsyncBrowserbase
+from browserbase import Browserbase, AsyncBrowserbase, DefaultAioHttpClient
+from browserbase._utils import is_dict
if TYPE_CHECKING:
- from _pytest.fixtures import FixtureRequest
+ from _pytest.fixtures import FixtureRequest # pyright: ignore[reportPrivateImportUsage]
pytest.register_assert_rewrite("tests.utils")
@@ -25,6 +29,19 @@ def pytest_collection_modifyitems(items: list[pytest.Function]) -> None:
for async_test in pytest_asyncio_tests:
async_test.add_marker(session_scope_marker, append=False)
+ # We skip tests that use both the aiohttp client and respx_mock as respx_mock
+ # doesn't support custom transports.
+ for item in items:
+ if "async_client" not in item.fixturenames or "respx_mock" not in item.fixturenames:
+ continue
+
+ if not hasattr(item, "callspec"):
+ continue
+
+ async_client_param = item.callspec.params.get("async_client")
+ if is_dict(async_client_param) and async_client_param.get("http_client") == "aiohttp":
+ item.add_marker(pytest.mark.skip(reason="aiohttp client is not compatible with respx_mock"))
+
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -43,9 +60,25 @@ def client(request: FixtureRequest) -> Iterator[Browserbase]:
@pytest.fixture(scope="session")
async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncBrowserbase]:
- strict = getattr(request, "param", True)
- if not isinstance(strict, bool):
- raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}")
-
- async with AsyncBrowserbase(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
+ param = getattr(request, "param", True)
+
+ # defaults
+ strict = True
+ http_client: None | httpx.AsyncClient = None
+
+ if isinstance(param, bool):
+ strict = param
+ elif is_dict(param):
+ strict = param.get("strict", True)
+ assert isinstance(strict, bool)
+
+ http_client_type = param.get("http_client", "httpx")
+ if http_client_type == "aiohttp":
+ http_client = DefaultAioHttpClient()
+ else:
+ raise TypeError(f"Unexpected fixture parameter type {type(param)}, expected bool or dict")
+
+ async with AsyncBrowserbase(
+ base_url=base_url, api_key=api_key, _strict_response_validation=strict, http_client=http_client
+ ) as client:
yield client
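For context on the new aiohttp parametrization, this is roughly what the aiohttp-backed client looks like in application code; the `browserbase[aiohttp]` extra name is an assumption:

    import asyncio

    from browserbase import AsyncBrowserbase, DefaultAioHttpClient


    async def main() -> None:
        # Assumes the aiohttp extra is installed, e.g. `pip install browserbase[aiohttp]`,
        # and that BROWSERBASE_API_KEY is set in the environment.
        async with AsyncBrowserbase(http_client=DefaultAioHttpClient()) as client:
            sessions = await client.sessions.list()
            print(len(sessions))


    asyncio.run(main())
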
diff --git a/tests/test_client.py b/tests/test_client.py
index c70ef50e..aed68baf 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -4,6 +4,7 @@
import gc
import os
+import sys
import json
import asyncio
import inspect
@@ -19,13 +20,17 @@
from browserbase import Browserbase, AsyncBrowserbase, APIResponseValidationError
from browserbase._types import Omit
+from browserbase._utils import asyncify
from browserbase._models import BaseModel, FinalRequestOptions
-from browserbase._constants import RAW_RESPONSE_HEADER
from browserbase._exceptions import APIStatusError, APITimeoutError, BrowserbaseError, APIResponseValidationError
from browserbase._base_client import (
DEFAULT_TIMEOUT,
HTTPX_DEFAULT_TIMEOUT,
BaseClient,
+ OtherPlatform,
+ DefaultHttpxClient,
+ DefaultAsyncHttpxClient,
+ get_platform,
make_request_options,
)
@@ -186,6 +191,7 @@ def test_copy_signature(self) -> None:
copy_param = copy_signature.parameters.get(name)
assert copy_param is not None, f"copy() signature is missing the {name} param"
+ @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12")
def test_copy_build_request(self) -> None:
options = FinalRequestOptions(method="get", url="/foo")
@@ -352,11 +358,11 @@ def test_default_query_option(self) -> None:
FinalRequestOptions(
method="get",
url="/foo",
- params={"foo": "baz", "query_param": "overriden"},
+ params={"foo": "baz", "query_param": "overridden"},
)
)
url = httpx.URL(request.url)
- assert dict(url.params) == {"foo": "baz", "query_param": "overriden"}
+ assert dict(url.params) == {"foo": "baz", "query_param": "overridden"}
def test_request_extra_json(self) -> None:
request = self.client._build_request(
@@ -458,7 +464,7 @@ def test_request_extra_query(self) -> None:
def test_multipart_repeating_array(self, client: Browserbase) -> None:
request = client._build_request(
FinalRequestOptions.construct(
- method="get",
+ method="post",
url="/foo",
headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"},
json_data={"array": ["foo", "bar"]},
@@ -703,7 +709,7 @@ class Model(BaseModel):
[3, "", 0.5],
[2, "", 0.5 * 2.0],
[1, "", 0.5 * 4.0],
- [-1100, "", 7.8], # test large number potentially overflowing
+ [-1100, "", 8], # test large number potentially overflowing
],
)
@mock.patch("time.time", mock.MagicMock(return_value=1696004797))
@@ -717,32 +723,21 @@ def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str
@mock.patch("browserbase._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
- def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
+ def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: Browserbase) -> None:
respx_mock.post("/v1/sessions").mock(side_effect=httpx.TimeoutException("Test timeout error"))
with pytest.raises(APITimeoutError):
- self.client.post(
- "/v1/sessions",
- body=cast(object, dict(project_id="your_project_id")),
- cast_to=httpx.Response,
- options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
- )
+ client.sessions.with_streaming_response.create(project_id="projectId").__enter__()
assert _get_open_connections(self.client) == 0
@mock.patch("browserbase._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
- def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
+ def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: Browserbase) -> None:
respx_mock.post("/v1/sessions").mock(return_value=httpx.Response(500))
with pytest.raises(APIStatusError):
- self.client.post(
- "/v1/sessions",
- body=cast(object, dict(project_id="your_project_id")),
- cast_to=httpx.Response,
- options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
- )
-
+ client.sessions.with_streaming_response.create(project_id="projectId").__enter__()
assert _get_open_connections(self.client) == 0
@pytest.mark.parametrize("failures_before_success", [0, 2, 4])
@@ -826,6 +821,55 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
assert response.http_request.headers.get("x-stainless-retry-count") == "42"
+ def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None:
+ # Test that the proxy environment variables are set correctly
+ monkeypatch.setenv("HTTPS_PROXY", "https://example.org")
+
+ client = DefaultHttpxClient()
+
+ mounts = tuple(client._mounts.items())
+ assert len(mounts) == 1
+ assert mounts[0][0].pattern == "https://"
+
+ @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning")
+ def test_default_client_creation(self) -> None:
+ # Ensure that the client can be initialized without any exceptions
+ DefaultHttpxClient(
+ verify=True,
+ cert=None,
+ trust_env=True,
+ http1=True,
+ http2=False,
+ limits=httpx.Limits(max_connections=100, max_keepalive_connections=20),
+ )
+
+ @pytest.mark.respx(base_url=base_url)
+ def test_follow_redirects(self, respx_mock: MockRouter) -> None:
+ # Test that the default follow_redirects=True allows following redirects
+ respx_mock.post("/redirect").mock(
+ return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"})
+ )
+ respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"}))
+
+ response = self.client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response)
+ assert response.status_code == 200
+ assert response.json() == {"status": "ok"}
+
+ @pytest.mark.respx(base_url=base_url)
+ def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None:
+ # Test that follow_redirects=False prevents following redirects
+ respx_mock.post("/redirect").mock(
+ return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"})
+ )
+
+ with pytest.raises(APIStatusError) as exc_info:
+ self.client.post(
+ "/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response
+ )
+
+ assert exc_info.value.response.status_code == 302
+ assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected"
+
class TestAsyncBrowserbase:
client = AsyncBrowserbase(base_url=base_url, api_key=api_key, _strict_response_validation=True)
@@ -962,6 +1006,7 @@ def test_copy_signature(self) -> None:
copy_param = copy_signature.parameters.get(name)
assert copy_param is not None, f"copy() signature is missing the {name} param"
+ @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12")
def test_copy_build_request(self) -> None:
options = FinalRequestOptions(method="get", url="/foo")
@@ -1128,11 +1173,11 @@ def test_default_query_option(self) -> None:
FinalRequestOptions(
method="get",
url="/foo",
- params={"foo": "baz", "query_param": "overriden"},
+ params={"foo": "baz", "query_param": "overridden"},
)
)
url = httpx.URL(request.url)
- assert dict(url.params) == {"foo": "baz", "query_param": "overriden"}
+ assert dict(url.params) == {"foo": "baz", "query_param": "overridden"}
def test_request_extra_json(self) -> None:
request = self.client._build_request(
@@ -1234,7 +1279,7 @@ def test_request_extra_query(self) -> None:
def test_multipart_repeating_array(self, async_client: AsyncBrowserbase) -> None:
request = async_client._build_request(
FinalRequestOptions.construct(
- method="get",
+ method="post",
url="/foo",
headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"},
json_data={"array": ["foo", "bar"]},
@@ -1482,7 +1527,7 @@ class Model(BaseModel):
[3, "", 0.5],
[2, "", 0.5 * 2.0],
[1, "", 0.5 * 4.0],
- [-1100, "", 7.8], # test large number potentially overflowing
+ [-1100, "", 8], # test large number potentially overflowing
],
)
@mock.patch("time.time", mock.MagicMock(return_value=1696004797))
@@ -1497,32 +1542,25 @@ async def test_parse_retry_after_header(self, remaining_retries: int, retry_afte
@mock.patch("browserbase._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
- async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
+ async def test_retrying_timeout_errors_doesnt_leak(
+ self, respx_mock: MockRouter, async_client: AsyncBrowserbase
+ ) -> None:
respx_mock.post("/v1/sessions").mock(side_effect=httpx.TimeoutException("Test timeout error"))
with pytest.raises(APITimeoutError):
- await self.client.post(
- "/v1/sessions",
- body=cast(object, dict(project_id="your_project_id")),
- cast_to=httpx.Response,
- options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
- )
+ await async_client.sessions.with_streaming_response.create(project_id="projectId").__aenter__()
assert _get_open_connections(self.client) == 0
@mock.patch("browserbase._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout)
@pytest.mark.respx(base_url=base_url)
- async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None:
+ async def test_retrying_status_errors_doesnt_leak(
+ self, respx_mock: MockRouter, async_client: AsyncBrowserbase
+ ) -> None:
respx_mock.post("/v1/sessions").mock(return_value=httpx.Response(500))
with pytest.raises(APIStatusError):
- await self.client.post(
- "/v1/sessions",
- body=cast(object, dict(project_id="your_project_id")),
- cast_to=httpx.Response,
- options={"headers": {RAW_RESPONSE_HEADER: "stream"}},
- )
-
+ await async_client.sessions.with_streaming_response.create(project_id="projectId").__aenter__()
assert _get_open_connections(self.client) == 0
@pytest.mark.parametrize("failures_before_success", [0, 2, 4])
@@ -1608,3 +1646,56 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
)
assert response.http_request.headers.get("x-stainless-retry-count") == "42"
+
+ async def test_get_platform(self) -> None:
+ platform = await asyncify(get_platform)()
+ assert isinstance(platform, (str, OtherPlatform))
+
+ async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None:
+ # Test that the proxy environment variables are set correctly
+ monkeypatch.setenv("HTTPS_PROXY", "https://example.org")
+
+ client = DefaultAsyncHttpxClient()
+
+ mounts = tuple(client._mounts.items())
+ assert len(mounts) == 1
+ assert mounts[0][0].pattern == "https://"
+
+ @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning")
+ async def test_default_client_creation(self) -> None:
+ # Ensure that the client can be initialized without any exceptions
+ DefaultAsyncHttpxClient(
+ verify=True,
+ cert=None,
+ trust_env=True,
+ http1=True,
+ http2=False,
+ limits=httpx.Limits(max_connections=100, max_keepalive_connections=20),
+ )
+
+ @pytest.mark.respx(base_url=base_url)
+ async def test_follow_redirects(self, respx_mock: MockRouter) -> None:
+ # Test that the default follow_redirects=True allows following redirects
+ respx_mock.post("/redirect").mock(
+ return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"})
+ )
+ respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"}))
+
+ response = await self.client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response)
+ assert response.status_code == 200
+ assert response.json() == {"status": "ok"}
+
+ @pytest.mark.respx(base_url=base_url)
+ async def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None:
+ # Test that follow_redirects=False prevents following redirects
+ respx_mock.post("/redirect").mock(
+ return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"})
+ )
+
+ with pytest.raises(APIStatusError) as exc_info:
+ await self.client.post(
+ "/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response
+ )
+
+ assert exc_info.value.response.status_code == 302
+ assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected"
diff --git a/tests/test_models.py b/tests/test_models.py
index 5b8044f0..34f87334 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -1,14 +1,14 @@
import json
-from typing import Any, Dict, List, Union, Optional, cast
+from typing import TYPE_CHECKING, Any, Dict, List, Union, Optional, cast
from datetime import datetime, timezone
-from typing_extensions import Literal, Annotated
+from typing_extensions import Literal, Annotated, TypeAliasType
import pytest
import pydantic
from pydantic import Field
from browserbase._utils import PropertyInfo
-from browserbase._compat import PYDANTIC_V2, parse_obj, model_dump, model_json
+from browserbase._compat import PYDANTIC_V1, parse_obj, model_dump, model_json
from browserbase._models import BaseModel, construct_type
@@ -294,12 +294,12 @@ class Model(BaseModel):
assert cast(bool, m.foo) is True
m = Model.construct(foo={"name": 3})
- if PYDANTIC_V2:
- assert isinstance(m.foo, Submodel1)
- assert m.foo.name == 3 # type: ignore
- else:
+ if PYDANTIC_V1:
assert isinstance(m.foo, Submodel2)
assert m.foo.name == "3"
+ else:
+ assert isinstance(m.foo, Submodel1)
+ assert m.foo.name == 3 # type: ignore
def test_list_of_unions() -> None:
@@ -426,10 +426,10 @@ class Model(BaseModel):
expected = datetime(2019, 12, 27, 18, 11, 19, 117000, tzinfo=timezone.utc)
- if PYDANTIC_V2:
- expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}'
- else:
+ if PYDANTIC_V1:
expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}'
+ else:
+ expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}'
model = Model.construct(created_at="2019-12-27T18:11:19.117Z")
assert model.created_at == expected
@@ -492,12 +492,15 @@ class Model(BaseModel):
resource_id: Optional[str] = None
m = Model.construct()
+ assert m.resource_id is None
assert "resource_id" not in m.model_fields_set
m = Model.construct(resource_id=None)
+ assert m.resource_id is None
assert "resource_id" in m.model_fields_set
m = Model.construct(resource_id="foo")
+ assert m.resource_id == "foo"
assert "resource_id" in m.model_fields_set
@@ -520,19 +523,15 @@ class Model(BaseModel):
assert m3.to_dict(exclude_none=True) == {}
assert m3.to_dict(exclude_defaults=True) == {}
- if PYDANTIC_V2:
+ class Model2(BaseModel):
+ created_at: datetime
- class Model2(BaseModel):
- created_at: datetime
-
- time_str = "2024-03-21T11:39:01.275859"
- m4 = Model2.construct(created_at=time_str)
- assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)}
- assert m4.to_dict(mode="json") == {"created_at": time_str}
- else:
- with pytest.raises(ValueError, match="mode is only supported in Pydantic v2"):
- m.to_dict(mode="json")
+ time_str = "2024-03-21T11:39:01.275859"
+ m4 = Model2.construct(created_at=time_str)
+ assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)}
+ assert m4.to_dict(mode="json") == {"created_at": time_str}
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"):
m.to_dict(warnings=False)
@@ -557,10 +556,7 @@ class Model(BaseModel):
assert m3.model_dump() == {"foo": None}
assert m3.model_dump(exclude_none=True) == {}
- if not PYDANTIC_V2:
- with pytest.raises(ValueError, match="mode is only supported in Pydantic v2"):
- m.model_dump(mode="json")
-
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"):
m.model_dump(round_trip=True)
@@ -568,6 +564,14 @@ class Model(BaseModel):
m.model_dump(warnings=False)
+def test_compat_method_no_error_for_warnings() -> None:
+ class Model(BaseModel):
+ foo: Optional[str]
+
+ m = Model(foo="hello")
+ assert isinstance(model_dump(m, warnings=False), dict)
+
+
def test_to_json() -> None:
class Model(BaseModel):
foo: Optional[str] = Field(alias="FOO", default=None)
@@ -576,10 +580,10 @@ class Model(BaseModel):
assert json.loads(m.to_json()) == {"FOO": "hello"}
assert json.loads(m.to_json(use_api_names=False)) == {"foo": "hello"}
- if PYDANTIC_V2:
- assert m.to_json(indent=None) == '{"FOO":"hello"}'
- else:
+ if PYDANTIC_V1:
assert m.to_json(indent=None) == '{"FOO": "hello"}'
+ else:
+ assert m.to_json(indent=None) == '{"FOO":"hello"}'
m2 = Model()
assert json.loads(m2.to_json()) == {}
@@ -591,7 +595,7 @@ class Model(BaseModel):
assert json.loads(m3.to_json()) == {"FOO": None}
assert json.loads(m3.to_json(exclude_none=True)) == {}
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"):
m.to_json(warnings=False)
@@ -618,7 +622,7 @@ class Model(BaseModel):
assert json.loads(m3.model_dump_json()) == {"foo": None}
assert json.loads(m3.model_dump_json(exclude_none=True)) == {}
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"):
m.model_dump_json(round_trip=True)
@@ -675,12 +679,12 @@ class B(BaseModel):
)
assert isinstance(m, A)
assert m.type == "a"
- if PYDANTIC_V2:
- assert m.data == 100 # type: ignore[comparison-overlap]
- else:
+ if PYDANTIC_V1:
# pydantic v1 automatically converts inputs to strings
# if the expected type is a str
assert m.data == "100"
+ else:
+ assert m.data == 100 # type: ignore[comparison-overlap]
def test_discriminated_unions_unknown_variant() -> None:
@@ -764,12 +768,12 @@ class B(BaseModel):
)
assert isinstance(m, A)
assert m.foo_type == "a"
- if PYDANTIC_V2:
- assert m.data == 100 # type: ignore[comparison-overlap]
- else:
+ if PYDANTIC_V1:
# pydantic v1 automatically converts inputs to strings
# if the expected type is a str
assert m.data == "100"
+ else:
+ assert m.data == 100 # type: ignore[comparison-overlap]
def test_discriminated_unions_overlapping_discriminators_invalid_data() -> None:
@@ -827,3 +831,133 @@ class B(BaseModel):
# if the discriminator details object stays the same between invocations then
# we hit the cache
assert UnionType.__discriminator__ is discriminator
+
+
+@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1")
+def test_type_alias_type() -> None:
+ Alias = TypeAliasType("Alias", str) # pyright: ignore
+
+ class Model(BaseModel):
+ alias: Alias
+ union: Union[int, Alias]
+
+ m = construct_type(value={"alias": "foo", "union": "bar"}, type_=Model)
+ assert isinstance(m, Model)
+ assert isinstance(m.alias, str)
+ assert m.alias == "foo"
+ assert isinstance(m.union, str)
+ assert m.union == "bar"
+
+
+@pytest.mark.skipif(PYDANTIC_V1, reason="this test is only supported in Pydantic v2")
+def test_field_named_cls() -> None:
+ class Model(BaseModel):
+ cls: str
+
+ m = construct_type(value={"cls": "foo"}, type_=Model)
+ assert isinstance(m, Model)
+ assert isinstance(m.cls, str)
+
+
+def test_discriminated_union_case() -> None:
+ class A(BaseModel):
+ type: Literal["a"]
+
+ data: bool
+
+ class B(BaseModel):
+ type: Literal["b"]
+
+ data: List[Union[A, object]]
+
+ class ModelA(BaseModel):
+ type: Literal["modelA"]
+
+ data: int
+
+ class ModelB(BaseModel):
+ type: Literal["modelB"]
+
+ required: str
+
+ data: Union[A, B]
+
+ # when constructing ModelA | ModelB, value data doesn't match ModelB exactly - missing `required`
+ m = construct_type(
+ value={"type": "modelB", "data": {"type": "a", "data": True}},
+ type_=cast(Any, Annotated[Union[ModelA, ModelB], PropertyInfo(discriminator="type")]),
+ )
+
+ assert isinstance(m, ModelB)
+
+
+def test_nested_discriminated_union() -> None:
+ class InnerType1(BaseModel):
+ type: Literal["type_1"]
+
+ class InnerModel(BaseModel):
+ inner_value: str
+
+ class InnerType2(BaseModel):
+ type: Literal["type_2"]
+ some_inner_model: InnerModel
+
+ class Type1(BaseModel):
+ base_type: Literal["base_type_1"]
+ value: Annotated[
+ Union[
+ InnerType1,
+ InnerType2,
+ ],
+ PropertyInfo(discriminator="type"),
+ ]
+
+ class Type2(BaseModel):
+ base_type: Literal["base_type_2"]
+
+ T = Annotated[
+ Union[
+ Type1,
+ Type2,
+ ],
+ PropertyInfo(discriminator="base_type"),
+ ]
+
+ model = construct_type(
+ type_=T,
+ value={
+ "base_type": "base_type_1",
+ "value": {
+ "type": "type_2",
+ },
+ },
+ )
+ assert isinstance(model, Type1)
+ assert isinstance(model.value, InnerType2)
+
+
+@pytest.mark.skipif(PYDANTIC_V1, reason="this is only supported in pydantic v2 for now")
+def test_extra_properties() -> None:
+ class Item(BaseModel):
+ prop: int
+
+ class Model(BaseModel):
+ __pydantic_extra__: Dict[str, Item] = Field(init=False) # pyright: ignore[reportIncompatibleVariableOverride]
+
+ other: str
+
+ if TYPE_CHECKING:
+
+ def __getattr__(self, attr: str) -> Item: ...
+
+ model = construct_type(
+ type_=Model,
+ value={
+ "a": {"prop": 1},
+ "other": "foo",
+ },
+ )
+ assert isinstance(model, Model)
+ assert model.a.prop == 1
+ assert isinstance(model.a, Item)
+ assert model.other == "foo"
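
Throughout this file the PYDANTIC_V2 branches have been inverted into PYDANTIC_V1 checks, making Pydantic v2 behaviour the default path and v1 the special case. A flag like this is typically derived straight from the installed pydantic version; a rough sketch (the actual definition in browserbase._compat may differ):

    # Rough sketch of a version flag comparable to browserbase._compat.PYDANTIC_V1;
    # the SDK's actual definition may differ.
    import pydantic

    PYDANTIC_V1 = pydantic.VERSION.startswith("1.")
    PYDANTIC_V2 = not PYDANTIC_V1  # v2+ is now the default code path in these tests
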
diff --git a/tests/test_transform.py b/tests/test_transform.py
index 436b8185..c31b1f40 100644
--- a/tests/test_transform.py
+++ b/tests/test_transform.py
@@ -2,20 +2,20 @@
import io
import pathlib
-from typing import Any, List, Union, TypeVar, Iterable, Optional, cast
+from typing import Any, Dict, List, Union, TypeVar, Iterable, Optional, cast
from datetime import date, datetime
from typing_extensions import Required, Annotated, TypedDict
import pytest
-from browserbase._types import Base64FileInput
+from browserbase._types import Base64FileInput, omit, not_given
from browserbase._utils import (
PropertyInfo,
transform as _transform,
parse_datetime,
async_transform as _async_transform,
)
-from browserbase._compat import PYDANTIC_V2
+from browserbase._compat import PYDANTIC_V1
from browserbase._models import BaseModel
_T = TypeVar("_T")
@@ -177,17 +177,32 @@ class DateDict(TypedDict, total=False):
foo: Annotated[date, PropertyInfo(format="iso8601")]
+class DatetimeModel(BaseModel):
+ foo: datetime
+
+
+class DateModel(BaseModel):
+ foo: Optional[date]
+
+
@parametrize
@pytest.mark.asyncio
async def test_iso8601_format(use_async: bool) -> None:
dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00")
+ tz = "+00:00" if PYDANTIC_V1 else "Z"
assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap]
+ assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692" + tz} # type: ignore[comparison-overlap]
dt = dt.replace(tzinfo=None)
assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap]
+ assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap]
assert await transform({"foo": None}, DateDict, use_async) == {"foo": None} # type: ignore[comparison-overlap]
+ assert await transform(DateModel(foo=None), Any, use_async) == {"foo": None} # type: ignore
assert await transform({"foo": date.fromisoformat("2023-02-23")}, DateDict, use_async) == {"foo": "2023-02-23"} # type: ignore[comparison-overlap]
+ assert await transform(DateModel(foo=date.fromisoformat("2023-02-23")), DateDict, use_async) == {
+ "foo": "2023-02-23"
+ } # type: ignore[comparison-overlap]
@parametrize
@@ -282,11 +297,11 @@ async def test_pydantic_unknown_field(use_async: bool) -> None:
@pytest.mark.asyncio
async def test_pydantic_mismatched_types(use_async: bool) -> None:
model = MyModel.construct(foo=True)
- if PYDANTIC_V2:
+ if PYDANTIC_V1:
+ params = await transform(model, Any, use_async)
+ else:
with pytest.warns(UserWarning):
params = await transform(model, Any, use_async)
- else:
- params = await transform(model, Any, use_async)
assert cast(Any, params) == {"foo": True}
@@ -294,11 +309,11 @@ async def test_pydantic_mismatched_types(use_async: bool) -> None:
@pytest.mark.asyncio
async def test_pydantic_mismatched_object_type(use_async: bool) -> None:
model = MyModel.construct(foo=MyModel.construct(hello="world"))
- if PYDANTIC_V2:
+ if PYDANTIC_V1:
+ params = await transform(model, Any, use_async)
+ else:
with pytest.warns(UserWarning):
params = await transform(model, Any, use_async)
- else:
- params = await transform(model, Any, use_async)
assert cast(Any, params) == {"foo": {"hello": "world"}}
@@ -373,6 +388,15 @@ def my_iter() -> Iterable[Baz8]:
}
+@parametrize
+@pytest.mark.asyncio
+async def test_dictionary_items(use_async: bool) -> None:
+ class DictItems(TypedDict):
+ foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")]
+
+ assert await transform({"foo": {"foo_baz": "bar"}}, Dict[str, DictItems], use_async) == {"foo": {"fooBaz": "bar"}}
+
+
class TypedDictIterableUnionStr(TypedDict):
foo: Annotated[Union[str, Iterable[Baz8]], PropertyInfo(alias="FOO")]
@@ -408,3 +432,29 @@ async def test_base64_file_input(use_async: bool) -> None:
assert await transform({"foo": io.BytesIO(b"Hello, world!")}, TypedDictBase64Input, use_async) == {
"foo": "SGVsbG8sIHdvcmxkIQ=="
} # type: ignore[comparison-overlap]
+
+
+@parametrize
+@pytest.mark.asyncio
+async def test_transform_skipping(use_async: bool) -> None:
+ # lists of ints are left as-is
+ data = [1, 2, 3]
+ assert await transform(data, List[int], use_async) is data
+
+ # iterables of ints are converted to a list
+ data = iter([1, 2, 3])
+ assert await transform(data, Iterable[int], use_async) == [1, 2, 3]
+
+
+@parametrize
+@pytest.mark.asyncio
+async def test_strips_notgiven(use_async: bool) -> None:
+ assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"}
+ assert await transform({"foo_bar": not_given}, Foo1, use_async) == {}
+
+
+@parametrize
+@pytest.mark.asyncio
+async def test_strips_omit(use_async: bool) -> None:
+ assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"}
+ assert await transform({"foo_bar": omit}, Foo1, use_async) == {}
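
test_strips_notgiven and test_strips_omit pin down that the omit and not_given sentinels never reach a request body, while an explicit None is kept (compare the DateModel(foo=None) case earlier in this file). A toy sketch of that stripping rule, independent of the SDK's real transform machinery:

    # Toy illustration of the sentinel-stripping contract; not the SDK's transform().
    from typing import Any, Dict

    class _Omit:
        """Stand-in sentinel for values that must never be serialised."""

    OMIT = _Omit()

    def strip_sentinels(params: Dict[str, Any]) -> Dict[str, Any]:
        # Drop sentinel-valued keys; keep everything else, including explicit None.
        return {key: value for key, value in params.items() if not isinstance(value, _Omit)}

    assert strip_sentinels({"foo_bar": "bar", "extra": OMIT}) == {"foo_bar": "bar"}
    assert strip_sentinels({"foo_bar": None}) == {"foo_bar": None}
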
diff --git a/tests/test_utils/test_datetime_parse.py b/tests/test_utils/test_datetime_parse.py
new file mode 100644
index 00000000..2834c471
--- /dev/null
+++ b/tests/test_utils/test_datetime_parse.py
@@ -0,0 +1,110 @@
+"""
+Copied from https://github.com/pydantic/pydantic/blob/v1.10.22/tests/test_datetime_parse.py
+with modifications so it works without pydantic v1 imports.
+"""
+
+from typing import Type, Union
+from datetime import date, datetime, timezone, timedelta
+
+import pytest
+
+from browserbase._utils import parse_date, parse_datetime
+
+
+def create_tz(minutes: int) -> timezone:
+ return timezone(timedelta(minutes=minutes))
+
+
+@pytest.mark.parametrize(
+ "value,result",
+ [
+ # Valid inputs
+ ("1494012444.883309", date(2017, 5, 5)),
+ (b"1494012444.883309", date(2017, 5, 5)),
+ (1_494_012_444.883_309, date(2017, 5, 5)),
+ ("1494012444", date(2017, 5, 5)),
+ (1_494_012_444, date(2017, 5, 5)),
+ (0, date(1970, 1, 1)),
+ ("2012-04-23", date(2012, 4, 23)),
+ (b"2012-04-23", date(2012, 4, 23)),
+ ("2012-4-9", date(2012, 4, 9)),
+ (date(2012, 4, 9), date(2012, 4, 9)),
+ (datetime(2012, 4, 9, 12, 15), date(2012, 4, 9)),
+ # Invalid inputs
+ ("x20120423", ValueError),
+ ("2012-04-56", ValueError),
+ (19_999_999_999, date(2603, 10, 11)), # just before watershed
+ (20_000_000_001, date(1970, 8, 20)), # just after watershed
+ (1_549_316_052, date(2019, 2, 4)), # nowish in s
+ (1_549_316_052_104, date(2019, 2, 4)), # nowish in ms
+ (1_549_316_052_104_324, date(2019, 2, 4)), # nowish in μs
+ (1_549_316_052_104_324_096, date(2019, 2, 4)), # nowish in ns
+ ("infinity", date(9999, 12, 31)),
+ ("inf", date(9999, 12, 31)),
+ (float("inf"), date(9999, 12, 31)),
+ ("infinity ", date(9999, 12, 31)),
+ (int("1" + "0" * 100), date(9999, 12, 31)),
+ (1e1000, date(9999, 12, 31)),
+ ("-infinity", date(1, 1, 1)),
+ ("-inf", date(1, 1, 1)),
+ ("nan", ValueError),
+ ],
+)
+def test_date_parsing(value: Union[str, bytes, int, float], result: Union[date, Type[Exception]]) -> None:
+ if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance]
+ with pytest.raises(result):
+ parse_date(value)
+ else:
+ assert parse_date(value) == result
+
+
+@pytest.mark.parametrize(
+ "value,result",
+ [
+ # Valid inputs
+ # values in seconds
+ ("1494012444.883309", datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)),
+ (1_494_012_444.883_309, datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)),
+ ("1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ (b"1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ (1_494_012_444, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ # values in ms
+ ("1494012444000.883309", datetime(2017, 5, 5, 19, 27, 24, 883, tzinfo=timezone.utc)),
+ ("-1494012444000.883309", datetime(1922, 8, 29, 4, 32, 35, 999117, tzinfo=timezone.utc)),
+ (1_494_012_444_000, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ ("2012-04-23T09:15:00", datetime(2012, 4, 23, 9, 15)),
+ ("2012-4-9 4:8:16", datetime(2012, 4, 9, 4, 8, 16)),
+ ("2012-04-23T09:15:00Z", datetime(2012, 4, 23, 9, 15, 0, 0, timezone.utc)),
+ ("2012-4-9 4:8:16-0320", datetime(2012, 4, 9, 4, 8, 16, 0, create_tz(-200))),
+ ("2012-04-23T10:20:30.400+02:30", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(150))),
+ ("2012-04-23T10:20:30.400+02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(120))),
+ ("2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))),
+ (b"2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))),
+ (datetime(2017, 5, 5), datetime(2017, 5, 5)),
+ (0, datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)),
+ # Invalid inputs
+ ("x20120423091500", ValueError),
+ ("2012-04-56T09:15:90", ValueError),
+ ("2012-04-23T11:05:00-25:00", ValueError),
+ (19_999_999_999, datetime(2603, 10, 11, 11, 33, 19, tzinfo=timezone.utc)), # just before watershed
+ (20_000_000_001, datetime(1970, 8, 20, 11, 33, 20, 1000, tzinfo=timezone.utc)), # just after watershed
+ (1_549_316_052, datetime(2019, 2, 4, 21, 34, 12, 0, tzinfo=timezone.utc)), # nowish in s
+ (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, tzinfo=timezone.utc)), # nowish in ms
+ (1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in μs
+ (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in ns
+ ("infinity", datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ ("inf", datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ ("inf ", datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ (1e50, datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ (float("inf"), datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ ("-infinity", datetime(1, 1, 1, 0, 0)),
+ ("-inf", datetime(1, 1, 1, 0, 0)),
+ ("nan", ValueError),
+ ],
+)
+def test_datetime_parsing(value: Union[str, bytes, int, float], result: Union[datetime, Type[Exception]]) -> None:
+ if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance]
+ with pytest.raises(result):
+ parse_datetime(value)
+ else:
+ assert parse_datetime(value) == result
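
The "just before/after watershed" rows encode pydantic v1's numeric-timestamp heuristic, which this vendored parser mirrors: magnitudes up to roughly 2e10 are read as seconds and anything larger as milliseconds (then microseconds, then nanoseconds). A quick sanity check of the two boundary rows under that assumption:

    # Sanity check of the seconds-vs-milliseconds watershed (~2e10) that the
    # "just before/after watershed" rows assume.
    from datetime import datetime, timezone

    assert datetime.fromtimestamp(19_999_999_999, tz=timezone.utc).year == 2603        # read as seconds
    assert datetime.fromtimestamp(20_000_000_001 / 1000, tz=timezone.utc).year == 1970  # read as ms
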
diff --git a/tests/test_utils/test_proxy.py b/tests/test_utils/test_proxy.py
index 986bef9d..d4e2f311 100644
--- a/tests/test_utils/test_proxy.py
+++ b/tests/test_utils/test_proxy.py
@@ -21,3 +21,14 @@ def test_recursive_proxy() -> None:
assert dir(proxy) == []
assert type(proxy).__name__ == "RecursiveLazyProxy"
assert type(operator.attrgetter("name.foo.bar.baz")(proxy)).__name__ == "RecursiveLazyProxy"
+
+
+def test_isinstance_does_not_error() -> None:
+ class AlwaysErrorProxy(LazyProxy[Any]):
+ @override
+ def __load__(self) -> Any:
+ raise RuntimeError("Mocking missing dependency")
+
+ proxy = AlwaysErrorProxy()
+ assert not isinstance(proxy, dict)
+ assert isinstance(proxy, LazyProxy)
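
The new proxy test guards against isinstance() forcing __load__, which would raise whenever the proxied dependency is missing. A rough sketch, not the SDK's actual LazyProxy, of why only attribute access should trigger loading:

    # Minimal lazy-proxy sketch (not browserbase's LazyProxy): only attribute
    # access forces __load__, so isinstance() stays safe even when loading fails.
    from typing import Any

    class TinyLazyProxy:
        def __load__(self) -> Any:
            raise RuntimeError("missing dependency")

        def __getattr__(self, name: str) -> Any:
            return getattr(self.__load__(), name)

    proxy = TinyLazyProxy()
    assert not isinstance(proxy, dict)       # no __load__ call, no RuntimeError
    assert isinstance(proxy, TinyLazyProxy)
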
diff --git a/tests/utils.py b/tests/utils.py
index ad9be375..55521a9b 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -4,7 +4,7 @@
import inspect
import traceback
import contextlib
-from typing import Any, TypeVar, Iterator, cast
+from typing import Any, TypeVar, Iterator, Sequence, cast
from datetime import date, datetime
from typing_extensions import Literal, get_args, get_origin, assert_type
@@ -15,9 +15,11 @@
is_list_type,
is_union_type,
extract_type_arg,
+ is_sequence_type,
is_annotated_type,
+ is_type_alias_type,
)
-from browserbase._compat import PYDANTIC_V2, field_outer_type, get_model_fields
+from browserbase._compat import PYDANTIC_V1, field_outer_type, get_model_fields
from browserbase._models import BaseModel
BaseModelT = TypeVar("BaseModelT", bound=BaseModel)
@@ -26,12 +28,12 @@
def assert_matches_model(model: type[BaseModelT], value: BaseModelT, *, path: list[str]) -> bool:
for name, field in get_model_fields(model).items():
field_value = getattr(value, name)
- if PYDANTIC_V2:
- allow_none = False
- else:
+ if PYDANTIC_V1:
# in v1 nullability was structured differently
# https://docs.pydantic.dev/2.0/migration/#required-optional-and-nullable-fields
allow_none = getattr(field, "allow_none", False)
+ else:
+ allow_none = False
assert_matches_type(
field_outer_type(field),
@@ -51,6 +53,9 @@ def assert_matches_type(
path: list[str],
allow_none: bool = False,
) -> None:
+ if is_type_alias_type(type_):
+ type_ = type_.__value__
+
# unwrap `Annotated[T, ...]` -> `T`
if is_annotated_type(type_):
type_ = extract_type_arg(type_, 0)
@@ -67,6 +72,13 @@ def assert_matches_type(
if is_list_type(type_):
return _assert_list_type(type_, value)
+ if is_sequence_type(type_):
+ assert isinstance(value, Sequence)
+ inner_type = get_args(type_)[0]
+ for entry in value: # type: ignore
+ assert_type(inner_type, entry) # type: ignore
+ return
+
if origin == str:
assert isinstance(value, str)
elif origin == int: