diff --git a/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE/bug_report.md similarity index 62% rename from ISSUE_TEMPLATE.md rename to .github/ISSUE_TEMPLATE/bug_report.md index 5de83b2cc..2970d494f 100644 --- a/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,10 +1,21 @@ +--- +name: Bug report +about: Bug reports related to any component in this repo +title: '' +labels: '' +assignees: '' + +--- + ### [READ] Step 1: Are you in the right place? - * For issues or feature requests related to __the code in this repository__ - file a Github issue. - * If this is a __feature request__ make sure the issue title starts with "FR:". + * For issues related to __the code in this repository__ file a GitHub issue. + * If the issue pertains to __Cloud Firestore__, report directly in the + [Python Firestore](https://github.com/googleapis/python-firestore) GitHub repo. Firestore + bugs reported in this repo will be closed with a reference to the Python Firestore + project. * For general technical questions, post a question on [StackOverflow](http://stackoverflow.com/) - with the firebase tag. + with the `firebase` tag. * For general Firebase discussion, use the [firebase-talk](https://groups.google.com/forum/#!forum/firebase-talk) google group. 
* For help troubleshooting your application that does not fall under one @@ -15,8 +26,9 @@ * Operating System version: _____ * Firebase SDK version: _____ - * Library version: _____ * Firebase Product: _____ (auth, database, storage, etc) + * Python version: _____ + * Pip version: _____ ### [REQUIRED] Step 3: Describe the problem diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..7729d13a4 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: "[FR]" +labels: 'type: feature request' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context, code samples or screenshots about the feature request here. diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..6a7695c06 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/resources/integ-service-account.json.gpg b/.github/resources/integ-service-account.json.gpg new file mode 100644 index 000000000..5a52805c9 Binary files /dev/null and b/.github/resources/integ-service-account.json.gpg differ diff --git a/.github/scripts/generate_changelog.sh b/.github/scripts/generate_changelog.sh new file mode 100755 index 000000000..e393f40e4 --- /dev/null +++ b/.github/scripts/generate_changelog.sh @@ -0,0 +1,79 @@ +#!/bin/bash + +# Copyright 2020 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e +set -u + +function printChangelog() { + local TITLE=$1 + shift + # Skip the sentinel value. + local ENTRIES=("${@:2}") + if [ ${#ENTRIES[@]} -ne 0 ]; then + echo "### ${TITLE}" + echo "" + for ((i = 0; i < ${#ENTRIES[@]}; i++)) + do + echo "* ${ENTRIES[$i]}" + done + echo "" + fi +} + +if [[ -z "${GITHUB_SHA}" ]]; then + GITHUB_SHA="HEAD" +fi + +LAST_TAG=`git describe --tags $(git rev-list --tags --max-count=1) 2> /dev/null` || true +if [[ -z "${LAST_TAG}" ]]; then + echo "[INFO] No tags found. Including all commits up to ${GITHUB_SHA}." + VERSION_RANGE="${GITHUB_SHA}" +else + echo "[INFO] Last release tag: ${LAST_TAG}." + COMMIT_SHA=`git show-ref -s ${LAST_TAG}` + echo "[INFO] Last release commit: ${COMMIT_SHA}." + VERSION_RANGE="${COMMIT_SHA}..${GITHUB_SHA}" + echo "[INFO] Including all commits in the range ${VERSION_RANGE}." +fi + +echo "" + +# Older versions of Bash (< 4.4) treat empty arrays as unbound variables, which triggers +# errors when referencing them. Therefore we initialize each of these arrays with an empty +# sentinel value, and later skip them. 
+CHANGES=("") +FIXES=("") +FEATS=("") +MISC=("") + +while read -r line +do + COMMIT_MSG=`echo ${line} | cut -d ' ' -f 2-` + if [[ $COMMIT_MSG =~ ^change(\(.*\))?: ]]; then + CHANGES+=("$COMMIT_MSG") + elif [[ $COMMIT_MSG =~ ^fix(\(.*\))?: ]]; then + FIXES+=("$COMMIT_MSG") + elif [[ $COMMIT_MSG =~ ^feat(\(.*\))?: ]]; then + FEATS+=("$COMMIT_MSG") + else + MISC+=("${COMMIT_MSG}") + fi +done < <(git log ${VERSION_RANGE} --oneline) + +printChangelog "Breaking Changes" "${CHANGES[@]}" +printChangelog "New Features" "${FEATS[@]}" +printChangelog "Bug Fixes" "${FIXES[@]}" +printChangelog "Miscellaneous" "${MISC[@]}" diff --git a/.github/scripts/publish_preflight_check.sh b/.github/scripts/publish_preflight_check.sh new file mode 100755 index 000000000..38fe49a88 --- /dev/null +++ b/.github/scripts/publish_preflight_check.sh @@ -0,0 +1,184 @@ +#!/bin/bash + +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +###################################### Outputs ##################################### + +# 1. version: The version of this release including the 'v' prefix (e.g. v1.2.3). +# 2. changelog: Formatted changelog text for this release. 
+ +#################################################################################### + +set -e +set -u + +function echo_info() { + local MESSAGE=$1 + echo "[INFO] ${MESSAGE}" +} + +function echo_warn() { + local MESSAGE=$1 + echo "[WARN] ${MESSAGE}" +} + +function terminate() { + echo "" + echo_warn "--------------------------------------------" + echo_warn "PREFLIGHT FAILED" + echo_warn "--------------------------------------------" + exit 1 +} + + +echo_info "Starting publish preflight check..." +echo_info "Git revision : ${GITHUB_SHA}" +echo_info "Workflow triggered by : ${GITHUB_ACTOR}" +echo_info "GitHub event : ${GITHUB_EVENT_NAME}" + + +echo_info "" +echo_info "--------------------------------------------" +echo_info "Extracting release version" +echo_info "--------------------------------------------" +echo_info "" + +readonly ABOUT_FILE="firebase_admin/__about__.py" +echo_info "Loading version from: ${ABOUT_FILE}" + +readonly RELEASE_VERSION=`grep "__version__" ${ABOUT_FILE} | awk '{print $3}' | tr -d \'` || true +if [[ -z "${RELEASE_VERSION}" ]]; then + echo_warn "Failed to extract release version from: ${ABOUT_FILE}" + terminate +fi + +if [[ ! "${RELEASE_VERSION}" =~ ^([0-9]*)\.([0-9]*)\.([0-9]*)$ ]]; then + echo_warn "Malformed release version string: ${RELEASE_VERSION}. Exiting." + terminate +fi + +echo_info "Extracted release version: ${RELEASE_VERSION}" +echo "version=v${RELEASE_VERSION}" >> $GITHUB_OUTPUT + + +echo_info "" +echo_info "--------------------------------------------" +echo_info "Check release artifacts" +echo_info "--------------------------------------------" +echo_info "" + +if [[ ! -d dist ]]; then + echo_warn "dist directory does not exist." + terminate +fi + +readonly BIN_DIST="dist/firebase_admin-${RELEASE_VERSION}-py3-none-any.whl" +if [[ -f "${BIN_DIST}" ]]; then + echo_info "Found binary distribution (bdist_wheel): ${BIN_DIST}" +else + echo_warn "Binary distribution ${BIN_DIST} not found." 
+ terminate +fi + +readonly SRC_DIST="dist/firebase_admin-${RELEASE_VERSION}.tar.gz" +if [[ -f "${SRC_DIST}" ]]; then + echo_info "Found source distribution (sdist): ${SRC_DIST}" +else + echo_warn "Source distribution ${SRC_DIST} not found." + terminate +fi + +readonly ARTIFACT_COUNT=`ls dist/ | wc -l` +if [[ $ARTIFACT_COUNT -ne 2 ]]; then + echo_warn "Unexpected artifacts in the distribution directory." + ls -l dist + terminate +fi + + +echo_info "" +echo_info "--------------------------------------------" +echo_info "Checking previous releases" +echo_info "--------------------------------------------" +echo_info "" + +readonly PYPI_URL="https://pypi.org/pypi/firebase-admin/${RELEASE_VERSION}/json" +readonly PYPI_STATUS=`curl -s -o /dev/null -L -w "%{http_code}" ${PYPI_URL}` +if [[ $PYPI_STATUS -eq 404 ]]; then + echo_info "Release version ${RELEASE_VERSION} not found in Pypi." +elif [[ $PYPI_STATUS -eq 200 ]]; then + echo_warn "Release version ${RELEASE_VERSION} already present in Pypi." + terminate +else + echo_warn "Unexpected ${PYPI_STATUS} response from Pypi. Exiting." + terminate +fi + + +echo_info "" +echo_info "--------------------------------------------" +echo_info "Checking release tag" +echo_info "--------------------------------------------" +echo_info "" + +echo_info "---< git fetch --depth=1 origin +refs/tags/*:refs/tags/* >---" +git fetch --depth=1 origin +refs/tags/*:refs/tags/* +echo "" + +readonly EXISTING_TAG=`git rev-parse -q --verify "refs/tags/v${RELEASE_VERSION}"` || true +if [[ -n "${EXISTING_TAG}" ]]; then + echo_warn "Tag v${RELEASE_VERSION} already exists. Exiting." + echo_warn "If the tag was created in a previous unsuccessful attempt, delete it and try again." 
+ echo_warn " $ git tag -d v${RELEASE_VERSION}" + echo_warn " $ git push --delete origin v${RELEASE_VERSION}" + + readonly RELEASE_URL="https://github.com/firebase/firebase-admin-python/releases/tag/v${RELEASE_VERSION}" + echo_warn "Delete any corresponding releases at ${RELEASE_URL}." + terminate +fi + +echo_info "Tag v${RELEASE_VERSION} does not exist." + + +echo_info "" +echo_info "--------------------------------------------" +echo_info "Generating changelog" +echo_info "--------------------------------------------" +echo_info "" + +echo_info "---< git fetch origin main --prune --unshallow >---" +git fetch origin main --prune --unshallow +echo "" + +echo_info "Generating changelog from history..." +readonly CURRENT_DIR=$(dirname "$0") +readonly CHANGELOG=`${CURRENT_DIR}/generate_changelog.sh` +echo "$CHANGELOG" + +# Parse and preformat the text to handle multi-line output. +# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#example-of-a-multiline-string +# and https://github.com/github/docs/issues/21529#issue-1418590935 +FILTERED_CHANGELOG=`echo "$CHANGELOG" | grep -v "\\[INFO\\]"` +FILTERED_CHANGELOG="${FILTERED_CHANGELOG//$'\''/'"'}" +echo "changelog<<CHANGELOGEOF" >> $GITHUB_OUTPUT +echo -e "$FILTERED_CHANGELOG" >> $GITHUB_OUTPUT +echo "CHANGELOGEOF" >> $GITHUB_OUTPUT + + +echo "" +echo_info "--------------------------------------------" +echo_info "PREFLIGHT SUCCESSFUL" +echo_info "--------------------------------------------" diff --git a/.github/scripts/run_integration_tests.sh b/.github/scripts/run_integration_tests.sh new file mode 100755 index 000000000..96b0ad75d --- /dev/null +++ b/.github/scripts/run_integration_tests.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -e +set -u + +gpg --quiet --batch --yes --decrypt --passphrase="${FIREBASE_SERVICE_ACCT_KEY}" \ + --output integ-service-account.json .github/resources/integ-service-account.json.gpg + +echo "${FIREBASE_API_KEY}" > integ-api-key.txt + +pytest integration/ --cert integ-service-account.json --apikey integ-api-key.txt diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000..5bf78a56b --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,65 @@ +name: Continuous Integration + +on: pull_request + +jobs: + build: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python: ['3.9', '3.10', '3.11', '3.12', '3.13', 'pypy3.9'] + + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # 4.3.1 + + - name: Set up Python 3.13 for emulator + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # 5.6.0 + with: + python-version: '3.13' + - name: Setup functions emulator environment + run: | + python -m venv integration/emulators/functions/venv + source integration/emulators/functions/venv/bin/activate + pip install -r integration/emulators/functions/requirements.txt + deactivate + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # 5.6.0 + with: + python-version: ${{ matrix.python }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + - name: Test with pytest + run: pytest + - name: Set up Node.js 20 + uses: 
actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # 4.4.0 + with: + node-version: 20 + - name: Set up Java 21 + uses: actions/setup-java@f2beeb24e141e01a676f977032f5a29d81c9e27e # 5.1.0 + with: + distribution: 'temurin' + java-version: '21' + check-latest: true + - name: Install firebase-tools + run: npm install -g firebase-tools + - name: Run Database emulator tests + run: firebase emulators:exec --only database --project fake-project-id 'pytest integration/test_db.py' + - name: Run Functions emulator tests + run: firebase emulators:exec --config integration/emulators/firebase.json --only tasks,functions --project fake-project-id 'CLOUD_TASKS_EMULATOR_HOST=localhost:9499 pytest integration/test_functions.py' + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # 4.3.1 + - name: Set up Python 3.9 + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # 5.6.0 + with: + python-version: 3.9 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + - name: Lint with pylint + run: ./lint.sh all diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml new file mode 100644 index 000000000..d60b3cd0b --- /dev/null +++ b/.github/workflows/nightly.yml @@ -0,0 +1,99 @@ +# Copyright 2021 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: Nightly Builds + +on: + # Runs every day at 06:20 AM (PT) and 08:20 PM (PT) / 04:20 AM (UTC) and 02:20 PM (UTC) + # or on 'firebase_nightly_build' repository dispatch event. + schedule: + - cron: "20 4,14 * * *" + repository_dispatch: + types: [firebase_nightly_build] + +jobs: + nightly: + + runs-on: ubuntu-latest + + steps: + - name: Checkout source for staging + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # 4.3.1 + with: + ref: ${{ github.event.client_payload.ref || github.ref }} + + - name: Set up Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # 5.6.0 + with: + python-version: 3.9 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install setuptools wheel + pip install tensorflow + pip install keras + pip install build + + - name: Run unit tests + run: pytest + + - name: Run integration tests + run: ./.github/scripts/run_integration_tests.sh + env: + FIREBASE_SERVICE_ACCT_KEY: ${{ secrets.FIREBASE_SERVICE_ACCT_KEY }} + FIREBASE_API_KEY: ${{ secrets.FIREBASE_API_KEY }} + + # Build the Python Wheel and the source distribution. + - name: Package release artifacts + run: python -m build + + # Attach the packaged artifacts to the workflow output. These can be manually + # downloaded for later inspection if necessary. + - name: Archive artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: dist + path: dist + + - name: Send email on failure + if: failure() + uses: firebase/firebase-admin-node/.github/actions/send-email@2e2b36a84ba28679bcb7aecdacabfec0bded2d48 # Admin Node SDK v13.6.0 + with: + api-key: ${{ secrets.OSS_BOT_MAILGUN_KEY }} + domain: ${{ secrets.OSS_BOT_MAILGUN_DOMAIN }} + from: 'GitHub ' + to: ${{ secrets.FIREBASE_ADMIN_GITHUB_EMAIL }} + subject: 'Nightly build ${{github.run_id}} of ${{github.repository}} failed!' 
+ html: > + Nightly workflow ${{github.run_id}} failed on: ${{github.repository}} +

Navigate to the + failed workflow. + continue-on-error: true + + - name: Send email on cancelled + if: cancelled() + uses: firebase/firebase-admin-node/.github/actions/send-email@2e2b36a84ba28679bcb7aecdacabfec0bded2d48 # Admin Node SDK v13.6.0 + with: + api-key: ${{ secrets.OSS_BOT_MAILGUN_KEY }} + domain: ${{ secrets.OSS_BOT_MAILGUN_DOMAIN }} + from: 'GitHub ' + to: ${{ secrets.FIREBASE_ADMIN_GITHUB_EMAIL }} + subject: 'Nightly build ${{github.run_id}} of ${{github.repository}} cancelled!' + html: > + Nightly workflow ${{github.run_id}} cancelled on: ${{github.repository}} +

Navigate to the + cancelled workflow. + continue-on-error: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..6bbf19aab --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,133 @@ +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: Release Candidate + +on: + # Run the workflow when: + # 1. A PR is created or updated (staging checks). + # 2. A commit is pushed to main (release publication). + # 3. A developer explicitly requests a build via 'firebase_build' event. + pull_request: + types: [opened, synchronize] + + push: + branches: + - main + paths: + - 'firebase_admin/__about__.py' + + repository_dispatch: + types: + - firebase_build + +jobs: + stage_release: + # To publish a release, merge a PR with the title prefix '[chore] Release ' to main + # and ensure the squashed commit message also has the prefix. + # To stage a release without publishing it, send a 'firebase_build' event or apply + # the 'release:stage' label to a PR. 
+ if: github.event.action == 'firebase_build' || + contains(github.event.pull_request.labels.*.name, 'release:stage') || + (github.event_name == 'push' && startsWith(github.event.head_commit.message, '[chore] Release ')) + + runs-on: ubuntu-latest + + steps: + - name: Checkout source for staging + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # 4.3.1 + + - name: Set up Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # 5.6.0 + with: + python-version: 3.9 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install setuptools wheel + pip install tensorflow + pip install keras + pip install build + + - name: Run unit tests + run: pytest + + - name: Run integration tests + run: ./.github/scripts/run_integration_tests.sh + env: + FIREBASE_SERVICE_ACCT_KEY: ${{ secrets.FIREBASE_SERVICE_ACCT_KEY }} + FIREBASE_API_KEY: ${{ secrets.FIREBASE_API_KEY }} + + # Build the Python Wheel and the source distribution. + - name: Package release artifacts + run: python -m build + + # Attach the packaged artifacts to the workflow output. These can be manually + # downloaded for later inspection if necessary. + - name: Archive artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: dist + path: dist + + publish_release: + needs: stage_release + + # Check whether the release should be published. We publish only when the trigger is + # 1. a push (merge) + # 2. to the main branch + # 3. and the commit message has the title prefix '[chore] Release '. 
+ if: github.event_name == 'push' && + github.ref == 'refs/heads/main' && + startsWith(github.event.head_commit.message, '[chore] Release ') + + runs-on: ubuntu-latest + environment: Release + permissions: + # Used to create a short-lived OIDC token which is given to PyPi to identify this workflow job + # See: https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#adding-permissions-settings + # and https://docs.pypi.org/trusted-publishers/using-a-publisher/ + id-token: write + contents: write + + steps: + - name: Checkout source for publish + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # 4.3.1 + + # Download the artifacts created by the stage_release job. + - name: Download release candidates + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: dist + path: dist + + - name: Publish preflight check + id: preflight + run: ./.github/scripts/publish_preflight_check.sh + + # See: https://cli.github.com/manual/gh_release_create + - name: Create release tag + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + RELEASE_VER: ${{ steps.preflight.outputs.version }} + RELEASE_BODY: ${{ steps.preflight.outputs.changelog }} + run: | + gh release create "$RELEASE_VER" \ + --title "Firebase Admin Python SDK $RELEASE_VER" \ + --notes "$RELEASE_BODY" + + - name: Publish to Pypi + uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0 diff --git a/.gitignore b/.gitignore index a3e914db2..d9d47dc51 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,13 @@ .cache/ .tox/ *.egg-info/ +build/ +dist/ *~ +cert.json +apikey.txt +htmlcov/ +.pytest_cache/ +.vscode/ +.venv/ +.DS_Store diff --git a/.opensource/project.json b/.opensource/project.json new file mode 100644 index 000000000..162f67ba3 --- /dev/null +++ b/.opensource/project.json @@ -0,0 +1,14 @@ +{ + "name": "Firebase Admin SDK - Python", + "platforms": [ + "Python", 
+ "Admin" + ], + "content": "README.md", + "pages": [], + "related": [ + "firebase/firebase-admin-java", + "firebase/firebase-admin-node", + "firebase/firebase-admin-go" + ] +} diff --git a/.pylintrc b/.pylintrc index 2155853c7..ea54e481c 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,4 +1,4 @@ -[MASTER] +[MAIN] # Specify a configuration file. #rcfile= @@ -20,7 +20,9 @@ persistent=no # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. -load-plugins=pylint.extensions.docparams,pylint.extensions.docstyle +load-plugins=pylint.extensions.docparams, + pylint.extensions.docstyle, + pylint.extensions.bad_builtin, # Use multiple processes to speed up Pylint. jobs=1 @@ -34,15 +36,6 @@ unsafe-load-any-extension=no # run arbitrary code extension-pkg-whitelist= -# Allow optimization of some AST trees. This will activate a peephole AST -# optimizer, which will apply various small optimizations. For instance, it can -# be used to obtain the result of joining multiple strings with the addition -# operator. Joining a lot of strings can lead to a maximum recursion error in -# Pylint and this flag can prevent that. It has one side effect, the resulting -# AST will be different than the one from reality. This option is deprecated -# and it will be removed in Pylint 2.0. -optimize-ast=no - [MESSAGES CONTROL] @@ -65,21 +58,31 @@ enable=indexing-exception,old-raise-syntax # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=design,similarities,no-self-use,attribute-defined-outside-init,locally-disabled,star-args,pointless-except,bad-option-value,global-statement,fixme,suppressed-message,useless-suppression,locally-enabled,file-ignored,missing-type-doc +disable=design, + similarities, + no-self-use, + attribute-defined-outside-init, + locally-disabled, + star-args, + pointless-except, + bad-option-value, + global-statement, + fixme, + suppressed-message, + useless-suppression, + locally-enabled, + file-ignored, + missing-type-doc, + c-extension-no-member, [REPORTS] -# Set the output format. Available formats are text, parseable, colorized, msvs -# (visual studio) and html. You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". This option is deprecated -# and it will be removed in Pylint 2.0. -files-output=no +# Set the output format. Available formats are: 'text', 'parseable', +# 'colorized', 'json2' (improved json format), 'json' (old json format), msvs +# (visual studio) and 'github' (GitHub actions). You can also give a reporter +# class, e.g. mypackage.mymodule.MyReporterClass. +output-format=colorized # Tells whether to display a full report or only the messages reports=no @@ -176,9 +179,12 @@ logging-modules=logging good-names=main,_ # Bad variable names which should always be refused, separated by a comma -bad-names= - -bad-functions=input,apply,reduce +bad-names=foo, + bar, + baz, + toto, + tutu, + tata # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. 
@@ -194,64 +200,33 @@ property-classes=abc.abstractproperty # Regular expression matching correct function names function-rgx=[a-z_][a-z0-9_]*$ -# Naming hint for function names -function-name-hint=[a-z_][a-z0-9_]*$ - # Regular expression matching correct variable names variable-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for variable names -variable-name-hint=[a-z_][a-z0-9_]{2,30}$ - # Regular expression matching correct constant names const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ - -# Naming hint for constant names -const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - # Regular expression matching correct attribute names attr-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for attribute names -attr-name-hint=[a-z_][a-z0-9_]{2,30}$ - # Regular expression matching correct argument names argument-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for argument names -argument-name-hint=[a-z_][a-z0-9_]{2,30}$ - # Regular expression matching correct class attribute names class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ -# Naming hint for class attribute names -class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - # Regular expression matching correct inline iteration names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ -# Naming hint for inline iteration names -inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ - # Regular expression matching correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ -# Naming hint for class names -class-name-hint=[A-Z_][a-zA-Z0-9]+$ - # Regular expression matching correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ -# Naming hint for module names -module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - # Regular expression matching correct method names method-rgx=[a-z_][a-z0-9_]*$ -# Naming hint for method names -method-name-hint=[a-z_][a-z0-9_]*$ - # Regular expression which should only match function or class names that do # not require a docstring. 
no-docstring-rgx=(__.*__|main) @@ -294,12 +269,6 @@ ignore-long-lines=^\s*(# )??$ # else. single-line-if-stmt=no -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma,dict-separator - # Maximum number of lines in a module max-module-lines=1000 @@ -405,6 +374,12 @@ exclude-protected=_asdict,_fields,_replace,_source,_make [EXCEPTIONS] -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=Exception +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=builtins.BaseException,builtins.Exception + +[DEPRECATED_BUILTINS] + +# List of builtins function names that should not be used, separated by a comma +bad-functions=input, + apply, + reduce diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index ac3341a22..000000000 --- a/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: python -python: - - "2.7" - - "3.3" - - "3.5" -# command to install dependencies -install: "pip install -r requirements.txt" -# command to run tests -script: pytest diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 000000000..28bba4b55 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,170 @@ +# Firebase Admin Python SDK - Agent Guide + +This document provides AI agents with a comprehensive guide to the conventions, design patterns, and architectural nuances of the Firebase Admin Python SDK. Adhering to this guide ensures that all contributions are idiomatic and align with the existing codebase. + +## 1. High-Level Overview + +The Firebase Admin Python SDK provides a Pythonic interface to Firebase services. Its design emphasizes thread-safety, a consistent and predictable API, and seamless integration with Google Cloud Platform services. + +## 2. 
Directory Structure
+
+- `firebase_admin/`: The main package directory.
+  - `__init__.py`: The primary entry point. It exposes the `initialize_app()` function and manages the lifecycle of `App` instances.
+  - `exceptions.py`: Defines the custom exception hierarchy for the SDK.
+  - `_http_client.py`: Contains the centralized `JsonHttpClient` and `HttpxAsyncClient` for all outgoing HTTP requests.
+  - Service modules (e.g., `auth.py`, `db.py`, `messaging.py`): Each module contains the logic for a specific Firebase service.
+- `tests/`: Contains all unit tests.
+  - `tests/resources/`: Contains mock data, keys, and other test assets.
+- `integration/`: Contains all integration tests.
+  - These integration tests require a real Firebase project to run against.
+  - `integration/conftest.py`: Provides configuration for these integration tests, including how credentials are supplied through pytest.
+- `snippets/`: Contains code snippets used in documentation.
+- `setup.py`: Package definition, including the required environment dependencies.
+- `requirements.txt`: A list of all development dependencies.
+- `.pylintrc`: Configuration file for the `pylint` linter.
+- `CONTRIBUTING.md`: General guidelines for human contributors. Your instructions here supersede this file.
+
+## 3. Core Design Patterns
+
+### Initialization
+
+The SDK is initialized by calling the `initialize_app(credential, options)` function. This creates a default `App` instance that SDK modules use implicitly. For multi-project use cases, named apps can be created by providing a `name` argument: `initialize_app(credential, options, name='my_app')`.
+
+### Service Clients
+
+Service clients are accessed via module-level factory functions. These functions automatically use the default app unless a specific `App` object is provided via the `app` parameter. The clients are created lazily and cached for the lifetime of the application.
+
+- **Direct Action Modules (auth, db)**: Some modules provide functions that perform actions directly.
+- **Client Factory Modules (firestore, storage)**: Other modules have a function (e.g., client() or bucket()) that returns a client object, which you then use for operations.
+
+
+### Error Handling
+
+- All SDK-specific exceptions inherit from `firebase_admin.exceptions.FirebaseError`.
+- Specific error conditions are represented by subclasses, such as `firebase_admin.exceptions.InvalidArgumentError` and `firebase_admin.exceptions.UnauthenticatedError`.
+- Each service may additionally define exceptions under these subclasses and apply them by passing a handle function to `_utils.handle_platform_error_from_requests()` or `_utils.handle_platform_error_from_httpx()`. Each service's error handling patterns should be considered before making changes.
+
+### HTTP Communication
+
+- All synchronous HTTP requests are made through the `JsonHttpClient` class in `firebase_admin._http_client`.
+- All asynchronous HTTP requests are made through the `HttpxAsyncClient` class in `firebase_admin._http_client`.
+- These clients handle authentication and retries for all API calls.
+
+### Asynchronous Operations
+
+Asynchronous operations are supported using Python's `asyncio` library. Asynchronous methods are typically named with an `_async` suffix (e.g., `messaging.send_each_async()`).
+
+## 4. Coding Style and Naming Conventions
+
+- **Formatting:** This project uses **pylint** to enforce code style and detect potential errors. Before submitting code, you **must** run the linter and ensure your changes do not introduce any new errors. Run the linter from the repository's root directory with the following command:
+  ```bash
+  ./lint.sh all # Lint all source files
+  ```
+  or
+  ```bash
+  ./lint.sh # Lint locally modified source files
+  ```
+- **Naming:**
+  - Classes: `PascalCase` (e.g., `FirebaseError`).
+  - Methods and Functions: `snake_case` (e.g., `initialize_app`).
+  - Private Members: An underscore prefix (e.g., `_http_client`).
+  - Constants: `UPPER_SNAKE_CASE` (e.g., `INVALID_ARGUMENT`).
+
+## 5. Testing Philosophy
+
+- **Unit Tests:**
+  - Located in the `tests/` directory.
+  - Test files follow the `test_*.py` naming convention.
+  - Unit tests can be run using the following command:
+    ```bash
+    pytest
+    ```
+- **Integration Tests:**
+  - Located in the `integration/` directory.
+  - These tests make real API calls to Firebase services and require a configured project. Running these tests should be skipped without a project; instead, rely on the repository's GitHub Actions.
+
+## 6. Dependency Management
+
+- **Manager:** `pip`
+- **Manifest:** `requirements.txt`
+- **Command:** `pip install -r requirements.txt`
+
+## 7. Critical Developer Journeys
+
+### Journey 1: How to Add a New API Method
+
+1. **Define Public Method:** Add the new method or change to the appropriate service client files (e.g., `firebase_admin/auth.py`).
+2. **Expose the public API method** by updating the `__all__` constant with the name of the new method.
+3. **Internal Logic:** Implement the core logic within the service package.
+4. **HTTP Client:** Use the HTTP client (`JsonHttpClient` or `HttpxAsyncClient`) to make the API call.
+5. **Error Handling:** Catch exceptions from the HTTP client and raise the appropriate `FirebaseError` subclass using the service's error handling logic.
+6. **Testing:**
+   - Add unit tests in the corresponding `test_*.py` file (e.g., `tests/test_user_mgt.py`).
+   - Add integration tests in the `integration/` directory if applicable.
+7. **Snippets:** (Optional) Add or update code snippets in the `snippets/` directory.
+
+### Journey 2: How to Deprecate a Field/Method in an Existing API
+
+1. **Add Deprecation Note:** Locate where the deprecated object is defined and add a deprecation note to its docstring (e.g. `X is deprecated. Use Y instead.`).
+2. **Add Deprecation Warning:** In the same location where the deprecated object is defined, add a deprecation warning to the code. (e.g. `warnings.warn('X is deprecated. Use Y instead.', DeprecationWarning)`)
+
+## 8. Critical Do's and Don'ts
+
+- **DO:** Use the centralized `JsonHttpClient` or `HttpxAsyncClient` for all HTTP requests.
+- **DO:** Follow the established error handling patterns by using `FirebaseError` and its subclasses.
+- **DON'T:** Expose implementation details from private (underscored) modules or functions in the public API.
+- **DON'T:** Introduce new third-party dependencies without updating `requirements.txt` and `setup.py`.
+
+## 9. Branch Creation
+- When creating a new branch use the format `agentName-short-description`.
+  * Example: `jules-auth-token-parsing`
+  * Example: `gemini-add-storage-file-signer`
+
+## 10. Commit and Pull Request Generation
+
+After implementing and testing a change, you may create a commit and pull request which must follow these rules:
+
+### Commit and Pull Request Title Format:
+Use the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) specification: `type(scope): subject`
+- `type` should be one of `feat`, `fix` or `chore`.
+- `scope` should be the service package changed (e.g., `auth`, `rtdb`, `deps`).
+  - **Note**: Some services use specific abbreviations. Use the abbreviation if one exists. Common abbreviations include:
+    - `messaging` -> `fcm`
+    - `dataconnect` -> `fdc`
+    - `database` -> `rtdb`
+    - `appcheck` -> `fac`
+- `subject` should be a brief summary of the change depending on the action:
+  - For pull requests this should focus on the larger goal the included commits achieve.
+    - Example: `fix(auth): Resolved issue with custom token verification`
+  - For commits this should focus on the specific changes made in that commit.
+    - Example: `fix(auth): Added a new token verification check`
+
+### Commit Body:
+This should be a brief explanation of code changes. 
+ +Example: +``` +feat(fcm): Added `send_each_for_multicast` support for multicast messages + +Added a new `send_each_for_multicast` method to the messaging client. This method wraps the `send_each` method and sends the same message to each token. +``` + +### Pull Request Body: +- A brief explanation of the problem and the solution. +- A summary of the testing strategy (e.g., "Added a new unit test to verify the fix."). +- A **Context Sources** section that lists the `id` and repository path of every `AGENTS.md` file you used. + +Example: +``` +feat(fcm): Added support for multicast messages + +This change introduces a new `send_each_for_multicast` method to the messaging client, allowing developers to send a single message to multiple tokens efficiently. + +Testing: Added unit tests in `tests/test_messaging.py` with mock requests and an integration test in `integration/test_messaging.py`. + +Context Sources Used: +- id: firebase-admin-python +``` + +## 11. Metadata +- id: firebase-admin-python \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 14b422143..139e7f96c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -47,7 +47,7 @@ Great, we love hearing how we can improve our products! Share you idea through o ## Want to submit a pull request? Sweet, we'd love to accept your contribution! -[Open a new pull request](https://github.com/firebase/firebase-admin-python/pull/new/master) and fill +[Open a new pull request](https://github.com/firebase/firebase-admin-python/pull/new) and fill out the provided template. **If you want to implement a new feature, please open an issue with a proposal first so that we can @@ -85,6 +85,8 @@ information on using pull requests. ### Initial Setup +You need Python 3.9+ to build and test the code in this repo. + We recommend using [pip](https://pypi.python.org/pypi/pip) for installing the necessary tools and project dependencies. Most recent versions of Python ship with pip. 
If your development environment does not already have pip, use the software package manager of your platform (e.g. apt-get, brew) @@ -104,23 +106,23 @@ $ pip install -r requirements.txt # Install additional tools and dependencies We use [pylint](https://pylint.org/) for verifying source code format, and enforcing other Python programming best practices. -There is a pylint configuration file ([`.pylintrc`](../.pylintrc)) at the root of this Git +There is a pylint configuration file ([`.pylintrc`](.pylintrc)) at the root of this Git repository. This enables you to invoke pylint directly from the command line: ``` pylint firebase_admin ``` -However, it is recommended that you use the [`lint.sh`](../lint.sh) bash script to invoke +However, it is recommended that you use the [`lint.sh`](lint.sh) bash script to invoke pylint. This script will run the linter on both `firebase_admin` and the corresponding -`tests` module. It suprresses some of the noisy warnings that get generated +`tests` module. It suppresses some of the noisy warnings that get generated when running pylint on test code. Note that by default `lint.sh` will only validate the locally modified source files. To validate all source files, pass `all` as an argument. ``` ./lint.sh # Lint locally modified source files -./lint.sh all # Lint all source files +./lint.sh all # Lint all source files ``` Ideally you should not see any pylint errors or warnings when you run the @@ -158,63 +160,147 @@ You can also get a code coverage report by launching pytest as follows: pytest --cov=firebase_admin --cov=tests ``` -### Testing in Different Environments +### Integration Testing -Sometimes we want to run unit tests in multiple environments (e.g. different Python versions), and -ensure that the SDK works as expected in each of them. We use -[tox](https://tox.readthedocs.io/en/latest/) for this purpose. -But before you can invoke tox, you must set up all the necessary target environments on your -workstation. 
The easiest and cleanest way to achieve this is by using a tool like -[pyenv](https://github.com/pyenv/pyenv). Refer to the -[pyenv documentation](https://github.com/pyenv/pyenv#installation) for instructions on how to -install it. This generally involves installing some binaries as well as modifying a system level -configuration file such as `.bash_profile`. Once pyenv is installed, you can install multiple -versions of Python as follows: +Integration tests are executed against a real life Firebase project. If you do not already +have one suitable for running the tests against, you can create a new project in the +[Firebase Console](https://console.firebase.google.com) following the setup guide below. +If you already have a Firebase project, you'll need to obtain credentials to communicate and +authorize access to your Firebase project: -``` -pyenv install 2.7.6 # install Python 2.7.6 -pyenv install 3.3.0 # install Python 3.3.0 -pyenv install pypy2-5.6.0 # install pypy2 -``` -Refer to the [`tox.ini`](../tox.ini) file for a list of target environments that we usually test. -Use pyenv to install all the required Python versions on your workstation. Verify that they are -installed by running the following command: +1. Service account certificate: This allows access to your Firebase project through a service account +which is required for all integration tests. This can be downloaded as a JSON file from the +**Settings > Service Accounts** tab of the Firebase console when you click the +**Generate new private key** button. Copy the file into the repo so it's available at `cert.json`. + > **Note:** Service accounts should be carefully managed and their keys should never be stored in publicly accessible source code or repositories. -``` -pyenv versions -``` -To make all the required Python versions available to tox for testing, run the `pyenv local` command -with all the Python versions as arguments. 
The following example shows how to make Python versions -2.7.6, 3.3.0 and pypy2 available to tox. +2. Web API key: This allows for Auth sign-in needed for some Authentication and Tenant Management +integration tests. This is displayed in the **Settings > General** tab of the Firebase console +after enabling Authentication as described in the steps below. Copy it and save to a new text +file at `apikey.txt`. + + +Set up your Firebase project as follows: + + +1. Enable Authentication: + 1. Go to the Firebase Console, and select **Authentication** from the **Build** menu. + 2. Click on **Get Started**. + 3. Select **Sign-in method > Add new provider > Email/Password** then enable both the + **Email/Password** and **Email link (passwordless sign-in)** options. + + +2. Enable Firestore: + 1. Go to the Firebase Console, and select **Firestore Database** from the **Build** menu. + 2. Click on the **Create database** button. You can choose to set up Firestore either in + the production mode or in the test mode. + + +3. Enable Realtime Database: + 1. Go to the Firebase Console, and select **Realtime Database** from the **Build** menu. + 2. Click on the **Create Database** button. You can choose to set up the Realtime Database + either in the locked mode or in the test mode. + + > **Note:** Integration tests are not run against the default Realtime Database reference and are + instead run against a database created at `https://{PROJECT_ID}.firebaseio.com`. + This second Realtime Database reference is created in the following steps. + + 3. In the **Data** tab click on the kebab menu (3 dots) and select **Create Database**. + 4. Enter your Project ID (Found in the **General** tab in **Account Settings**) as the + **Realtime Database reference**. Again, you can choose to set up the Realtime Database + either in the locked mode or in the test mode. + + +4. Enable Storage: + 1. Go to the Firebase Console, and select **Storage** from the **Build** menu. + 2. 
Click on the **Get started** button. You can choose to set up Cloud Storage + either in the production mode or in the test mode. + + +5. Enable the Firebase ML API: + 1. Go to the + [Google Cloud console | Firebase ML API](https://console.cloud.google.com/apis/api/firebaseml.googleapis.com/overview) + and make sure your project is selected. + 2. If the API is not already enabled, click **Enable**. + + +6. Enable the IAM API: + 1. Go to the [Google Cloud console](https://console.cloud.google.com) + and make sure your Firebase project is selected. + 2. Select **APIs & Services** from the main menu, and click the + **ENABLE APIS AND SERVICES** button. + 3. Search for and enable **Identity and Access Management (IAM) API** by Google Enterprise API. + + +7. Enable Tenant Management: + 1. Go to + [Google Cloud console | Identity Platform](https://console.cloud.google.com/customer-identity/) + and if it is not already enabled, click **Enable**. + 2. Then + [enable multi-tenancy](https://cloud.google.com/identity-platform/docs/multi-tenancy-quickstart#enabling_multi-tenancy) + for your project. + + +8. Ensure your service account has the **Firebase Authentication Admin** role. This is required +to ensure that exported user records contain the password hashes of the user accounts: + 1. Go to [Google Cloud console | IAM & admin](https://console.cloud.google.com/iam-admin). + 2. Find your service account in the list. If not added click the pencil icon to edit its + permissions. + 3. Click **ADD ANOTHER ROLE** and choose **Firebase Authentication Admin**. + 4. Click **SAVE**. + +9. Enable Cloud Tasks: + 1. Search for and enable **Cloud Run**. + 2. Search for and enable **Cloud Tasks**. + 3. Go to [Google Cloud console | IAM & admin](https://console.cloud.google.com/iam-admin) + and make sure your Firebase project is selected. + 4. 
Ensure your service account has the following required roles: + * **Cloud Tasks Enqueuer** - `cloudtasks.taskEnqueuer` + * **Cloud Tasks Task Deleter** - `cloudtasks.taskDeleter` + * **Cloud Run Invoker** - `run.invoker` + * **Service Account User** - `iam.serviceAccountUser` + + +Now you can invoke the integration test suite as follows: ``` -pyenv local 2.7.6 3.3.0 pypy2-5.6.0 +pytest integration/ --cert cert.json --apikey apikey.txt ``` -Once your system is fully set up, you can execute the following command from the root of the -repository to launch tox: +### Emulator-based Integration Testing + +Some integration tests can run against emulators. This allows local testing +without using real projects or credentials. For now, only the RTDB Emulator +is supported. + +First, install the Firebase CLI, then run: ``` -tox +firebase emulators:exec --only database --project fake-project-id 'pytest integration/test_db.py' ``` -This command will read the list of target environments from `tox.ini`, and execute tests in each of -those environments. It will also generate a code coverage report at the end of the execution. +### Test Coverage +To review the test coverage, run `pytest` with the `--cov` flag. To view a detailed line by line +coverage, use +```bash +pytest --cov --cov-report html ``` -pyenv local 2.7.6 3.3.0 pypy2-5.6.0 -``` +and point your browser to +`file:////htmlcov/index.html` (where `dir` is the location from which the report was created). ### Repo Organization Here are some highlights of the directory structure and notable source files * `firebase_admin/` - Source directory for the `firebase_admin` module. +* `integration/` - Integration tests. * `tests/` - Unit tests. * `data/` - Provides mocks for several variables as well as mock service account keys. +* `scripts/` - A collection of shell scripts used to create and verify releases. * `.github/` - Contribution instructions as well as issue and pull request templates. 
* `lint.sh` - Runs pylint to check for code quality. * `.pylintrc` - Default configuration for pylint. diff --git a/README.md b/README.md index 80adc0583..29303fd4f 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,6 @@ -[![Build Status](https://travis-ci.org/firebase/firebase-admin-python.svg?branch=master)](https://travis-ci.org/firebase/firebase-admin-python) +[![Nightly Builds](https://github.com/firebase/firebase-admin-python/actions/workflows/nightly.yml/badge.svg)](https://github.com/firebase/firebase-admin-python/actions/workflows/nightly.yml) +[![Python](https://img.shields.io/pypi/pyversions/firebase-admin.svg)](https://pypi.org/project/firebase-admin/) +[![Version](https://img.shields.io/pypi/v/firebase-admin.svg)](https://pypi.org/project/firebase-admin/) # Firebase Admin Python SDK @@ -36,13 +38,15 @@ pip install firebase-admin Please refer to the [CONTRIBUTING page](./CONTRIBUTING.md) for more information about how you can contribute to this project. We welcome bug reports, feature -requests, code review feedback, and also pull requests. +requests, code review feedback, and also pull requests. ## Supported Python Versions -We support Python 2.7 and Python 3.3+. Firebase Admin Python SDK is also tested -on PyPy and [Google App Engine](https://cloud.google.com/appengine/) environments. +We currently support Python 3.9+. However, Python 3.9 support is deprecated, +and developers are strongly advised to use Python 3.10 or higher. Firebase +Admin Python SDK is also tested on PyPy and +[Google App Engine](https://cloud.google.com/appengine/) environments. ## Documentation diff --git a/scripts/bash_utils.sh b/firebase_admin/__about__.py similarity index 59% rename from scripts/bash_utils.sh rename to firebase_admin/__about__.py index 628068fb7..d219f5ed7 100644 --- a/scripts/bash_utils.sh +++ b/firebase_admin/__about__.py @@ -12,14 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-#!/bin/bash +"""About information (version, etc) for Firebase Admin SDK.""" -function parseVersion { - if [[ ! "$1" =~ ^([0-9]*)\.([0-9]*)\.([0-9]*)$ ]]; then - return 1 - fi - MAJOR_VERSION=$(echo "$1" | sed -e 's/^\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\)$/\1/') - MINOR_VERSION=$(echo "$1" | sed -e 's/^\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\)$/\2/') - PATCH_VERSION=$(echo "$1" | sed -e 's/^\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\)$/\3/') - return 0 -} +__version__ = '7.2.0' +__title__ = 'firebase_admin' +__author__ = 'Firebase' +__license__ = 'Apache License 2.0' +__url__ = 'https://firebase.google.com/docs/admin/setup/' diff --git a/firebase_admin/__init__.py b/firebase_admin/__init__.py index 3f340a956..8c9f628e5 100644 --- a/firebase_admin/__init__.py +++ b/firebase_admin/__init__.py @@ -13,38 +13,48 @@ # limitations under the License. """Firebase Admin SDK for Python.""" +import datetime +import json +import os import threading -import six - +from google.auth.credentials import Credentials as GoogleAuthCredentials +from google.auth.exceptions import DefaultCredentialsError from firebase_admin import credentials +from firebase_admin.__about__ import __version__ -# Declaring module version as per https://www.python.org/dev/peps/pep-0396/#specification -# Update this accordingly for each release. -__version__ = '2.0.0' - _apps = {} _apps_lock = threading.RLock() +_clock = datetime.datetime.utcnow _DEFAULT_APP_NAME = '[DEFAULT]' - +_FIREBASE_CONFIG_ENV_VAR = 'FIREBASE_CONFIG' +_CONFIG_VALID_KEYS = ['databaseAuthVariableOverride', 'databaseURL', 'httpTimeout', 'projectId', + 'storageBucket'] def initialize_app(credential=None, options=None, name=_DEFAULT_APP_NAME): """Initializes and returns a new App instance. Creates a new App instance using the specified options and the app name. If an instance already exists by the same - app name a ValueError is raised. Use this function whenever - a new App instance is required. Do not directly invoke the + app name a ValueError is raised. 
+ If options are not provided an attempt is made to load the options from the environment. + This is done by looking up the ``FIREBASE_CONFIG`` environment variable. If the value of + the variable starts with ``"{"``, it is parsed as a JSON object. Otherwise it is treated + as a file name and the JSON content is read from the corresponding file. + Use this function whenever a new App instance is required. Do not directly invoke the App constructor. Args: credential: A credential object used to initialize the SDK (optional). If none is provided, Google Application Default Credentials are used. - options: A dictionary of configuration options (optional). - name: Name of the app (optional). + options: A dictionary of configuration options (optional). Supported options include + ``databaseURL``, ``storageBucket``, ``projectId``, ``databaseAuthVariableOverride``, + ``serviceAccountId`` and ``httpTimeout``. If ``httpTimeout`` is not set, the SDK uses + a default timeout of 120 seconds. + name: Name of the app (optional). Returns: App: A newly initialized instance of App. @@ -68,12 +78,12 @@ def initialize_app(credential=None, options=None, name=_DEFAULT_APP_NAME): 'initialize_app() once. But if you do want to initialize multiple ' 'apps, pass a second argument to initialize_app() to give each app ' 'a unique name.')) - else: - raise ValueError(( - 'Firebase app named "{0}" already exists. This means you called ' - 'initialize_app() more than once with the same app name as the ' - 'second argument. Make sure you provide a unique name every time ' - 'you call initialize_app().').format(name)) + + raise ValueError( + f'Firebase app named "{name}" already exists. This means you called ' + 'initialize_app() more than once with the same app name as the ' + 'second argument. Make sure you provide a unique name every time ' + 'you call initialize_app().') def delete_app(app): @@ -86,21 +96,21 @@ def delete_app(app): ValueError: If the app is not initialized. 
""" if not isinstance(app, App): - raise ValueError('Illegal app argument type: "{}". Argument must be of ' - 'type App.'.format(type(app))) + raise ValueError(f'Illegal app argument type: "{type(app)}". Argument must be of type App.') with _apps_lock: if _apps.get(app.name) is app: del _apps[app.name] + app._cleanup() # pylint: disable=protected-access return if app.name == _DEFAULT_APP_NAME: raise ValueError( 'The default Firebase app is not initialized. Make sure to initialize ' 'the default app by calling initialize_app().') - else: - raise ValueError( - ('Firebase app named "{0}" is not initialized. Make sure to initialize ' - 'the app by calling initialize_app() with your app name as the ' - 'second argument.').format(app.name)) + + raise ValueError( + f'Firebase app named "{app.name}" is not initialized. Make sure to initialize ' + 'the app by calling initialize_app() with your app name as the ' + 'second argument.') def get_app(name=_DEFAULT_APP_NAME): @@ -116,9 +126,9 @@ def get_app(name=_DEFAULT_APP_NAME): ValueError: If the specified name is not a string, or if the specified app does not exist. """ - if not isinstance(name, six.string_types): - raise ValueError('Illegal app name argument type: "{}". App name ' - 'must be a string.'.format(type(name))) + if not isinstance(name, str): + raise ValueError( + f'Illegal app name argument type: "{type(name)}". App name must be a string.') with _apps_lock: if name in _apps: return _apps[name] @@ -127,30 +137,61 @@ def get_app(name=_DEFAULT_APP_NAME): raise ValueError( 'The default Firebase app does not exist. Make sure to initialize ' 'the SDK by calling initialize_app().') - else: - raise ValueError( - ('Firebase app named "{0}" does not exist. Make sure to initialize ' - 'the SDK by calling initialize_app() with your app name as the ' - 'second argument.').format(name)) + raise ValueError( + f'Firebase app named "{name}" does not exist. 
Make sure to initialize ' + 'the SDK by calling initialize_app() with your app name as the ' + 'second argument.') -class _AppOptions(object): + +class _AppOptions: """A collection of configuration options for an App.""" def __init__(self, options): if options is None: - options = {} + options = self._load_from_environment() + if not isinstance(options, dict): - raise ValueError('Illegal Firebase app options type: {0}. Options ' - 'must be a dictionary.'.format(type(options))) + raise ValueError( + f'Illegal Firebase app options type: {type(options)}. ' + 'Options must be a dictionary.') self._options = options + def get(self, key, default=None): + """Returns the option identified by the provided key.""" + return self._options.get(key, default) + + def _load_from_environment(self): + """Invoked when no options are passed to __init__, loads options from FIREBASE_CONFIG. -class App(object): + If the value of the FIREBASE_CONFIG environment variable starts with "{" an attempt is made + to parse it as a JSON object, otherwise it is assumed to be pointing to a JSON file. + """ + + config_file = os.getenv(_FIREBASE_CONFIG_ENV_VAR) + if not config_file: + return {} + if config_file.startswith('{'): + json_str = config_file + else: + try: + with open(config_file, 'r', encoding='utf-8') as json_file: + json_str = json_file.read() + except Exception as err: + raise ValueError(f'Unable to read file {config_file}. {err}') from err + try: + json_data = json.loads(json_str) + except Exception as err: + raise ValueError( + f'JSON string "{json_str}" is not valid json. {err}') from err + return {k: v for k, v in json_data.items() if k in _CONFIG_VALID_KEYS} + + +class App: """The entry point for Firebase Python SDK. - Represents a Firebase app, while holding the configuration and state - common to all Firebase APIs. + Represents a Firebase app, while holding the configuration and state + common to all Firebase APIs. 
""" def __init__(self, name, credential, options): @@ -164,16 +205,31 @@ def __init__(self, name, credential, options): Raises: ValueError: If an argument is None or invalid. """ - if not name or not isinstance(name, six.string_types): - raise ValueError('Illegal Firebase app name "{0}" provided. App name must be a ' - 'non-empty string.'.format(name)) + if not name or not isinstance(name, str): + raise ValueError( + f'Illegal Firebase app name "{name}" provided. App name must be a ' + 'non-empty string.') self._name = name - if not isinstance(credential, credentials.Base): + if isinstance(credential, GoogleAuthCredentials): + self._credential = credentials._ExternalCredentials(credential) # pylint: disable=protected-access + elif isinstance(credential, credentials.Base): + self._credential = credential + else: raise ValueError('Illegal Firebase credential provided. App must be initialized ' 'with a valid credential instance.') - self._credential = credential self._options = _AppOptions(options) + self._lock = threading.RLock() + self._services = {} + + App._validate_project_id(self._options.get('projectId')) + self._project_id_initialized = False + + @classmethod + def _validate_project_id(cls, project_id): + if project_id is not None and not isinstance(project_id, str): + raise ValueError( + f'Invalid project ID: "{project_id}". project ID must be a string.') @property def name(self): @@ -186,3 +242,76 @@ def credential(self): @property def options(self): return self._options + + @property + def project_id(self): + if not self._project_id_initialized: + self._project_id = self._lookup_project_id() + self._project_id_initialized = True + return self._project_id + + def _lookup_project_id(self): + """Looks up the Firebase project ID associated with an App. + + If a ``projectId`` is specified in app options, it is returned. Then tries to + get the project ID from the credential used to initialize the app. 
If that also fails, + attempts to look up the ``GOOGLE_CLOUD_PROJECT`` and ``GCLOUD_PROJECT`` environment + variables. + + Returns: + str: A project ID string or None. + """ + project_id = self._options.get('projectId') + if not project_id: + try: + project_id = self._credential.project_id + except (AttributeError, DefaultCredentialsError): + pass + if not project_id: + project_id = os.environ.get('GOOGLE_CLOUD_PROJECT', + os.environ.get('GCLOUD_PROJECT')) + App._validate_project_id(self._options.get('projectId')) + return project_id + + def _get_service(self, name, initializer): + """Returns the service instance identified by the given name. + + Services are functional entities exposed by the Admin SDK (e.g. auth, database). Each + service instance is associated with exactly one App. If the named service + instance does not exist yet, _get_service() calls the provided initializer function to + create the service instance. The created instance will be cached, so that subsequent + calls would always fetch it from the cache. + + Args: + name: Name of the service to retrieve. + initializer: A function that can be used to initialize a service for the first time. + + Returns: + object: The specified service instance. + + Raises: + ValueError: If the provided name is invalid, or if the App is already deleted. + """ + if not name or not isinstance(name, str): + raise ValueError( + f'Illegal name argument: "{name}". Name must be a non-empty string.') + with self._lock: + if self._services is None: + raise ValueError( + f'Service requested from deleted Firebase App: "{self._name}".') + if name not in self._services: + self._services[name] = initializer(self) + return self._services[name] + + def _cleanup(self): + """Cleans up any services associated with this App. + + Checks whether each service contains a close() method, and calls it if available. + This is to be called when an App is being deleted, thus ensuring graceful termination of + any services started by the App. 
+ """ + with self._lock: + for service in self._services.values(): + if hasattr(service, 'close') and hasattr(service.close, '__call__'): + service.close() + self._services = None diff --git a/firebase_admin/_auth_client.py b/firebase_admin/_auth_client.py new file mode 100644 index 000000000..74261fa37 --- /dev/null +++ b/firebase_admin/_auth_client.py @@ -0,0 +1,761 @@ +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase auth client sub module.""" + +import time + +import firebase_admin +from firebase_admin import _auth_providers +from firebase_admin import _auth_utils +from firebase_admin import _http_client +from firebase_admin import _token_gen +from firebase_admin import _user_identifier +from firebase_admin import _user_import +from firebase_admin import _user_mgt +from firebase_admin import _utils + + +class Client: + """Firebase Authentication client scoped to a specific tenant.""" + + def __init__(self, app, tenant_id=None): + if not app.project_id: + raise ValueError("""A project ID is required to access the auth service. + 1. Use a service account credential, or + 2. set the project ID explicitly via Firebase App options, or + 3. 
set the project ID via the GOOGLE_CLOUD_PROJECT environment variable.""") + + credential = None + version_header = f'Python/Admin/{firebase_admin.__version__}' + timeout = app.options.get('httpTimeout', _http_client.DEFAULT_TIMEOUT_SECONDS) + # Non-default endpoint URLs for emulator support are set in this dict later. + endpoint_urls = {} + self.emulated = False + + # If an emulator is present, check that the given value matches the expected format and set + # endpoint URLs to use the emulator. Additionally, use a fake credential. + emulator_host = _auth_utils.get_emulator_host() + if emulator_host: + base_url = f'http://{emulator_host}/identitytoolkit.googleapis.com' + endpoint_urls['v1'] = base_url + '/v1' + endpoint_urls['v2'] = base_url + '/v2' + credential = _utils.EmulatorAdminCredentials() + self.emulated = True + else: + # Use credentials if provided + credential = app.credential.get_credential() + + http_client = _http_client.JsonHttpClient( + credential=credential, headers={'X-Client-Version': version_header}, timeout=timeout) + + self._tenant_id = tenant_id + self._token_generator = _token_gen.TokenGenerator( + app, http_client, url_override=endpoint_urls.get('v1')) + self._token_verifier = _token_gen.TokenVerifier(app) + self._user_manager = _user_mgt.UserManager( + http_client, app.project_id, tenant_id, url_override=endpoint_urls.get('v1')) + self._provider_manager = _auth_providers.ProviderConfigClient( + http_client, app.project_id, tenant_id, url_override=endpoint_urls.get('v2')) + + @property + def tenant_id(self): + """Tenant ID associated with this client.""" + return self._tenant_id + + def create_custom_token(self, uid, developer_claims=None): + """Builds and signs a Firebase custom auth token. + + Args: + uid: ID of the user for whom the token is created. + developer_claims: A dictionary of claims to be included in the token + (optional). + + Returns: + bytes: A token minted from the input parameters. 
+ + Raises: + ValueError: If input parameters are invalid. + TokenSignError: If an error occurs while signing the token using the remote IAM service. + """ + return self._token_generator.create_custom_token( + uid, developer_claims, tenant_id=self.tenant_id) + + def verify_id_token(self, id_token, check_revoked=False, clock_skew_seconds=0): + """Verifies the signature and data for the provided JWT. + + Accepts a signed token string, verifies that it is current, was issued + to this project, and that it was correctly signed by Google. + + Args: + id_token: A string of the encoded JWT. + check_revoked: Boolean, If true, checks whether the token has been revoked or + the user disabled (optional). + clock_skew_seconds: The number of seconds to tolerate when checking the token. + Must be between 0-60. Defaults to 0. + + Returns: + dict: A dictionary of key-value pairs parsed from the decoded JWT. + + Raises: + ValueError: If ``id_token`` is a not a string or is empty. + InvalidIdTokenError: If ``id_token`` is not a valid Firebase ID token. + ExpiredIdTokenError: If the specified ID token has expired. + RevokedIdTokenError: If ``check_revoked`` is ``True`` and the ID token has been + revoked. + TenantIdMismatchError: If ``id_token`` belongs to a tenant that is different than + this ``Client`` instance. + CertificateFetchError: If an error occurs while fetching the public key certificates + required to verify the ID token. + UserDisabledError: If ``check_revoked`` is ``True`` and the corresponding user + record is disabled. + """ + if not isinstance(check_revoked, bool): + # guard against accidental wrong assignment. + raise ValueError( + 'Illegal check_revoked argument. 
Argument must be of type bool, but given ' + f'"{type(check_revoked)}".') + + verified_claims = self._token_verifier.verify_id_token(id_token, clock_skew_seconds) + if self.tenant_id: + token_tenant_id = verified_claims.get('firebase', {}).get('tenant') + if self.tenant_id != token_tenant_id: + raise _auth_utils.TenantIdMismatchError( + f'Invalid tenant ID: {token_tenant_id}') + + if check_revoked: + self._check_jwt_revoked_or_disabled( + verified_claims, _token_gen.RevokedIdTokenError, 'ID token') + return verified_claims + + def revoke_refresh_tokens(self, uid): + """Revokes all refresh tokens for an existing user. + + This method updates the user's ``tokens_valid_after_timestamp`` to the current UTC + in seconds since the epoch. It is important that the server on which this is called has its + clock set correctly and synchronized. + + While this revokes all sessions for a specified user and disables any new ID tokens for + existing sessions from getting minted, existing ID tokens may remain active until their + natural expiration (one hour). To verify that ID tokens are revoked, use + ``verify_id_token(idToken, check_revoked=True)``. + + Args: + uid: A user ID string. + + Raises: + ValueError: If the user ID is None, empty or malformed. + FirebaseError: If an error occurs while revoking the refresh token. + """ + self._user_manager.update_user(uid, valid_since=int(time.time())) + + def get_user(self, uid): + """Gets the user data corresponding to the specified user ID. + + Args: + uid: A user ID string. + + Returns: + UserRecord: A user record instance. + + Raises: + ValueError: If the user ID is None, empty or malformed. + UserNotFoundError: If the specified user ID does not exist. + FirebaseError: If an error occurs while retrieving the user. + """ + response = self._user_manager.get_user(uid=uid) + return _user_mgt.UserRecord(response) + + def get_user_by_email(self, email): + """Gets the user data corresponding to the specified user email. 
+ + Args: + email: A user email address string. + + Returns: + UserRecord: A user record instance. + + Raises: + ValueError: If the email is None, empty or malformed. + UserNotFoundError: If no user exists for the specified email address. + FirebaseError: If an error occurs while retrieving the user. + """ + response = self._user_manager.get_user(email=email) + return _user_mgt.UserRecord(response) + + def get_user_by_phone_number(self, phone_number): + """Gets the user data corresponding to the specified phone number. + + Args: + phone_number: A phone number string. + + Returns: + UserRecord: A user record instance. + + Raises: + ValueError: If the phone number is ``None``, empty or malformed. + UserNotFoundError: If no user exists for the specified phone number. + FirebaseError: If an error occurs while retrieving the user. + """ + response = self._user_manager.get_user(phone_number=phone_number) + return _user_mgt.UserRecord(response) + + def get_users(self, identifiers): + """Gets the user data corresponding to the specified identifiers. + + There are no ordering guarantees; in particular, the nth entry in the + result list is not guaranteed to correspond to the nth entry in the input + parameters list. + + A maximum of 100 identifiers may be supplied. If more than 100 + identifiers are supplied, this method raises a `ValueError`. + + Args: + identifiers (list[Identifier]): A list of ``Identifier`` instances used + to indicate which user records should be returned. Must have <= 100 + entries. + + Returns: + GetUsersResult: A ``GetUsersResult`` instance corresponding to the + specified identifiers. + + Raises: + ValueError: If any of the identifiers are invalid or if more than 100 + identifiers are specified. 
+ """ + response = self._user_manager.get_users(identifiers=identifiers) + + def _matches(identifier, user_record): + if isinstance(identifier, _user_identifier.UidIdentifier): + return identifier.uid == user_record.uid + if isinstance(identifier, _user_identifier.EmailIdentifier): + return identifier.email == user_record.email + if isinstance(identifier, _user_identifier.PhoneIdentifier): + return identifier.phone_number == user_record.phone_number + if isinstance(identifier, _user_identifier.ProviderIdentifier): + return next(( + True + for user_info in user_record.provider_data + if identifier.provider_id == user_info.provider_id + and identifier.provider_uid == user_info.uid + ), False) + raise TypeError(f"Unexpected type: {type(identifier)}") + + def _is_user_found(identifier, user_records): + return any(_matches(identifier, user_record) for user_record in user_records) + + users = [_user_mgt.UserRecord(user) for user in response] + not_found = [ + identifier for identifier in identifiers if not _is_user_found(identifier, users)] + + return _user_mgt.GetUsersResult(users=users, not_found=not_found) + + def list_users(self, page_token=None, max_results=_user_mgt.MAX_LIST_USERS_RESULTS): + """Retrieves a page of user accounts from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. The ``max_results`` + argument governs the maximum number of user accounts that may be included in the returned + page. This function never returns ``None``. If there are no user accounts in the Firebase + project, this returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the + page (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in + the returned page (optional). Defaults to 1000, which is also the maximum number + allowed. 
+ + Returns: + ListUsersPage: A page of user accounts. + + Raises: + ValueError: If max_results or page_token are invalid. + FirebaseError: If an error occurs while retrieving the user accounts. + """ + def download(page_token, max_results): + return self._user_manager.list_users(page_token, max_results) + return _user_mgt.ListUsersPage(download, page_token, max_results) + + def create_user(self, **kwargs): # pylint: disable=differing-param-doc + """Creates a new user account with the specified properties. + + Args: + **kwargs: A series of keyword arguments (optional). + + Keyword Args: + uid: User ID to assign to the newly created user (optional). + display_name: The user's display name (optional). + email: The user's primary email (optional). + email_verified: A boolean indicating whether or not the user's primary email is + verified (optional). + phone_number: The user's primary phone number (optional). + photo_url: The user's photo URL (optional). + password: The user's raw, unhashed password. (optional). + disabled: A boolean indicating whether or not the user account is disabled (optional). + + Returns: + UserRecord: A UserRecord instance for the newly created user. + + Raises: + ValueError: If the specified user properties are invalid. + FirebaseError: If an error occurs while creating the user account. + """ + uid = self._user_manager.create_user(**kwargs) + return self.get_user(uid=uid) + + def update_user(self, uid, **kwargs): # pylint: disable=differing-param-doc + """Updates an existing user account with the specified properties. + + Args: + uid: A user ID string. + **kwargs: A series of keyword arguments (optional). + + Keyword Args: + display_name: The user's display name (optional). Can be removed by explicitly passing + ``auth.DELETE_ATTRIBUTE``. + email: The user's primary email (optional). + email_verified: A boolean indicating whether or not the user's primary email is + verified (optional). 
+ phone_number: The user's primary phone number (optional). Can be removed by explicitly + passing ``auth.DELETE_ATTRIBUTE``. + photo_url: The user's photo URL (optional). Can be removed by explicitly passing + ``auth.DELETE_ATTRIBUTE``. + password: The user's raw, unhashed password. (optional). + disabled: A boolean indicating whether or not the user account is disabled (optional). + custom_claims: A dictionary or a JSON string containing the custom claims to be set on + the user account (optional). To remove all custom claims, pass + ``auth.DELETE_ATTRIBUTE``. + valid_since: An integer signifying the seconds since the epoch (optional). This field + is set by ``revoke_refresh_tokens`` and it is discouraged to set this field + directly. + providers_to_delete: The list of provider IDs to unlink, + eg: 'google.com', 'password', etc. + + Returns: + UserRecord: An updated UserRecord instance for the user. + + Raises: + ValueError: If the specified user ID or properties are invalid. + FirebaseError: If an error occurs while updating the user account. + """ + self._user_manager.update_user(uid, **kwargs) + return self.get_user(uid=uid) + + def set_custom_user_claims(self, uid, custom_claims): + """Sets additional claims on an existing user account. + + Custom claims set via this function can be used to define user roles and privilege levels. + These claims propagate to all the devices where the user is already signed in (after token + expiration or when token refresh is forced), and next time the user signs in. The claims + can be accessed via the user's ID token JWT. If a reserved OIDC claim is specified (sub, + iat, iss, etc), an error is thrown. Claims payload must also not be larger than 1000 + characters when serialized into a JSON string. + + Args: + uid: A user ID string. + custom_claims: A dictionary or a JSON string of custom claims. Pass None to unset any + claims set previously. 
+ + Raises: + ValueError: If the specified user ID or the custom claims are invalid. + FirebaseError: If an error occurs while updating the user account. + """ + if custom_claims is None: + custom_claims = _user_mgt.DELETE_ATTRIBUTE + self._user_manager.update_user(uid, custom_claims=custom_claims) + + def delete_user(self, uid): + """Deletes the user identified by the specified user ID. + + Args: + uid: A user ID string. + + Raises: + ValueError: If the user ID is None, empty or malformed. + FirebaseError: If an error occurs while deleting the user account. + """ + self._user_manager.delete_user(uid) + + def delete_users(self, uids): + """Deletes the users specified by the given identifiers. + + Deleting a non-existing user does not generate an error (the method is + idempotent.) Non-existing users are considered to be successfully + deleted and are therefore included in the + `DeleteUserResult.success_count` value. + + A maximum of 1000 identifiers may be supplied. If more than 1000 + identifiers are supplied, this method raises a `ValueError`. + + Args: + uids: A list of strings indicating the uids of the users to be deleted. + Must have <= 1000 entries. + + Returns: + DeleteUsersResult: The total number of successful/failed deletions, as + well as the array of errors that correspond to the failed + deletions. + + Raises: + ValueError: If any of the identifiers are invalid or if more than 1000 + identifiers are specified. + """ + result = self._user_manager.delete_users(uids, force_delete=True) + return _user_mgt.DeleteUsersResult(result, len(uids)) + + def import_users(self, users, hash_alg=None): + """Imports the specified list of users into Firebase Auth. + + At most 1000 users can be imported at a time. This operation is optimized for bulk imports + and ignores checks on identifier uniqueness, which could result in duplications. The + ``hash_alg`` parameter must be specified when importing users with passwords. 
Refer to the + ``UserImportHash`` class for supported hash algorithms. + + Args: + users: A list of ``ImportUserRecord`` instances to import. Length of the list must not + exceed 1000. + hash_alg: A ``UserImportHash`` object (optional). Required when importing users with + passwords. + + Returns: + UserImportResult: An object summarizing the result of the import operation. + + Raises: + ValueError: If the provided arguments are invalid. + FirebaseError: If an error occurs while importing users. + """ + result = self._user_manager.import_users(users, hash_alg) + return _user_import.UserImportResult(result, len(users)) + + def generate_password_reset_link(self, email, action_code_settings=None): + """Generates the out-of-band email action link for password reset flows for the specified + email address. + + Args: + email: The email of the user whose password is to be reset. + action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether + the link is to be handled by a mobile app and the additional state information to + be passed in the deep link. + + Returns: + link: The password reset link created by the API + + Raises: + ValueError: If the provided arguments are invalid + EmailNotFoundError: If no user exists for the specified email address. + FirebaseError: If an error occurs while generating the link + """ + return self._user_manager.generate_email_action_link( + 'PASSWORD_RESET', email, action_code_settings=action_code_settings) + + def generate_email_verification_link(self, email, action_code_settings=None): + """Generates the out-of-band email action link for email verification flows for the + specified email address. + + Args: + email: The email of the user to be verified. + action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether + the link is to be handled by a mobile app and the additional state information to + be passed in the deep link. 
+ + Returns: + link: The email verification link created by the API + + Raises: + ValueError: If the provided arguments are invalid + UserNotFoundError: If no user exists for the specified email address. + FirebaseError: If an error occurs while generating the link + """ + return self._user_manager.generate_email_action_link( + 'VERIFY_EMAIL', email, action_code_settings=action_code_settings) + + def generate_sign_in_with_email_link(self, email, action_code_settings): + """Generates the out-of-band email action link for email link sign-in flows, using the + action code settings provided. + + Args: + email: The email of the user signing in. + action_code_settings: ``ActionCodeSettings`` instance. Defines whether + the link is to be handled by a mobile app and the additional state information to be + passed in the deep link. + + Returns: + link: The email sign-in link created by the API + + Raises: + ValueError: If the provided arguments are invalid + FirebaseError: If an error occurs while generating the link + """ + return self._user_manager.generate_email_action_link( + 'EMAIL_SIGNIN', email, action_code_settings=action_code_settings) + + def get_oidc_provider_config(self, provider_id): + """Returns the ``OIDCProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + + Returns: + OIDCProviderConfig: An OIDC provider config instance. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. + ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. + FirebaseError: If an error occurs while retrieving the OIDC provider. + """ + return self._provider_manager.get_oidc_provider_config(provider_id) + + def create_oidc_provider_config( + self, provider_id, client_id, issuer, display_name=None, enabled=None, + client_secret=None, id_token_response_type=None, code_response_type=None): + """Creates a new OIDC provider config from the given parameters. 
+ + OIDC provider support requires Google Cloud's Identity Platform (GCIP). To learn more about + GCIP, including pricing and features, see https://cloud.google.com/identity-platform. + + Args: + provider_id: Provider ID string. Must have the prefix ``oidc.``. + client_id: Client ID of the new config. + issuer: Issuer of the new config. Must be a valid URL. + display_name: The user-friendly display name to the current configuration (optional). + This name is also used as the provider label in the Cloud Console. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). A user cannot sign in using a disabled provider. + client_secret: A string which sets the client secret for the new provider. + This is required for the code flow. + code_response_type: A boolean which sets whether to enable the code response flow for + the new provider. By default, this is not enabled if no response type is + specified. A client secret must be set for this response type. + Having both the code and ID token response flows is currently not supported. + id_token_response_type: A boolean which sets whether to enable the ID token response + flow for the new provider. By default, this is enabled if no response type is + specified. + Having both the code and ID token response flows is currently not supported. + + Returns: + OIDCProviderConfig: The newly created OIDC provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while creating the new OIDC provider config. 
+ """ + return self._provider_manager.create_oidc_provider_config( + provider_id, client_id=client_id, issuer=issuer, display_name=display_name, + enabled=enabled, client_secret=client_secret, + id_token_response_type=id_token_response_type, code_response_type=code_response_type) + + def update_oidc_provider_config( + self, provider_id, client_id=None, issuer=None, display_name=None, enabled=None, + client_secret=None, id_token_response_type=None, code_response_type=None): + """Updates an existing OIDC provider config with the given parameters. + + Args: + provider_id: Provider ID string. Must have the prefix ``oidc.``. + client_id: Client ID of the new config (optional). + issuer: Issuer of the new config (optional). Must be a valid URL. + display_name: The user-friendly display name to the current configuration (optional). + Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). + client_secret: A string which sets the client secret for the new provider. + This is required for the code flow. + code_response_type: A boolean which sets whether to enable the code response flow for + the new provider. By default, this is not enabled if no response type is specified. + A client secret must be set for this response type. + Having both the code and ID token response flows is currently not supported. + id_token_response_type: A boolean which sets whether to enable the ID token response + flow for the new provider. By default, this is enabled if no response type is + specified. + Having both the code and ID token response flows is currently not supported. + + Returns: + OIDCProviderConfig: The updated OIDC provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while updating the OIDC provider config. 
+ """ + return self._provider_manager.update_oidc_provider_config( + provider_id, client_id=client_id, issuer=issuer, display_name=display_name, + enabled=enabled, client_secret=client_secret, + id_token_response_type=id_token_response_type, code_response_type=code_response_type) + + def delete_oidc_provider_config(self, provider_id): + """Deletes the ``OIDCProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. + ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. + FirebaseError: If an error occurs while deleting the OIDC provider. + """ + self._provider_manager.delete_oidc_provider_config(provider_id) + + def list_oidc_provider_configs( + self, page_token=None, max_results=_auth_providers.MAX_LIST_CONFIGS_RESULTS): + """Retrieves a page of OIDC provider configs from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. The ``max_results`` + argument governs the maximum number of configs that may be included in the returned + page. This function never returns ``None``. If there are no OIDC configs in the Firebase + project, this returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the + page (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in + the returned page (optional). Defaults to 100, which is also the maximum number + allowed. + + Returns: + ListProviderConfigsPage: A page of OIDC provider config instances. + + Raises: + ValueError: If ``max_results`` or ``page_token`` are invalid. + FirebaseError: If an error occurs while retrieving the OIDC provider configs. 
+ """ + return self._provider_manager.list_oidc_provider_configs(page_token, max_results) + + def get_saml_provider_config(self, provider_id): + """Returns the ``SAMLProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + + Returns: + SAMLProviderConfig: A SAML provider config instance. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. + ConfigurationNotFoundError: If no SAML provider is available with the given identifier. + FirebaseError: If an error occurs while retrieving the SAML provider. + """ + return self._provider_manager.get_saml_provider_config(provider_id) + + def create_saml_provider_config( + self, provider_id, idp_entity_id, sso_url, x509_certificates, rp_entity_id, + callback_url, display_name=None, enabled=None): + """Creates a new SAML provider config from the given parameters. + + SAML provider support requires Google Cloud's Identity Platform (GCIP). To learn more about + GCIP, including pricing and features, see https://cloud.google.com/identity-platform. + + Args: + provider_id: Provider ID string. Must have the prefix ``saml.``. + idp_entity_id: The SAML IdP entity identifier. + sso_url: The SAML IdP SSO URL. Must be a valid URL. + x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this + provider. Multiple certificates are accepted to prevent outages during IdP key + rotation (for example ADFS rotates every 10 days). When the Auth server receives a + SAML response, it will match the SAML response with the certificate on record. + Otherwise the response is rejected. Developers are expected to manage the + certificate updates as keys are rotated. + rp_entity_id: The SAML relying party (service provider) entity ID. This is defined by + the developer but needs to be provided to the SAML IdP. + callback_url: Callback URL string. 
This is fixed and must always be the same as the + OAuth redirect URL provisioned by Firebase Auth, unless a custom authDomain is + used. + display_name: The user-friendly display name to the current configuration (optional). + This name is also used as the provider label in the Cloud Console. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). A user cannot sign in using a disabled provider. + + Returns: + SAMLProviderConfig: The newly created SAML provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while creating the new SAML provider config. + """ + return self._provider_manager.create_saml_provider_config( + provider_id, idp_entity_id=idp_entity_id, sso_url=sso_url, + x509_certificates=x509_certificates, rp_entity_id=rp_entity_id, + callback_url=callback_url, display_name=display_name, enabled=enabled) + + def update_saml_provider_config( + self, provider_id, idp_entity_id=None, sso_url=None, x509_certificates=None, + rp_entity_id=None, callback_url=None, display_name=None, enabled=None): + """Updates an existing SAML provider config with the given parameters. + + Args: + provider_id: Provider ID string. Must have the prefix ``saml.``. + idp_entity_id: The SAML IdP entity identifier (optional). + sso_url: The SAML IdP SSO URL. Must be a valid URL (optional). + x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this + provider (optional). + rp_entity_id: The SAML relying party entity ID (optional). + callback_url: Callback URL string (optional). + display_name: The user-friendly display name of the current configuration (optional). + Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). + + Returns: + SAMLProviderConfig: The updated SAML provider config instance. 
+
+        Raises:
+            ValueError: If any of the specified input parameters are invalid.
+            FirebaseError: If an error occurs while updating the SAML provider config.
+        """
+        return self._provider_manager.update_saml_provider_config(
+            provider_id, idp_entity_id=idp_entity_id, sso_url=sso_url,
+            x509_certificates=x509_certificates, rp_entity_id=rp_entity_id,
+            callback_url=callback_url, display_name=display_name, enabled=enabled)
+
+    def delete_saml_provider_config(self, provider_id):
+        """Deletes the ``SAMLProviderConfig`` with the given ID.
+
+        Args:
+            provider_id: Provider ID string.
+
+        Raises:
+            ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix.
+            ConfigurationNotFoundError: If no SAML provider is available with the given identifier.
+            FirebaseError: If an error occurs while deleting the SAML provider.
+        """
+        self._provider_manager.delete_saml_provider_config(provider_id)
+
+    def list_saml_provider_configs(
+            self, page_token=None, max_results=_auth_providers.MAX_LIST_CONFIGS_RESULTS):
+        """Retrieves a page of SAML provider configs from a Firebase project.
+
+        The ``page_token`` argument governs the starting point of the page. The ``max_results``
+        argument governs the maximum number of configs that may be included in the returned
+        page. This function never returns ``None``. If there are no SAML configs in the Firebase
+        project, this returns an empty page.
+
+        Args:
+            page_token: A non-empty page token string, which indicates the starting point of the
+                page (optional). Defaults to ``None``, which will retrieve the first page of configs.
+            max_results: A positive integer indicating the maximum number of configs to include in
+                the returned page (optional). Defaults to 100, which is also the maximum number
+                allowed.
+
+        Returns:
+            ListProviderConfigsPage: A page of SAML provider config instances.
+
+        Raises:
+            ValueError: If ``max_results`` or ``page_token`` are invalid.
+ FirebaseError: If an error occurs while retrieving the SAML provider configs. + """ + return self._provider_manager.list_saml_provider_configs(page_token, max_results) + + def _check_jwt_revoked_or_disabled(self, verified_claims, exc_type, label): + user = self.get_user(verified_claims.get('uid')) + if user.disabled: + raise _auth_utils.UserDisabledError('The user record is disabled.') + if verified_claims.get('iat') * 1000 < user.tokens_valid_after_timestamp: + raise exc_type(f'The Firebase {label} has been revoked.') diff --git a/firebase_admin/_auth_providers.py b/firebase_admin/_auth_providers.py new file mode 100644 index 000000000..cc7949526 --- /dev/null +++ b/firebase_admin/_auth_providers.py @@ -0,0 +1,431 @@ +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+"""Firebase auth providers management sub module."""
+
+from urllib import parse
+
+import requests
+
+from firebase_admin import _auth_utils
+from firebase_admin import _user_mgt
+
+
+MAX_LIST_CONFIGS_RESULTS = 100
+
+
+class ProviderConfig:
+    """Parent type for all authentication provider config types."""
+
+    def __init__(self, data):
+        self._data = data
+
+    @property
+    def provider_id(self):
+        name = self._data['name']
+        return name.split('/')[-1]
+
+    @property
+    def display_name(self):
+        return self._data.get('displayName')
+
+    @property
+    def enabled(self):
+        return self._data.get('enabled', False)
+
+
+class OIDCProviderConfig(ProviderConfig):
+    """Represents the OIDC auth provider configuration.
+
+    See https://openid.net/specs/openid-connect-core-1_0-final.html.
+    """
+
+    @property
+    def issuer(self):
+        return self._data['issuer']
+
+    @property
+    def client_id(self):
+        return self._data['clientId']
+
+    @property
+    def client_secret(self):
+        return self._data.get('clientSecret')
+
+    @property
+    def id_token_response_type(self):
+        return self._data.get('responseType', {}).get('idToken', False)
+
+    @property
+    def code_response_type(self):
+        return self._data.get('responseType', {}).get('code', False)
+
+
+class SAMLProviderConfig(ProviderConfig):
+    """Represents the SAML auth provider configuration.
+
+    See http://docs.oasis-open.org/security/saml/Post2.0/sstc-saml-tech-overview-2.0.html.
+ """ + + @property + def idp_entity_id(self): + return self._data.get('idpConfig', {})['idpEntityId'] + + @property + def sso_url(self): + return self._data.get('idpConfig', {})['ssoUrl'] + + @property + def x509_certificates(self): + certs = self._data.get('idpConfig', {})['idpCertificates'] + return [c['x509Certificate'] for c in certs] + + @property + def callback_url(self): + return self._data.get('spConfig', {})['callbackUri'] + + @property + def rp_entity_id(self): + return self._data.get('spConfig', {})['spEntityId'] + + +class ListProviderConfigsPage: + """Represents a page of AuthProviderConfig instances retrieved from a Firebase project. + + Provides methods for traversing the provider configs included in this page, as well as + retrieving subsequent pages. The iterator returned by ``iterate_all()`` can be used to iterate + through all provider configs in the Firebase project starting from this page. + """ + + def __init__(self, download, page_token, max_results): + self._download = download + self._max_results = max_results + self._current = download(page_token, max_results) + + @property + def provider_configs(self): + """A list of ``AuthProviderConfig`` instances available in this page.""" + raise NotImplementedError + + @property + def next_page_token(self): + """Page token string for the next page (empty string indicates no more pages).""" + return self._current.get('nextPageToken', '') + + @property + def has_next_page(self): + """A boolean indicating whether more pages are available.""" + return bool(self.next_page_token) + + def get_next_page(self): + """Retrieves the next page of provider configs, if available. + + Returns: + ListProviderConfigsPage: Next page of provider configs, or None if this is the last + page. + """ + if self.has_next_page: + return self.__class__(self._download, self.next_page_token, self._max_results) + return None + + def iterate_all(self): + """Retrieves an iterator for provider configs. 
+ + Returned iterator will iterate through all the provider configs in the Firebase project + starting from this page. The iterator will never buffer more than one page of configs + in memory at a time. + + Returns: + iterator: An iterator of AuthProviderConfig instances. + """ + return _ProviderConfigIterator(self) + + +class _ListOIDCProviderConfigsPage(ListProviderConfigsPage): + + @property + def provider_configs(self): + return [OIDCProviderConfig(data) for data in self._current.get('oauthIdpConfigs', [])] + + +class _ListSAMLProviderConfigsPage(ListProviderConfigsPage): + + @property + def provider_configs(self): + return [SAMLProviderConfig(data) for data in self._current.get('inboundSamlConfigs', [])] + + +class _ProviderConfigIterator(_auth_utils.PageIterator): + + @property + def items(self): + return self._current_page.provider_configs + + +class ProviderConfigClient: + """Client for managing Auth provider configurations.""" + + PROVIDER_CONFIG_URL = 'https://identitytoolkit.googleapis.com/v2' + + def __init__(self, http_client, project_id, tenant_id=None, url_override=None): + self.http_client = http_client + url_prefix = url_override or self.PROVIDER_CONFIG_URL + self.base_url = f'{url_prefix}/projects/{project_id}' + if tenant_id: + self.base_url += f'/tenants/{tenant_id}' + + def get_oidc_provider_config(self, provider_id): + _validate_oidc_provider_id(provider_id) + body = self._make_request('get', f'/oauthIdpConfigs/{provider_id}') + return OIDCProviderConfig(body) + + def create_oidc_provider_config( + self, provider_id, client_id, issuer, display_name=None, enabled=None, + client_secret=None, id_token_response_type=None, code_response_type=None): + """Creates a new OIDC provider config from the given parameters.""" + _validate_oidc_provider_id(provider_id) + req = { + 'clientId': _validate_non_empty_string(client_id, 'client_id'), + 'issuer': _validate_url(issuer, 'issuer'), + } + if display_name is not None: + req['displayName'] = 
_auth_utils.validate_string(display_name, 'display_name') + if enabled is not None: + req['enabled'] = _auth_utils.validate_boolean(enabled, 'enabled') + + response_type = {} + if id_token_response_type is False and code_response_type is False: + raise ValueError('At least one response type must be returned.') + if id_token_response_type is not None: + response_type['idToken'] = _auth_utils.validate_boolean( + id_token_response_type, 'id_token_response_type') + if code_response_type is not None: + response_type['code'] = _auth_utils.validate_boolean( + code_response_type, 'code_response_type') + if code_response_type: + req['clientSecret'] = _validate_non_empty_string(client_secret, 'client_secret') + if response_type: + req['responseType'] = response_type + + params = f'oauthIdpConfigId={provider_id}' + body = self._make_request('post', '/oauthIdpConfigs', json=req, params=params) + return OIDCProviderConfig(body) + + def update_oidc_provider_config( + self, provider_id, client_id=None, issuer=None, display_name=None, + enabled=None, client_secret=None, id_token_response_type=None, + code_response_type=None): + """Updates an existing OIDC provider config with the given parameters.""" + _validate_oidc_provider_id(provider_id) + req = {} + if display_name is not None: + if display_name == _user_mgt.DELETE_ATTRIBUTE: + req['displayName'] = None + else: + req['displayName'] = _auth_utils.validate_string(display_name, 'display_name') + if enabled is not None: + req['enabled'] = _auth_utils.validate_boolean(enabled, 'enabled') + if client_id: + req['clientId'] = _validate_non_empty_string(client_id, 'client_id') + if issuer: + req['issuer'] = _validate_url(issuer, 'issuer') + + response_type = {} + if id_token_response_type is False and code_response_type is False: + raise ValueError('At least one response type must be returned.') + if id_token_response_type is not None: + response_type['idToken'] = _auth_utils.validate_boolean( + id_token_response_type, 
'id_token_response_type') + if code_response_type is not None: + response_type['code'] = _auth_utils.validate_boolean( + code_response_type, 'code_response_type') + if code_response_type: + req['clientSecret'] = _validate_non_empty_string(client_secret, 'client_secret') + if response_type: + req['responseType'] = response_type + + if not req: + raise ValueError('At least one parameter must be specified for update.') + + update_mask = _auth_utils.build_update_mask(req) + params = f'updateMask={",".join(update_mask)}' + url = f'/oauthIdpConfigs/{provider_id}' + body = self._make_request('patch', url, json=req, params=params) + return OIDCProviderConfig(body) + + def delete_oidc_provider_config(self, provider_id): + _validate_oidc_provider_id(provider_id) + self._make_request('delete', f'/oauthIdpConfigs/{provider_id}') + + def list_oidc_provider_configs(self, page_token=None, max_results=MAX_LIST_CONFIGS_RESULTS): + return _ListOIDCProviderConfigsPage( + self._fetch_oidc_provider_configs, page_token, max_results) + + def _fetch_oidc_provider_configs(self, page_token=None, max_results=MAX_LIST_CONFIGS_RESULTS): + return self._fetch_provider_configs('/oauthIdpConfigs', page_token, max_results) + + def get_saml_provider_config(self, provider_id): + _validate_saml_provider_id(provider_id) + body = self._make_request('get', f'/inboundSamlConfigs/{provider_id}') + return SAMLProviderConfig(body) + + def create_saml_provider_config( + self, provider_id, idp_entity_id, sso_url, x509_certificates, + rp_entity_id, callback_url, display_name=None, enabled=None): + """Creates a new SAML provider config from the given parameters.""" + _validate_saml_provider_id(provider_id) + req = { + 'idpConfig': { + 'idpEntityId': _validate_non_empty_string(idp_entity_id, 'idp_entity_id'), + 'ssoUrl': _validate_url(sso_url, 'sso_url'), + 'idpCertificates': _validate_x509_certificates(x509_certificates), + }, + 'spConfig': { + 'spEntityId': _validate_non_empty_string(rp_entity_id, 
'rp_entity_id'), + 'callbackUri': _validate_url(callback_url, 'callback_url'), + }, + } + if display_name is not None: + req['displayName'] = _auth_utils.validate_string(display_name, 'display_name') + if enabled is not None: + req['enabled'] = _auth_utils.validate_boolean(enabled, 'enabled') + + params = f'inboundSamlConfigId={provider_id}' + body = self._make_request('post', '/inboundSamlConfigs', json=req, params=params) + return SAMLProviderConfig(body) + + def update_saml_provider_config( + self, provider_id, idp_entity_id=None, sso_url=None, x509_certificates=None, + rp_entity_id=None, callback_url=None, display_name=None, enabled=None): + """Updates an existing SAML provider config with the given parameters.""" + _validate_saml_provider_id(provider_id) + idp_config = {} + if idp_entity_id is not None: + idp_config['idpEntityId'] = _validate_non_empty_string(idp_entity_id, 'idp_entity_id') + if sso_url is not None: + idp_config['ssoUrl'] = _validate_url(sso_url, 'sso_url') + if x509_certificates is not None: + idp_config['idpCertificates'] = _validate_x509_certificates(x509_certificates) + + sp_config = {} + if rp_entity_id is not None: + sp_config['spEntityId'] = _validate_non_empty_string(rp_entity_id, 'rp_entity_id') + if callback_url is not None: + sp_config['callbackUri'] = _validate_url(callback_url, 'callback_url') + + req = {} + if display_name is not None: + if display_name == _user_mgt.DELETE_ATTRIBUTE: + req['displayName'] = None + else: + req['displayName'] = _auth_utils.validate_string(display_name, 'display_name') + if enabled is not None: + req['enabled'] = _auth_utils.validate_boolean(enabled, 'enabled') + if idp_config: + req['idpConfig'] = idp_config + if sp_config: + req['spConfig'] = sp_config + + if not req: + raise ValueError('At least one parameter must be specified for update.') + + update_mask = _auth_utils.build_update_mask(req) + params = f'updateMask={",".join(update_mask)}' + url = f'/inboundSamlConfigs/{provider_id}' + body = 
self._make_request('patch', url, json=req, params=params) + return SAMLProviderConfig(body) + + def delete_saml_provider_config(self, provider_id): + _validate_saml_provider_id(provider_id) + self._make_request('delete', f'/inboundSamlConfigs/{provider_id}') + + def list_saml_provider_configs(self, page_token=None, max_results=MAX_LIST_CONFIGS_RESULTS): + return _ListSAMLProviderConfigsPage( + self._fetch_saml_provider_configs, page_token, max_results) + + def _fetch_saml_provider_configs(self, page_token=None, max_results=MAX_LIST_CONFIGS_RESULTS): + return self._fetch_provider_configs('/inboundSamlConfigs', page_token, max_results) + + def _fetch_provider_configs(self, path, page_token=None, max_results=MAX_LIST_CONFIGS_RESULTS): + """Fetches a page of auth provider configs""" + if page_token is not None: + if not isinstance(page_token, str) or not page_token: + raise ValueError('Page token must be a non-empty string.') + if not isinstance(max_results, int): + raise ValueError('Max results must be an integer.') + if max_results < 1 or max_results > MAX_LIST_CONFIGS_RESULTS: + raise ValueError( + 'Max results must be a positive integer less than or equal to ' + f'{MAX_LIST_CONFIGS_RESULTS}.') + + params = f'pageSize={max_results}' + if page_token: + params += f'&pageToken={page_token}' + return self._make_request('get', path, params=params) + + def _make_request(self, method, path, **kwargs): + url = f'{self.base_url}{path}' + try: + return self.http_client.body(method, url, **kwargs) + except requests.exceptions.RequestException as error: + raise _auth_utils.handle_auth_backend_error(error) + + +def _validate_oidc_provider_id(provider_id): + if not isinstance(provider_id, str): + raise ValueError( + f'Invalid OIDC provider ID: {provider_id}. 
Provider ID must be a non-empty string.') + if not provider_id.startswith('oidc.'): + raise ValueError(f'Invalid OIDC provider ID: {provider_id}.') + return provider_id + + +def _validate_saml_provider_id(provider_id): + if not isinstance(provider_id, str): + raise ValueError( + f'Invalid SAML provider ID: {provider_id}. Provider ID must be a non-empty string.') + if not provider_id.startswith('saml.'): + raise ValueError(f'Invalid SAML provider ID: {provider_id}.') + return provider_id + + +def _validate_non_empty_string(value, label): + """Validates that the given value is a non-empty string.""" + if not isinstance(value, str): + raise ValueError(f'Invalid type for {label}: {value}.') + if not value: + raise ValueError(f'{label} must not be empty.') + return value + + +def _validate_url(url, label): + """Validates that the given value is a well-formed URL string.""" + if not isinstance(url, str) or not url: + raise ValueError( + f'Invalid photo URL: "{url}". {label} must be a non-empty string.') + try: + parsed = parse.urlparse(url) + if not parsed.netloc: + raise ValueError(f'Malformed {label}: "{url}".') + return url + except Exception as exception: + raise ValueError(f'Malformed {label}: "{url}".') from exception + + +def _validate_x509_certificates(x509_certificates): + if not isinstance(x509_certificates, list) or not x509_certificates: + raise ValueError('x509_certificates must be a non-empty list.') + if not all(isinstance(cert, str) and cert for cert in x509_certificates): + raise ValueError('x509_certificates must only contain non-empty strings.') + return [{'x509Certificate': cert} for cert in x509_certificates] diff --git a/firebase_admin/_auth_utils.py b/firebase_admin/_auth_utils.py new file mode 100644 index 000000000..8f3c419a7 --- /dev/null +++ b/firebase_admin/_auth_utils.py @@ -0,0 +1,491 @@ +# Copyright 2018 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase auth utils.""" + +import json +import os +import re +from urllib import parse + +from firebase_admin import exceptions +from firebase_admin import _utils + + +EMULATOR_HOST_ENV_VAR = 'FIREBASE_AUTH_EMULATOR_HOST' +MAX_CLAIMS_PAYLOAD_SIZE = 1000 +RESERVED_CLAIMS = set([ + 'acr', 'amr', 'at_hash', 'aud', 'auth_time', 'azp', 'cnf', 'c_hash', 'exp', 'iat', + 'iss', 'jti', 'nbf', 'nonce', 'sub', 'firebase', +]) +VALID_EMAIL_ACTION_TYPES = set(['VERIFY_EMAIL', 'EMAIL_SIGNIN', 'PASSWORD_RESET']) + + +class PageIterator: + """An iterator that allows iterating over a sequence of items, one at a time. + + This implementation loads a page of items into memory, and iterates on them. When the whole + page has been traversed, it loads another page. This class never keeps more than one page + of entries in memory. 
+    """
+
+    def __init__(self, current_page):
+        if not current_page:
+            raise ValueError('Current page must not be None.')
+
+        self._current_page = current_page
+        self._iter = None
+
+    def __next__(self):
+        if self._iter is None:
+            self._iter = iter(self.items)
+
+        try:
+            return next(self._iter)
+        except StopIteration:
+            if self._current_page.has_next_page:
+                self._current_page = self._current_page.get_next_page()
+                self._iter = iter(self.items)
+
+                return next(self._iter)
+
+            raise
+
+    def __iter__(self):
+        return self
+
+    @property
+    def items(self):
+        raise NotImplementedError
+
+
+def get_emulator_host():
+    emulator_host = os.getenv(EMULATOR_HOST_ENV_VAR, '')
+    if emulator_host and '//' in emulator_host:
+        raise ValueError(
+            f'Invalid {EMULATOR_HOST_ENV_VAR}: "{emulator_host}". '
+            'It must follow format "host:port".')
+    return emulator_host
+
+
+def is_emulated():
+    return get_emulator_host() != ''
+
+
+def validate_uid(uid, required=False):
+    if uid is None and not required:
+        return None
+    if not isinstance(uid, str) or not uid or len(uid) > 128:
+        raise ValueError(
+            f'Invalid uid: "{uid}". The uid must be a non-empty string with no more than 128 '
+            'characters.')
+    return uid
+
+def validate_email(email, required=False):
+    if email is None and not required:
+        return None
+    if not isinstance(email, str) or not email:
+        raise ValueError(
+            f'Invalid email: "{email}". Email must be a non-empty string.')
+    parts = email.split('@')
+    if len(parts) != 2 or not parts[0] or not parts[1]:
+        raise ValueError(f'Malformed email address string: "{email}".')
+    return email
+
+def validate_phone(phone, required=False):
+    """Validates the specified phone number.
+
+    Phone number validation is very lax here. Backend will enforce E.164 spec compliance, and
+    normalize accordingly. Here we check if the number starts with + sign, and contains at
+    least one alphanumeric character.
+ """ + if phone is None and not required: + return None + if not isinstance(phone, str) or not phone: + raise ValueError( + f'Invalid phone number: "{phone}". Phone number must be a non-empty string.') + if not phone.startswith('+') or not re.search('[a-zA-Z0-9]', phone): + raise ValueError( + f'Invalid phone number: "{phone}". Phone number must be a valid, E.164 ' + 'compliant identifier.') + return phone + +def validate_password(password, required=False): + if password is None and not required: + return None + if not isinstance(password, str) or len(password) < 6: + raise ValueError( + 'Invalid password string. Password must be a string at least 6 characters long.') + return password + +def validate_bytes(value, label, required=False): + if value is None and not required: + return None + if not isinstance(value, bytes) or not value: + raise ValueError(f'{label} must be a non-empty byte sequence.') + return value + +def validate_display_name(display_name, required=False): + if display_name is None and not required: + return None + if not isinstance(display_name, str) or not display_name: + raise ValueError( + f'Invalid display name: "{display_name}". Display name must be a non-empty ' + 'string.') + return display_name + +def validate_provider_id(provider_id, required=True): + if provider_id is None and not required: + return None + if not isinstance(provider_id, str) or not provider_id: + raise ValueError( + f'Invalid provider ID: "{provider_id}". Provider ID must be a non-empty string.') + return provider_id + +def validate_provider_uid(provider_uid, required=True): + if provider_uid is None and not required: + return None + if not isinstance(provider_uid, str) or not provider_uid: + raise ValueError( + f'Invalid provider UID: "{provider_uid}". 
Provider UID must be a non-empty string.')
+    return provider_uid
+
+def validate_photo_url(photo_url, required=False):
+    """Parses and validates the given URL string."""
+    if photo_url is None and not required:
+        return None
+    if not isinstance(photo_url, str) or not photo_url:
+        raise ValueError(
+            f'Invalid photo URL: "{photo_url}". Photo URL must be a non-empty string.')
+    try:
+        parsed = parse.urlparse(photo_url)
+        if not parsed.netloc:
+            raise ValueError(f'Malformed photo URL: "{photo_url}".')
+        return photo_url
+    except Exception as err:
+        raise ValueError(f'Malformed photo URL: "{photo_url}".') from err
+
+def validate_timestamp(timestamp, label, required=False):
+    """Validates the given timestamp value. Timestamps must be positive integers."""
+    if timestamp is None and not required:
+        return None
+    if isinstance(timestamp, bool):
+        raise ValueError('Boolean value specified as timestamp.')
+    try:
+        timestamp_int = int(timestamp)
+    except TypeError as err:
+        raise ValueError(f'Invalid type for timestamp value: {timestamp}.') from err
+    if timestamp_int != timestamp:
+        raise ValueError(f'{label} must be a numeric value and a whole number.')
+    if timestamp_int <= 0:
+        raise ValueError(f'{label} timestamp must be a positive integer.')
+    return timestamp_int
+
+def validate_int(value, label, low=None, high=None):
+    """Validates that the given value represents an integer.
+
+    There are several ways to represent an integer in Python (e.g. 2, 2L, 2.0). This method allows
+    for all such representations except for booleans. Booleans also behave like integers, but
+    always translate to 1 and 0. Passing a boolean to an API that expects integers is most likely
+    a developer error.
+ """ + if value is None or isinstance(value, bool): + raise ValueError(f'Invalid type for integer value: {value}.') + try: + val_int = int(value) + except TypeError as err: + raise ValueError(f'Invalid type for integer value: {value}.') from err + if val_int != value: + # This will be True for non-numeric values like '2' and non-whole numbers like 2.5. + raise ValueError(f'{label} must be a numeric value and a whole number.') + if low is not None and val_int < low: + raise ValueError(f'{label} must not be smaller than {low}.') + if high is not None and val_int > high: + raise ValueError(f'{label} must not be larger than {high}.') + return val_int + +def validate_string(value, label): + """Validates that the given value is a string.""" + if not isinstance(value, str): + raise ValueError(f'Invalid type for {label}: {value}.') + return value + +def validate_boolean(value, label): + """Validates that the given value is a boolean.""" + if not isinstance(value, bool): + raise ValueError(f'Invalid type for {label}: {value}.') + return value + +def validate_custom_claims(custom_claims, required=False): + """Validates the specified custom claims. + + Custom claims must be specified as a JSON string. The string must not exceed 1000 + characters, and the parsed JSON payload must not contain reserved JWT claims. 
+ """ + if custom_claims is None and not required: + return None + claims_str = str(custom_claims) + if len(claims_str) > MAX_CLAIMS_PAYLOAD_SIZE: + raise ValueError( + f'Custom claims payload must not exceed {MAX_CLAIMS_PAYLOAD_SIZE} characters.') + try: + parsed = json.loads(claims_str) + except Exception as err: + raise ValueError('Failed to parse custom claims string as JSON.') from err + + if not isinstance(parsed, dict): + raise ValueError('Custom claims must be parseable as a JSON object.') + invalid_claims = RESERVED_CLAIMS.intersection(set(parsed.keys())) + if len(invalid_claims) > 1: + joined = ', '.join(sorted(invalid_claims)) + raise ValueError(f'Claims "{joined}" are reserved, and must not be set.') + if len(invalid_claims) == 1: + raise ValueError( + f'Claim "{invalid_claims.pop()}" is reserved, and must not be set.') + return claims_str + +def validate_action_type(action_type): + if action_type not in VALID_EMAIL_ACTION_TYPES: + raise ValueError( + f'Invalid action type provided action_type: {action_type}. Valid values are ' + f'{", ".join(VALID_EMAIL_ACTION_TYPES)}') + return action_type + +def validate_provider_ids(provider_ids, required=False): + if not provider_ids: + if required: + raise ValueError('Invalid provider IDs. 
Provider ids should be provided') + return [] + for provider_id in provider_ids: + validate_provider_id(provider_id, True) + return provider_ids + +def build_update_mask(params): + """Creates an update mask list from the given dictionary.""" + mask = [] + for key, value in params.items(): + if isinstance(value, dict): + child_mask = build_update_mask(value) + for child in child_mask: + mask.append(f'{key}.{child}') + else: + mask.append(key) + + return sorted(mask) + + +class UidAlreadyExistsError(exceptions.AlreadyExistsError): + """The user with the provided uid already exists.""" + + default_message = 'The user with the provided uid already exists' + + def __init__(self, message, cause, http_response): + exceptions.AlreadyExistsError.__init__(self, message, cause, http_response) + + +class EmailAlreadyExistsError(exceptions.AlreadyExistsError): + """The user with the provided email already exists.""" + + default_message = 'The user with the provided email already exists' + + def __init__(self, message, cause, http_response): + exceptions.AlreadyExistsError.__init__(self, message, cause, http_response) + + +class InsufficientPermissionError(exceptions.PermissionDeniedError): + """The credential used to initialize the SDK lacks required permissions.""" + + default_message = ('The credential used to initialize the SDK has insufficient ' + 'permissions to perform the requested operation. 
class InvalidDynamicLinkDomainError(exceptions.InvalidArgumentError):
    """Dynamic link domain in ActionCodeSettings is not authorized."""

    default_message = 'Dynamic link domain specified in ActionCodeSettings is not authorized'

    def __init__(self, message, cause, http_response):
        super().__init__(message, cause, http_response)


class InvalidHostingLinkDomainError(exceptions.InvalidArgumentError):
    """The provided hosting link domain is not configured in Firebase Hosting
    or is not owned by the current project."""

    default_message = ('The provided hosting link domain is not configured in Firebase '
                       'Hosting or is not owned by the current project')

    def __init__(self, message, cause, http_response):
        super().__init__(message, cause, http_response)


class InvalidIdTokenError(exceptions.InvalidArgumentError):
    """The provided ID token is not a valid Firebase ID token."""

    default_message = 'The provided ID token is invalid'

    def __init__(self, message, cause=None, http_response=None):
        super().__init__(message, cause, http_response)


class PhoneNumberAlreadyExistsError(exceptions.AlreadyExistsError):
    """The user with the provided phone number already exists."""

    default_message = 'The user with the provided phone number already exists'

    def __init__(self, message, cause, http_response):
        super().__init__(message, cause, http_response)


class UnexpectedResponseError(exceptions.UnknownError):
    """Backend service responded with an unexpected or malformed response."""

    def __init__(self, message, cause=None, http_response=None):
        super().__init__(message, cause, http_response)


class UserNotFoundError(exceptions.NotFoundError):
    """No user record found for the specified identifier."""

    default_message = 'No user record found for the given identifier'

    def __init__(self, message, cause=None, http_response=None):
        super().__init__(message, cause, http_response)


class EmailNotFoundError(exceptions.NotFoundError):
    """No user record found for the specified email."""

    default_message = 'No user record found for the given email'

    def __init__(self, message, cause=None, http_response=None):
        super().__init__(message, cause, http_response)


class TenantNotFoundError(exceptions.NotFoundError):
    """No tenant found for the specified identifier."""

    default_message = 'No tenant found for the given identifier'

    def __init__(self, message, cause=None, http_response=None):
        super().__init__(message, cause, http_response)


class TenantIdMismatchError(exceptions.InvalidArgumentError):
    """Missing or invalid tenant ID field in the given JWT."""

    def __init__(self, message):
        super().__init__(message)


class ConfigurationNotFoundError(exceptions.NotFoundError):
    """No auth provider found for the specified identifier."""

    default_message = 'No auth provider found for the given identifier'

    def __init__(self, message, cause=None, http_response=None):
        super().__init__(message, cause, http_response)


class UserDisabledError(exceptions.InvalidArgumentError):
    """An operation failed due to a user record being disabled."""

    default_message = 'The user record is disabled'

    def __init__(self, message, cause=None, http_response=None):
        super().__init__(message, cause, http_response)


class TooManyAttemptsTryLaterError(exceptions.ResourceExhaustedError):
    """Rate limited because of too many attempts."""

    def __init__(self, message, cause=None, http_response=None):
        super().__init__(message, cause, http_response)


class ResetPasswordExceedLimitError(exceptions.ResourceExhaustedError):
    """Reset password emails exceeded their limits."""

    def __init__(self, message, cause=None, http_response=None):
        super().__init__(message, cause, http_response)


# Maps backend error codes (the leading token of the backend's "message" field) to the
# typed exception that should be raised for them. Codes without an entry fall back to the
# generic requests-error handling in handle_auth_backend_error.
_CODE_TO_EXC_TYPE = {
    'CONFIGURATION_NOT_FOUND': ConfigurationNotFoundError,
    'DUPLICATE_EMAIL': EmailAlreadyExistsError,
    'DUPLICATE_LOCAL_ID': UidAlreadyExistsError,
    'EMAIL_EXISTS': EmailAlreadyExistsError,
    'EMAIL_NOT_FOUND': EmailNotFoundError,
    'INSUFFICIENT_PERMISSION': InsufficientPermissionError,
    'INVALID_DYNAMIC_LINK_DOMAIN': InvalidDynamicLinkDomainError,
    'INVALID_HOSTING_LINK_DOMAIN': InvalidHostingLinkDomainError,
    'INVALID_ID_TOKEN': InvalidIdTokenError,
    'PHONE_NUMBER_EXISTS': PhoneNumberAlreadyExistsError,
    'TENANT_NOT_FOUND': TenantNotFoundError,
    'USER_NOT_FOUND': UserNotFoundError,
    'TOO_MANY_ATTEMPTS_TRY_LATER': TooManyAttemptsTryLaterError,
    'RESET_PASSWORD_EXCEED_LIMIT': ResetPasswordExceedLimitError,
}


def handle_auth_backend_error(error):
    """Converts a requests error received from the Firebase Auth service into a FirebaseError.

    Args:
        error: A ``requests.exceptions.RequestException`` raised while calling the Auth backend.

    Returns:
        FirebaseError: A typed exception from ``_CODE_TO_EXC_TYPE`` when the backend error code
        is recognized; otherwise a generic error produced by ``_utils.handle_requests_error``.
    """
    if error.response is None:
        # No HTTP response at all (e.g. connection error): defer to the generic handler.
        return _utils.handle_requests_error(error)

    code, custom_message = _parse_error_body(error.response)
    if not code:
        msg = f'Unexpected error response: {error.response.content.decode()}'
        return _utils.handle_requests_error(error, message=msg)

    exc_type = _CODE_TO_EXC_TYPE.get(code)
    msg = _build_error_message(code, exc_type, custom_message)
    if not exc_type:
        return _utils.handle_requests_error(error, message=msg)

    return exc_type(msg, cause=error, http_response=error.response)


def _parse_error_body(response):
    """Parses the given error response to extract Auth error code and message.

    Args:
        response: The HTTP response object returned by the Auth backend.

    Returns:
        tuple: ``(code, custom_message)`` where either element may be ``None`` when the
        response body is not JSON or lacks the expected structure.
    """
    error_dict = {}
    try:
        parsed_body = response.json()
        if isinstance(parsed_body, dict):
            error_dict = parsed_body.get('error', {})
    except ValueError:
        # Body was not valid JSON; fall through with an empty error dict.
        pass

    # Auth error response format: {"error": {"message": "AUTH_ERROR_CODE: Optional text"}}
    code = error_dict.get('message') if isinstance(error_dict, dict) else None
    custom_message = None
    if code:
        # Split on the first colon: the prefix is the error code, the rest (if any) is a
        # human-readable message appended by the backend.
        prefix, sep, remainder = code.partition(':')
        if sep:
            custom_message = remainder.strip()
            code = prefix.strip()

    return code, custom_message


def _build_error_message(code, exc_type, custom_message):
    """Builds a message of the form '<default> (<CODE>). <custom>' for the given error."""
    fallback = 'Error while calling Auth service'
    default_message = getattr(exc_type, 'default_message', fallback) if exc_type else fallback
    ext = f' {custom_message}' if custom_message else ''
    return f'{default_message} ({code}).{ext}'
"""

from __future__ import annotations
import logging
from typing import Any, Dict, Generator, Optional, Tuple, Union
import httpx
import requests.adapters
from requests.packages.urllib3.util import retry # pylint: disable=import-error
from google.auth import credentials
from google.auth import transport
from google.auth.transport import requests as google_auth_requests

from firebase_admin import _utils
from firebase_admin._retry import HttpxRetry, HttpxRetryTransport

logger = logging.getLogger(__name__)

# Newer urllib3 releases renamed the Retry keyword 'method_whitelist' to 'allowed_methods';
# feature-detect on Retry.DEFAULT so both old and new versions are supported.
if hasattr(retry.Retry.DEFAULT, 'allowed_methods'):
    _ANY_METHOD = {'allowed_methods': None}
else:
    _ANY_METHOD = {'method_whitelist': None}
# Default retry configuration: Retries once on low-level connection and socket read errors.
# Retries up to 4 times on HTTP 500 and 503 errors, with exponential backoff. Returns the
# last response upon exhausting all retries.
DEFAULT_RETRY_CONFIG = retry.Retry(
    connect=1, read=1, status=4, status_forcelist=[500, 503],
    raise_on_status=False, backoff_factor=0.5, **_ANY_METHOD)

DEFAULT_HTTPX_RETRY_CONFIG = HttpxRetry(
    max_retries=4, status_forcelist=[500, 503], backoff_factor=0.5)


# Default HTTP timeout (seconds) applied when callers do not pass an explicit timeout.
DEFAULT_TIMEOUT_SECONDS = 120

# Added to every outgoing request for API usage metrics attribution.
METRICS_HEADERS = {
    'x-goog-api-client': _utils.get_metrics_header(),
}

class HttpClient:
    """Base HTTP client used to make HTTP calls.

    HttpClient maintains an HTTP session, and handles request authentication and retries if
    necessary.
    """

    def __init__(
            self, credential=None, session=None, base_url='', headers=None,
            retries=DEFAULT_RETRY_CONFIG, timeout=DEFAULT_TIMEOUT_SECONDS):
        """Creates a new HttpClient instance from the provided arguments.

        If a credential is provided, initializes a new HTTP session authorized with it. If neither
        a credential nor a session is provided, initializes a new unauthorized session.

        Args:
            credential: A Google credential that can be used to authenticate requests (optional).
            session: A custom HTTP session (optional).
            base_url: A URL prefix to be added to all outgoing requests (optional).
            headers: A map of headers to be added to all outgoing requests (optional).
            retries: A urllib retry configuration. Default settings would retry once for low-level
                connection and socket read errors, and up to 4 times for HTTP 500 and 503 errors.
                Pass a False value to disable retries (optional).
            timeout: HTTP timeout in seconds. Defaults to 120 seconds when not specified. Set to
                None to disable timeouts (optional).
        """
        if credential:
            # A credential takes precedence over any custom session argument.
            self._session = transport.requests.AuthorizedSession(credential)
        elif session:
            self._session = session
        else:
            self._session = requests.Session() # pylint: disable=redefined-variable-type

        if headers:
            self._session.headers.update(headers)
        if retries:
            # Mount retry-aware adapters for both schemes so the retry policy applies everywhere.
            self._session.mount('http://', requests.adapters.HTTPAdapter(max_retries=retries))
            self._session.mount('https://', requests.adapters.HTTPAdapter(max_retries=retries))
        self._base_url = base_url
        self._timeout = timeout

    @property
    def session(self):
        return self._session

    @property
    def base_url(self):
        return self._base_url

    @property
    def timeout(self):
        return self._timeout

    def parse_body(self, resp):
        # Subclasses decide how to interpret response bodies (e.g. JSON).
        raise NotImplementedError

    def request(self, method, url, **kwargs):
        """Makes an HTTP call using the Python requests library.

        This is the sole entry point to the requests library. All other helper methods in this
        class call this method to send HTTP requests out. Refer to
        http://docs.python-requests.org/en/master/api/ for more information on supported options
        and features.

        Args:
            method: HTTP method name as a string (e.g. get, post).
            url: URL of the remote endpoint.
            **kwargs: An additional set of keyword arguments to be passed into the requests API
                (e.g. json, params, timeout).

        Returns:
            Response: An HTTP response object.

        Raises:
            RequestException: Any requests exceptions encountered while making the HTTP call.
        """
        if 'timeout' not in kwargs:
            kwargs['timeout'] = self.timeout
        # Merge metrics headers into (possibly caller-supplied) per-request headers.
        kwargs.setdefault('headers', {}).update(METRICS_HEADERS)
        resp = self._session.request(method, self.base_url + url, **kwargs)
        resp.raise_for_status()
        return resp

    def headers(self, method, url, **kwargs):
        resp = self.request(method, url, **kwargs)
        return resp.headers

    def body_and_response(self, method, url, **kwargs):
        resp = self.request(method, url, **kwargs)
        return self.parse_body(resp), resp

    def body(self, method, url, **kwargs):
        resp = self.request(method, url, **kwargs)
        return self.parse_body(resp)

    def headers_and_body(self, method, url, **kwargs):
        resp = self.request(method, url, **kwargs)
        return resp.headers, self.parse_body(resp)

    def close(self):
        self._session.close()
        # Drop the session reference so any use after close() fails fast.
        self._session = None

class JsonHttpClient(HttpClient):
    """An HTTP client that parses response messages as JSON."""

    def __init__(self, **kwargs):
        HttpClient.__init__(self, **kwargs)

    def parse_body(self, resp):
        return resp.json()

class GoogleAuthCredentialFlow(httpx.Auth):
    """Google Auth Credential Auth Flow"""
    def __init__(self, credential: credentials.Credentials):
        self._credential = credential
        # Maximum number of explicit credential refreshes attempted per request.
        self._max_refresh_attempts = 2
        # Status codes that trigger a credential refresh and retry.
        self._refresh_status_codes = (401,)

    def apply_auth_headers(
            self,
            request: httpx.Request,
            auth_request: google_auth_requests.Request
    ) -> None:
        """A helper function that refreshes credentials if needed and mutates the request headers
        to contain access token and any other Google Auth headers."""

        logger.debug(
            'Attempting to apply auth headers. Credential validity before: %s',
            self._credential.valid
        )
        # before_request() refreshes the credential when it is expired and injects the
        # Authorization (and related) headers into the request in place.
        self._credential.before_request(
            auth_request, request.method, str(request.url), request.headers
        )
        logger.debug('Auth headers applied. Credential validity after: %s', self._credential.valid)

    def auth_flow(self, request: httpx.Request) -> Generator[httpx.Request, httpx.Response, None]:
        _original_headers = request.headers.copy()
        _credential_refresh_attempt = 0

        # Create a Google auth request object to be used for refreshing credentials
        auth_request = google_auth_requests.Request()

        while True:
            # Copy original headers for each attempt
            request.headers = _original_headers.copy()

            # Apply auth headers (which might include an implicit refresh if token is expired)
            self.apply_auth_headers(request, auth_request)

            logger.debug(
                'Dispatching request, attempt %d of %d',
                _credential_refresh_attempt, self._max_refresh_attempts
            )
            response: httpx.Response = yield request

            if response.status_code in self._refresh_status_codes:
                if _credential_refresh_attempt < self._max_refresh_attempts:
                    logger.debug(
                        'Received status %d. Attempting explicit credential refresh. \
                        Attempt %d of %d.',
                        response.status_code,
                        _credential_refresh_attempt + 1,
                        self._max_refresh_attempts
                    )
                    # Explicitly force a credentials refresh
                    self._credential.refresh(auth_request)
                    _credential_refresh_attempt += 1
                else:
                    logger.debug(
                        'Received status %d, but max auth refresh attempts (%d) reached. \
                        Returning last response.',
                        response.status_code, self._max_refresh_attempts
                    )
                    break
            else:
                # Status code is not one that requires a refresh, so break and return response
                logger.debug(
                    'Status code %d does not require refresh. Returning response.',
                    response.status_code
                )
                break
        # The last yielded response is automatically returned by httpx's auth flow.

class HttpxAsyncClient():
    """Async HTTP client used to make HTTP/2 calls using HTTPX.

    HttpxAsyncClient maintains an async HTTPX client, handles request authentication, and retries
    if necessary.
    """
    def __init__(
            self,
            credential: Optional[credentials.Credentials] = None,
            base_url: str = '',
            headers: Optional[Union[httpx.Headers, Dict[str, str]]] = None,
            retry_config: HttpxRetry = DEFAULT_HTTPX_RETRY_CONFIG,
            timeout: int = DEFAULT_TIMEOUT_SECONDS,
            http2: bool = True
    ) -> None:
        """Creates a new HttpxAsyncClient instance from the provided arguments.

        If a credential is provided, initializes a new async HTTPX client authorized with it.
        Otherwise, initializes a new unauthorized async HTTPX client.

        Args:
            credential: A Google credential that can be used to authenticate requests (optional).
            base_url: A URL prefix to be added to all outgoing requests (optional).
            headers: A map of headers to be added to all outgoing requests (optional).
            retry_config: A HttpxRetry configuration. Default settings would retry up to 4 times for
                HTTP 500 and 503 errors (optional).
            timeout: HTTP timeout in seconds. Defaults to 120 seconds when not specified (optional).
            http2: A boolean indicating if HTTP/2 support should be enabled. Defaults to `True` when
                not specified (optional).
        """
        self._base_url = base_url
        self._timeout = timeout
        # Metrics headers are always included; caller-provided headers are merged in first.
        self._headers = {**headers, **METRICS_HEADERS} if headers else {**METRICS_HEADERS}
        self._retry_config = retry_config

        # Only set up retries on urls starting with 'http://' and 'https://'
        self._mounts = {
            'http://': HttpxRetryTransport(retry=self._retry_config, http2=http2),
            'https://': HttpxRetryTransport(retry=self._retry_config, http2=http2)
        }

        if credential:
            self._async_client = httpx.AsyncClient(
                http2=http2,
                timeout=self._timeout,
                headers=self._headers,
                auth=GoogleAuthCredentialFlow(credential), # Add auth flow for credentials.
                mounts=self._mounts
            )
        else:
            self._async_client = httpx.AsyncClient(
                http2=http2,
                timeout=self._timeout,
                headers=self._headers,
                mounts=self._mounts
            )

    @property
    def base_url(self):
        return self._base_url

    @property
    def timeout(self):
        return self._timeout

    @property
    def async_client(self):
        return self._async_client

    async def request(self, method: str, url: str, **kwargs: Any) -> httpx.Response:
        """Makes an HTTP call using the HTTPX library.

        This is the sole entry point to the HTTPX library. All other helper methods in this
        class call this method to send HTTP requests out. Refer to
        https://www.python-httpx.org/api/ for more information on supported options
        and features.

        Args:
            method: HTTP method name as a string (e.g. get, post).
            url: URL of the remote endpoint.
            **kwargs: An additional set of keyword arguments to be passed into the HTTPX API
                (e.g. json, params, timeout).

        Returns:
            Response: An HTTPX response object.

        Raises:
            HTTPError: Any HTTPX exceptions encountered while making the HTTP call.
            RequestException: Any requests exceptions encountered while making the HTTP call.
        """
        if 'timeout' not in kwargs:
            kwargs['timeout'] = self.timeout
        resp = await self._async_client.request(method, self.base_url + url, **kwargs)
        # httpx's raise_for_status() returns the response itself on success.
        return resp.raise_for_status()

    async def headers(self, method: str, url: str, **kwargs: Any) -> httpx.Headers:
        resp = await self.request(method, url, **kwargs)
        return resp.headers

    async def body_and_response(
            self, method: str, url: str, **kwargs: Any) -> Tuple[Any, httpx.Response]:
        resp = await self.request(method, url, **kwargs)
        return self.parse_body(resp), resp

    async def body(self, method: str, url: str, **kwargs: Any) -> Any:
        resp = await self.request(method, url, **kwargs)
        return self.parse_body(resp)

    async def headers_and_body(
            self, method: str, url: str, **kwargs: Any) -> Tuple[httpx.Headers, Any]:
        resp = await self.request(method, url, **kwargs)
        return resp.headers, self.parse_body(resp)

    def parse_body(self, resp: httpx.Response) -> Any:
        return resp.json()

    async def aclose(self) -> None:
        await self._async_client.aclose()

"""Encoding and validation utils for the messaging (FCM) module."""

import datetime
import json
import math
import numbers
import re

from firebase_admin import _messaging_utils


class Message:
    """A message that can be sent via Firebase Cloud Messaging.

    Contains payload information as well as recipient information. In particular, the message must
    contain exactly one of token, topic or condition fields.

    Args:
        data: A dictionary of data fields (optional). All keys and values in the dictionary must be
            strings.
        notification: An instance of ``messaging.Notification`` (optional).
        android: An instance of ``messaging.AndroidConfig`` (optional).
        webpush: An instance of ``messaging.WebpushConfig`` (optional).
        apns: An instance of ``messaging.ApnsConfig`` (optional).
        fcm_options: An instance of ``messaging.FCMOptions`` (optional).
        token: The registration token of the device to which the message should be sent (optional).
        topic: Name of the FCM topic to which the message should be sent (optional). Topic name
            may contain the ``/topics/`` prefix.
        condition: The FCM condition to which the message should be sent (optional).
    """

    def __init__(self, data=None, notification=None, android=None, webpush=None, apns=None,
                 fcm_options=None, token=None, topic=None, condition=None):
        self.data = data
        self.notification = notification
        self.android = android
        self.webpush = webpush
        self.apns = apns
        self.fcm_options = fcm_options
        self.token = token
        self.topic = topic
        self.condition = condition

    def __str__(self):
        # Serializes via MessageEncoder so the string form matches the FCM wire format.
        return json.dumps(self, cls=MessageEncoder, sort_keys=True)


class MulticastMessage:
    """A message that can be sent to multiple tokens via Firebase Cloud Messaging.

    Args:
        tokens: A list of registration tokens of targeted devices.
        data: A dictionary of data fields (optional). All keys and values in the dictionary must be
            strings.
        notification: An instance of ``messaging.Notification`` (optional).
        android: An instance of ``messaging.AndroidConfig`` (optional).
        webpush: An instance of ``messaging.WebpushConfig`` (optional).
        apns: An instance of ``messaging.ApnsConfig`` (optional).
        fcm_options: An instance of ``messaging.FCMOptions`` (optional).
    """
    def __init__(self, tokens, data=None, notification=None, android=None, webpush=None, apns=None,
                 fcm_options=None):
        _Validators.check_string_list('MulticastMessage.tokens', tokens)
        # The FCM batch API accepts at most 500 tokens per multicast message.
        if len(tokens) > 500:
            raise ValueError('MulticastMessage.tokens must not contain more than 500 tokens.')
        self.tokens = tokens
        self.data = data
        self.notification = notification
        self.android = android
        self.webpush = webpush
        self.apns = apns
        self.fcm_options = fcm_options


class _Validators:
    """A collection of data validation utilities.

    Methods provided in this class raise ``ValueErrors`` if any validations fail. Each checker
    returns the validated value unchanged (or ``None`` for absent/empty input), so callers can
    use the return value directly when building payload dicts.
    """

    @classmethod
    def check_string(cls, label, value, non_empty=False):
        """Checks if the given value is a string."""
        if value is None:
            return None
        if not isinstance(value, str):
            if non_empty:
                raise ValueError(f'{label} must be a non-empty string.')
            raise ValueError(f'{label} must be a string.')
        if non_empty and not value:
            raise ValueError(f'{label} must be a non-empty string.')
        return value

    @classmethod
    def check_number(cls, label, value):
        if value is None:
            return None
        if not isinstance(value, numbers.Number):
            raise ValueError(f'{label} must be a number.')
        return value

    @classmethod
    def check_string_dict(cls, label, value):
        """Checks if the given value is a dictionary comprised only of string keys and values."""
        # An empty dict is treated the same as absent: it is dropped from the payload.
        if value is None or value == {}:
            return None
        if not isinstance(value, dict):
            raise ValueError(f'{label} must be a dictionary.')
        non_str = [k for k in value if not isinstance(k, str)]
        if non_str:
            raise ValueError(f'{label} must not contain non-string keys.')
        non_str = [v for v in value.values() if not isinstance(v, str)]
        if non_str:
            raise ValueError(f'{label} must not contain non-string values.')
        return value

    @classmethod
    def check_string_list(cls, label, value):
        """Checks if the given value is a list comprised only of strings."""
        if value is None or value == []:
            return None
        if not isinstance(value, list):
            raise ValueError(f'{label} must be a list of strings.')
        non_str = [k for k in value if not isinstance(k, str)]
        if non_str:
            raise ValueError(f'{label} must not contain non-string values.')
        return value

    @classmethod
    def check_number_list(cls, label, value):
        """Checks if the given value is a list comprised only of numbers."""
        if value is None or value == []:
            return None
        if not isinstance(value, list):
            raise ValueError(f'{label} must be a list of numbers.')
        non_number = [k for k in value if not isinstance(k, numbers.Number)]
        if non_number:
            raise ValueError(f'{label} must not contain non-number values.')
        return value

    @classmethod
    def check_analytics_label(cls, label, value):
        """Checks if the given value is a valid analytics label."""
        value = _Validators.check_string(label, value)
        # Analytics labels: 1-50 chars from the set [a-zA-Z0-9-_.~%].
        if value is not None and not re.match(r'^[a-zA-Z0-9-_.~%]{1,50}$', value):
            raise ValueError(f'Malformed {label}.')
        return value

    @classmethod
    def check_boolean(cls, label, value):
        """Checks if the given value is boolean."""
        if value is None:
            return None
        if not isinstance(value, bool):
            raise ValueError(f'{label} must be a boolean.')
        return value

    @classmethod
    def check_datetime(cls, label, value):
        """Checks if the given value is a datetime."""
        if value is None:
            return None
        if not isinstance(value, datetime.datetime):
            raise ValueError(f'{label} must be a datetime.')
        return value


class MessageEncoder(json.JSONEncoder):
    """A custom ``JSONEncoder`` implementation for serializing Message instances into JSON."""

    @classmethod
    def remove_null_values(cls, dict_value):
        # Drop None and empty containers so the serialized payload stays minimal.
        return {k: v for k, v in dict_value.items() if v not in [None, [], {}]}

    @classmethod
    def encode_android(cls, android):
        """Encodes an ``AndroidConfig`` instance into JSON."""
        if android is None:
            return None
        if not isinstance(android, _messaging_utils.AndroidConfig):
            raise ValueError('Message.android must be an instance of AndroidConfig class.')
        result = {
            'collapse_key': _Validators.check_string(
                'AndroidConfig.collapse_key', android.collapse_key),
            'data': _Validators.check_string_dict(
                'AndroidConfig.data', android.data),
            'notification': cls.encode_android_notification(android.notification),
            'priority': _Validators.check_string(
                'AndroidConfig.priority', android.priority, non_empty=True),
            'restricted_package_name': _Validators.check_string(
                'AndroidConfig.restricted_package_name', android.restricted_package_name),
            'ttl': cls.encode_ttl(android.ttl),
            'fcm_options': cls.encode_android_fcm_options(android.fcm_options),
            'direct_boot_ok': _Validators.check_boolean(
                'AndroidConfig.direct_boot_ok', android.direct_boot_ok),
            'bandwidth_constrained_ok': _Validators.check_boolean(
                'AndroidConfig.bandwidth_constrained_ok', android.bandwidth_constrained_ok),
            'restricted_satellite_ok': _Validators.check_boolean(
                'AndroidConfig.restricted_satellite_ok', android.restricted_satellite_ok),
        }
        result = cls.remove_null_values(result)
        # Priority is validated after null-stripping so absent values are allowed.
        priority = result.get('priority')
        if priority and priority not in ('high', 'normal'):
            raise ValueError('AndroidConfig.priority must be "high" or "normal".')
        return result

    @classmethod
    def encode_android_fcm_options(cls, fcm_options):
        """Encodes an ``AndroidFCMOptions`` instance into JSON."""
        if fcm_options is None:
            return None
        if not isinstance(fcm_options, _messaging_utils.AndroidFCMOptions):
            raise ValueError('AndroidConfig.fcm_options must be an instance of '
                             'AndroidFCMOptions class.')
        result = {
            'analytics_label': _Validators.check_analytics_label(
                'AndroidFCMOptions.analytics_label', fcm_options.analytics_label),
        }
        result = cls.remove_null_values(result)
        return result

    @classmethod
    def encode_ttl(cls, ttl):
        """Encodes an ``AndroidConfig`` ``TTL`` duration into a string."""
        if ttl is None:
            return None
        if isinstance(ttl, numbers.Number):
            ttl = datetime.timedelta(seconds=ttl)
        if not isinstance(ttl, datetime.timedelta):
            raise ValueError('AndroidConfig.ttl must be a duration in seconds or an instance of '
                             'datetime.timedelta.')
        total_seconds = ttl.total_seconds()
        if total_seconds < 0:
            raise ValueError('AndroidConfig.ttl must not be negative.')
        seconds = int(math.floor(total_seconds))
        nanos = int((total_seconds - seconds) * 1e9)
        # Format as a protobuf Duration string: "<seconds>s" or "<seconds>.<9-digit nanos>s".
        if nanos:
            return f'{seconds}.{str(nanos).zfill(9)}s'
        return f'{seconds}s'

    @classmethod
    def encode_milliseconds(cls, label, msec):
        """Encodes a duration in milliseconds into a string."""
        if msec is None:
            return None
        if isinstance(msec, numbers.Number):
            msec = datetime.timedelta(milliseconds=msec)
        if not isinstance(msec, datetime.timedelta):
            raise ValueError(
                f'{label} must be a duration in milliseconds or an instance of datetime.timedelta.')
        total_seconds = msec.total_seconds()
        if total_seconds < 0:
            raise ValueError(f'{label} must not be negative.')
        seconds = int(math.floor(total_seconds))
        nanos = int((total_seconds - seconds) * 1e9)
        # Same Duration wire format as encode_ttl: expressed in seconds, not milliseconds.
        if nanos:
            return f'{seconds}.{str(nanos).zfill(9)}s'
        return f'{seconds}s'

    @classmethod
    def encode_android_notification(cls, notification):
        """Encodes an ``AndroidNotification`` instance into JSON."""
        if notification is None:
            return None
        if not isinstance(notification, _messaging_utils.AndroidNotification):
            raise ValueError('AndroidConfig.notification must be an instance of '
                             'AndroidNotification class.')
        result = {
            'body': _Validators.check_string(
                'AndroidNotification.body', notification.body),
            'body_loc_args': _Validators.check_string_list(
                'AndroidNotification.body_loc_args', notification.body_loc_args),
            'body_loc_key': _Validators.check_string(
                'AndroidNotification.body_loc_key', notification.body_loc_key),
            'click_action': _Validators.check_string(
                'AndroidNotification.click_action', notification.click_action),
            'color': _Validators.check_string(
                'AndroidNotification.color', notification.color, non_empty=True),
            'icon': _Validators.check_string(
                'AndroidNotification.icon', notification.icon),
            'sound': _Validators.check_string(
                'AndroidNotification.sound', notification.sound),
            'tag': _Validators.check_string(
                'AndroidNotification.tag', notification.tag),
            'title': _Validators.check_string(
                'AndroidNotification.title', notification.title),
            'title_loc_args': _Validators.check_string_list(
                'AndroidNotification.title_loc_args', notification.title_loc_args),
            'title_loc_key': _Validators.check_string(
                'AndroidNotification.title_loc_key', notification.title_loc_key),
            'channel_id': _Validators.check_string(
                'AndroidNotification.channel_id', notification.channel_id),
            # NOTE(review): label here is 'image', not 'AndroidNotification.image' —
            # inconsistent with the sibling labels; confirm whether intentional.
            'image': _Validators.check_string(
                'image', notification.image),
            'ticker': _Validators.check_string(
                'AndroidNotification.ticker', notification.ticker),
            'sticky': notification.sticky,
            'event_time': _Validators.check_datetime(
                'AndroidNotification.event_timestamp', notification.event_timestamp),
            'local_only': notification.local_only,
            'notification_priority': _Validators.check_string(
                'AndroidNotification.priority', notification.priority, non_empty=True),
            'vibrate_timings': _Validators.check_number_list(
                'AndroidNotification.vibrate_timings_millis', notification.vibrate_timings_millis),
            'default_vibrate_timings': notification.default_vibrate_timings,
            'default_sound': notification.default_sound,
            'default_light_settings': notification.default_light_settings,
            'light_settings': cls.encode_light_settings(notification.light_settings),
            'visibility': _Validators.check_string(
                'AndroidNotification.visibility', notification.visibility, non_empty=True),
            'notification_count': _Validators.check_number(
                'AndroidNotification.notification_count', notification.notification_count),
            'proxy': _Validators.check_string(
                'AndroidNotification.proxy', notification.proxy, non_empty=True)
        }
        result = cls.remove_null_values(result)
        color = result.get('color')
        if color and not re.match(r'^#[0-9a-fA-F]{6}$', color):
            raise ValueError(
                'AndroidNotification.color must be in the form #RRGGBB.')
        if result.get('body_loc_args') and not result.get('body_loc_key'):
            raise ValueError(
                'AndroidNotification.body_loc_key is required when specifying body_loc_args.')
        if result.get('title_loc_args') and not result.get('title_loc_key'):
            raise ValueError(
                'AndroidNotification.title_loc_key is required when specifying title_loc_args.')

        event_time = result.get('event_time')
        if event_time:
            # if the datetime instance is not naive (tzinfo is present), convert to UTC
            # otherwise (tzinfo is None) assume the datetime instance is already in UTC
            if event_time.tzinfo is not None:
                event_time = event_time.astimezone(datetime.timezone.utc)
            result['event_time'] = event_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

        priority = result.get('notification_priority')
        if priority:
            if priority not in ('min', 'low', 'default', 'high', 'max'):
                raise ValueError('AndroidNotification.priority must be "default", "min", "low", '
                                 '"high" or "max".')
            # Backend expects the enum form, e.g. 'PRIORITY_HIGH'.
            result['notification_priority'] = 'PRIORITY_' + priority.upper()

        visibility = result.get('visibility')
        if visibility:
            if visibility not in ('private', 'public', 'secret'):
                raise ValueError(
                    'AndroidNotification.visibility must be "private", "public" or "secret".')
            result['visibility'] = visibility.upper()

        vibrate_timings_millis = result.get('vibrate_timings')
        if vibrate_timings_millis:
            # Convert each millisecond value to a Duration string.
            vibrate_timing_strings = []
            for msec in vibrate_timings_millis:
                formated_string = cls.encode_milliseconds(
                    'AndroidNotification.vibrate_timings_millis', msec)
                vibrate_timing_strings.append(formated_string)
            result['vibrate_timings'] = vibrate_timing_strings

        proxy = result.get('proxy')
        if proxy:
            if proxy not in ('allow', 'deny', 'if_priority_lowered'):
                raise ValueError(
                    'AndroidNotification.proxy must be "allow", "deny" or "if_priority_lowered".')
            result['proxy'] = proxy.upper()
        return result

    @classmethod
    def encode_light_settings(cls, light_settings):
        """Encodes a ``LightSettings`` instance into JSON."""
        if light_settings is None:
            return None
        if not isinstance(light_settings, _messaging_utils.LightSettings):
            raise ValueError(
                'AndroidNotification.light_settings must be an instance of LightSettings class.')
        result = {
            'color': _Validators.check_string(
                'LightSettings.color', light_settings.color, non_empty=True),
            'light_on_duration': cls.encode_milliseconds(
                'LightSettings.light_on_duration_millis', light_settings.light_on_duration_millis),
            'light_off_duration': cls.encode_milliseconds(
                'LightSettings.light_off_duration_millis',
                light_settings.light_off_duration_millis),
        }
        result = cls.remove_null_values(result)
        light_on_duration = result.get('light_on_duration')
        if not light_on_duration:
            raise ValueError(
                'LightSettings.light_on_duration_millis is required.')

        light_off_duration = result.get('light_off_duration')
        if not light_off_duration:
            raise ValueError(
                'LightSettings.light_off_duration_millis is required.')

        color = result.get('color')
        if not color:
            raise ValueError('LightSettings.color is required.')
        if not re.match(r'^#[0-9a-fA-F]{6}$', color) and not re.match(r'^#[0-9a-fA-F]{8}$', color):
            raise ValueError(
                'LightSettings.color must be in the form #RRGGBB or #RRGGBBAA.')
        # Normalize #RRGGBB to #RRGGBBAA with full alpha, then convert each channel to [0, 1].
        if len(color) == 7:
            color = color+'FF'
        rgba = [int(color[i:i + 2], 16) / 255.0 for i in (1, 3, 5, 7)]
        result['color'] = {'red': rgba[0], 'green': rgba[1],
                           'blue': rgba[2], 'alpha': rgba[3]}
        return result

    @classmethod
    def encode_webpush(cls, webpush):
        """Encodes a ``WebpushConfig`` instance into JSON."""
        if webpush is None:
            return None
        if not isinstance(webpush, _messaging_utils.WebpushConfig):
            raise ValueError('Message.webpush must be an instance of WebpushConfig class.')
        result = {
            'data': _Validators.check_string_dict(
                'WebpushConfig.data', webpush.data),
            'headers': _Validators.check_string_dict(
                'WebpushConfig.headers', webpush.headers),
            'notification': cls.encode_webpush_notification(webpush.notification),
            'fcm_options': cls.encode_webpush_fcm_options(webpush.fcm_options),
        }
        return cls.remove_null_values(result)

    @classmethod
    def encode_webpush_notification(cls, notification):
        """Encodes a ``WebpushNotification`` instance into JSON."""
        if notification is None:
            return None
        if not isinstance(notification, _messaging_utils.WebpushNotification):
            raise ValueError('WebpushConfig.notification must be an instance of '
                             'WebpushNotification class.')
        result = {
            'actions': cls.encode_webpush_notification_actions(notification.actions),
            'badge': _Validators.check_string(
                'WebpushNotification.badge', notification.badge),
            'body': _Validators.check_string(
                'WebpushNotification.body', notification.body),
            'data': notification.data,
            'dir': _Validators.check_string(
                'WebpushNotification.direction', notification.direction),
            'icon': _Validators.check_string(
                'WebpushNotification.icon', notification.icon),
            'image': _Validators.check_string(
                'WebpushNotification.image', notification.image),
            'lang': _Validators.check_string(
                'WebpushNotification.language', notification.language),
            'renotify': notification.renotify,
            'requireInteraction': notification.require_interaction,
            'silent': notification.silent,
            'tag': _Validators.check_string(
                'WebpushNotification.tag', notification.tag),
            'timestamp': _Validators.check_number(
                'WebpushNotification.timestamp_millis', notification.timestamp_millis),
            'title': _Validators.check_string(
                'WebpushNotification.title', notification.title),
            'vibrate': notification.vibrate,
        }
        direction = result.get('dir')
        if direction and direction not in ('auto', 'ltr', 'rtl'):
            raise ValueError('WebpushNotification.direction must be "auto", "ltr" or "rtl".')
        if notification.custom_data is not None:
            if not isinstance(notification.custom_data, dict):
                raise ValueError('WebpushNotification.custom_data must be a dict.')
            # Custom keys are merged in but must not collide with the standard fields above.
            for key, value in notification.custom_data.items():
                if key in result:
                    raise ValueError(
                        f'Multiple specifications for {key} in WebpushNotification.')
                result[key] = value
        return cls.remove_null_values(result)

    @classmethod
    def encode_webpush_notification_actions(cls, actions):
        """Encodes a list of ``WebpushNotificationActions`` into JSON."""
        if actions is None:
            return None
        if not isinstance(actions, list):
            raise ValueError('WebpushConfig.notification.actions must be a list of '
                             'WebpushNotificationAction instances.')
        results = []
        for action in actions:
            if not isinstance(action, _messaging_utils.WebpushNotificationAction):
                raise ValueError('WebpushConfig.notification.actions must be a list of '
                                 'WebpushNotificationAction instances.')
            result = {
                'action': _Validators.check_string(
                    'WebpushNotificationAction.action', action.action),
                'title': _Validators.check_string(
                    'WebpushNotificationAction.title', action.title),
                'icon': _Validators.check_string(
                    'WebpushNotificationAction.icon', action.icon),
            }
            results.append(cls.remove_null_values(result))
        return results

    @classmethod
    def encode_webpush_fcm_options(cls, options):
        """Encodes a ``WebpushFCMOptions`` instance into JSON."""
        if options is None:
            return None
        result = {
            'link': _Validators.check_string('WebpushConfig.fcm_options.link', options.link),
        }
        result = cls.remove_null_values(result)
        link = 
result.get('link') + if link is not None and not link.startswith('https://'): + raise ValueError('WebpushFCMOptions.link must be a HTTPS URL.') + return result + + @classmethod + def encode_apns(cls, apns): + """Encodes an ``APNSConfig`` instance into JSON.""" + if apns is None: + return None + if not isinstance(apns, _messaging_utils.APNSConfig): + raise ValueError('Message.apns must be an instance of APNSConfig class.') + result = { + 'headers': _Validators.check_string_dict( + 'APNSConfig.headers', apns.headers), + 'payload': cls.encode_apns_payload(apns.payload), + 'fcm_options': cls.encode_apns_fcm_options(apns.fcm_options), + 'live_activity_token': _Validators.check_string( + 'APNSConfig.live_activity_token', apns.live_activity_token), + } + return cls.remove_null_values(result) + + @classmethod + def encode_apns_payload(cls, payload): + """Encodes an ``APNSPayload`` instance into JSON.""" + if payload is None: + return None + if not isinstance(payload, _messaging_utils.APNSPayload): + raise ValueError('APNSConfig.payload must be an instance of APNSPayload class.') + result = { + 'aps': cls.encode_aps(payload.aps) + } + for key, value in payload.custom_data.items(): + result[key] = value + return cls.remove_null_values(result) + + @classmethod + def encode_apns_fcm_options(cls, fcm_options): + """Encodes an ``APNSFCMOptions`` instance into JSON.""" + if fcm_options is None: + return None + if not isinstance(fcm_options, _messaging_utils.APNSFCMOptions): + raise ValueError('APNSConfig.fcm_options must be an instance of APNSFCMOptions class.') + result = { + 'analytics_label': _Validators.check_analytics_label( + 'APNSFCMOptions.analytics_label', fcm_options.analytics_label), + 'image': _Validators.check_string('APNSFCMOptions.image', fcm_options.image) + } + result = cls.remove_null_values(result) + return result + + @classmethod + def encode_aps(cls, aps): + """Encodes an ``Aps`` instance into JSON.""" + if not isinstance(aps, _messaging_utils.Aps): + raise 
ValueError('APNSPayload.aps must be an instance of Aps class.')
+ result = {
+ 'alert': cls.encode_aps_alert(aps.alert),
+ 'badge': _Validators.check_number('Aps.badge', aps.badge),
+ 'sound': cls.encode_aps_sound(aps.sound),
+ 'category': _Validators.check_string('Aps.category', aps.category),
+ 'thread-id': _Validators.check_string('Aps.thread_id', aps.thread_id),
+ }
+ if aps.content_available is True:
+ result['content-available'] = 1
+ if aps.mutable_content is True:
+ result['mutable-content'] = 1
+ if aps.custom_data is not None:
+ if not isinstance(aps.custom_data, dict):
+ raise ValueError('Aps.custom_data must be a dict.')
+ for key, val in aps.custom_data.items():
+ _Validators.check_string('Aps.custom_data key', key)
+ if key in result:
+ raise ValueError(f'Multiple specifications for {key} in Aps.')
+ result[key] = val
+ return cls.remove_null_values(result)
+
+ @classmethod
+ def encode_aps_sound(cls, sound):
+ """Encodes an APNs sound configuration into JSON."""
+ if sound is None:
+ return None
+ if sound and isinstance(sound, str):
+ return sound
+ if not isinstance(sound, _messaging_utils.CriticalSound):
+ raise ValueError(
+ 'Aps.sound must be a non-empty string or an instance of CriticalSound class.')
+ result = {
+ 'name': _Validators.check_string('CriticalSound.name', sound.name, non_empty=True),
+ 'volume': _Validators.check_number('CriticalSound.volume', sound.volume),
+ }
+ if sound.critical:
+ result['critical'] = 1
+ if not result['name']:
+ raise ValueError('CriticalSound.name must be a non-empty string.')
+ volume = result['volume']
+ if volume is not None and (volume < 0 or volume > 1):
+ raise ValueError('CriticalSound.volume must be in the interval [0,1].')
+ return cls.remove_null_values(result)
+
+ @classmethod
+ def encode_aps_alert(cls, alert):
+ """Encodes an ``ApsAlert`` instance into JSON."""
+ if alert is None:
+ return None
+ if isinstance(alert, str):
+ return alert
+ if not isinstance(alert, _messaging_utils.ApsAlert):
+ 
raise ValueError('Aps.alert must be a string or an instance of ApsAlert class.') + result = { + 'title': _Validators.check_string('ApsAlert.title', alert.title), + 'subtitle': _Validators.check_string('ApsAlert.subtitle', alert.subtitle), + 'body': _Validators.check_string('ApsAlert.body', alert.body), + 'title-loc-key': _Validators.check_string( + 'ApsAlert.title_loc_key', alert.title_loc_key), + 'title-loc-args': _Validators.check_string_list( + 'ApsAlert.title_loc_args', alert.title_loc_args), + 'loc-key': _Validators.check_string( + 'ApsAlert.loc_key', alert.loc_key), + 'loc-args': _Validators.check_string_list( + 'ApsAlert.loc_args', alert.loc_args), + 'action-loc-key': _Validators.check_string( + 'ApsAlert.action_loc_key', alert.action_loc_key), + 'launch-image': _Validators.check_string( + 'ApsAlert.launch_image', alert.launch_image), + } + if result.get('loc-args') and not result.get('loc-key'): + raise ValueError( + 'ApsAlert.loc_key is required when specifying loc_args.') + if result.get('title-loc-args') and not result.get('title-loc-key'): + raise ValueError( + 'ApsAlert.title_loc_key is required when specifying title_loc_args.') + if alert.custom_data is not None: + if not isinstance(alert.custom_data, dict): + raise ValueError('ApsAlert.custom_data must be a dict.') + for key, val in alert.custom_data.items(): + _Validators.check_string('ApsAlert.custom_data key', key) + # allow specifying key override because Apple could update API so that key + # could have unexpected value type + result[key] = val + return cls.remove_null_values(result) + + @classmethod + def encode_notification(cls, notification): + """Encodes a ``Notification`` instance into JSON.""" + if notification is None: + return None + if not isinstance(notification, _messaging_utils.Notification): + raise ValueError('Message.notification must be an instance of Notification class.') + result = { + 'body': _Validators.check_string('Notification.body', notification.body), + 'title': 
_Validators.check_string('Notification.title', notification.title), + 'image': _Validators.check_string('Notification.image', notification.image) + } + return cls.remove_null_values(result) + + @classmethod + def sanitize_topic_name(cls, topic): + """Removes the /topics/ prefix from the topic name, if present.""" + if not topic: + return None + prefix = '/topics/' + if topic.startswith(prefix): + topic = topic[len(prefix):] + # Checks for illegal characters and empty string. + if not re.match(r'^[a-zA-Z0-9-_\.~%]+$', topic): + raise ValueError('Malformed topic name.') + return topic + + def default(self, o): # pylint: disable=method-hidden + if not isinstance(o, Message): + return json.JSONEncoder.default(self, o) + result = { + 'android': MessageEncoder.encode_android(o.android), + 'apns': MessageEncoder.encode_apns(o.apns), + 'condition': _Validators.check_string( + 'Message.condition', o.condition, non_empty=True), + 'data': _Validators.check_string_dict('Message.data', o.data), + 'notification': MessageEncoder.encode_notification(o.notification), + 'token': _Validators.check_string('Message.token', o.token, non_empty=True), + 'topic': _Validators.check_string('Message.topic', o.topic, non_empty=True), + 'webpush': MessageEncoder.encode_webpush(o.webpush), + 'fcm_options': MessageEncoder.encode_fcm_options(o.fcm_options), + } + result['topic'] = MessageEncoder.sanitize_topic_name(result.get('topic')) + result = MessageEncoder.remove_null_values(result) + target_count = sum(t in result for t in ['token', 'topic', 'condition']) + if target_count != 1: + raise ValueError('Exactly one of token, topic or condition must be specified.') + return result + + @classmethod + def encode_fcm_options(cls, fcm_options): + """Encodes an ``FCMOptions`` instance into JSON.""" + if fcm_options is None: + return None + if not isinstance(fcm_options, _messaging_utils.FCMOptions): + raise ValueError('Message.fcm_options must be an instance of FCMOptions class.') + result = { + 
'analytics_label': _Validators.check_analytics_label( + 'FCMOptions.analytics_label', fcm_options.analytics_label), + } + result = cls.remove_null_values(result) + return result diff --git a/firebase_admin/_messaging_utils.py b/firebase_admin/_messaging_utils.py new file mode 100644 index 000000000..773ed6057 --- /dev/null +++ b/firebase_admin/_messaging_utils.py @@ -0,0 +1,525 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Types and utilities used by the messaging (FCM) module.""" +from __future__ import annotations +import datetime +from typing import Dict, Optional, Union + +from firebase_admin import exceptions + + +class Notification: + """A notification that can be included in a message. + + Args: + title: Title of the notification (optional). + body: Body of the notification (optional). + image: Image url of the notification (optional) + """ + + def __init__(self, title=None, body=None, image=None): + self.title = title + self.body = body + self.image = image + + +class AndroidConfig: + """Android-specific options that can be included in a message. + + Args: + collapse_key: Collapse key string for the message (optional). This is an identifier for a + group of messages that can be collapsed, so that only the last message is sent when + delivery can be resumed. A maximum of 4 different collapse keys may be active at a + given time. + priority: Priority of the message (optional). Must be one of ``high`` or ``normal``. 
+ ttl: The time-to-live duration of the message (optional). This can be specified + as a numeric seconds value or a ``datetime.timedelta`` instance. + restricted_package_name: The package name of the application where the registration tokens + must match in order to receive the message (optional). + data: A dictionary of data fields (optional). All keys and values in the dictionary must be + strings. When specified, overrides any data fields set via ``Message.data``. + notification: A ``messaging.AndroidNotification`` to be included in the message (optional). + fcm_options: A ``messaging.AndroidFCMOptions`` to be included in the message (optional). + direct_boot_ok: A boolean indicating whether messages will be allowed to be delivered to + the app while the device is in direct boot mode (optional). + bandwidth_constrained_ok: A boolean indicating whether messages will be allowed to be + delivered to the app while the device is on a bandwidth constrained network (optional). + restricted_satellite_ok: A boolean indicating whether messages will be allowed to be + delivered to the app while the device is on a restricted satellite network (optional). 
+ """ + + def __init__( + self, + collapse_key: Optional[str] = None, + priority: Optional[str] = None, + ttl: Optional[Union[int, float, datetime.timedelta]] = None, + restricted_package_name: Optional[str] = None, + data: Optional[Dict[str, str]] = None, + notification: Optional[AndroidNotification] = None, + fcm_options: Optional[AndroidFCMOptions] = None, + direct_boot_ok: Optional[bool] = None, + bandwidth_constrained_ok: Optional[bool] = None, + restricted_satellite_ok: Optional[bool] = None + ): + self.collapse_key = collapse_key + self.priority = priority + self.ttl = ttl + self.restricted_package_name = restricted_package_name + self.data = data + self.notification = notification + self.fcm_options = fcm_options + self.direct_boot_ok = direct_boot_ok + self.bandwidth_constrained_ok = bandwidth_constrained_ok + self.restricted_satellite_ok = restricted_satellite_ok + + +class AndroidNotification: + """Android-specific notification parameters. + + Args: + title: Title of the notification (optional). If specified, overrides the title set via + ``messaging.Notification``. + body: Body of the notification (optional). If specified, overrides the body set via + ``messaging.Notification``. + icon: Icon of the notification (optional). + color: Color of the notification icon expressed in ``#rrggbb`` form (optional). + sound: Sound to be played when the device receives the notification (optional). This is + usually the file name of the sound resource. + tag: Tag of the notification (optional). This is an identifier used to replace existing + notifications in the notification drawer. If not specified, each request creates a new + notification. + click_action: The action associated with a user click on the notification (optional). If + specified, an activity with a matching intent filter is launched when a user clicks on + the notification. + body_loc_key: Key of the body string in the app's string resources to use to localize the + body text (optional). 
+ body_loc_args: A list of resource keys that will be used in place of the format specifiers + in ``body_loc_key`` (optional). + title_loc_key: Key of the title string in the app's string resources to use to localize the + title text (optional). + title_loc_args: A list of resource keys that will be used in place of the format specifiers + in ``title_loc_key`` (optional). + channel_id: channel_id of the notification (optional). + image: Image url of the notification (optional). + ticker: Sets the ``ticker`` text, which is sent to accessibility services. Prior to API + level 21 (Lollipop), sets the text that is displayed in the status bar when the + notification first arrives (optional). + sticky: When set to ``False`` or unset, the notification is automatically dismissed when the + user clicks it in the panel. When set to ``True``, the notification persists even when + the user clicks it (optional). + event_timestamp: For notifications that inform users about events with an absolute time + reference, sets the time that the event in the notification occurred as a + ``datetime.datetime`` instance. If the ``datetime.datetime`` instance is naive, it + defaults to be in the UTC timezone. Notifications in the panel are sorted by this time + (optional). + local_only: Sets whether or not this notification is relevant only to the current device. + Some notifications can be bridged to other devices for remote display, such as a Wear OS + watch. This hint can be set to recommend this notification not be bridged (optional). + See Wear OS guides: + https://developer.android.com/training/wearables/notifications/bridger#existing-method-of-preventing-bridging + priority: Sets the relative priority for this notification. Low-priority notifications may + be hidden from the user in certain situations. Note this priority differs from + ``AndroidMessagePriority``. This priority is processed by the client after the message + has been delivered. 
Whereas ``AndroidMessagePriority`` is an FCM concept that controls + when the message is delivered (optional). Must be one of ``default``, ``min``, ``low``, + ``high``, ``max`` or ``normal``. + vibrate_timings_millis: Sets the vibration pattern to use. Pass in an array of milliseconds + to turn the vibrator on or off. The first value indicates the duration to wait before + turning the vibrator on. The next value indicates the duration to keep the vibrator on. + Subsequent values alternate between duration to turn the vibrator off and to turn the + vibrator on. If ``vibrate_timings`` is set and ``default_vibrate_timings`` is set to + ``True``, the default value is used instead of the user-specified ``vibrate_timings``. + default_vibrate_timings: If set to ``True``, use the Android framework's default vibrate + pattern for the notification (optional). Default values are specified in ``config.xml`` + https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml. + If ``default_vibrate_timings`` is set to ``True`` and ``vibrate_timings`` is also set, + the default value is used instead of the user-specified ``vibrate_timings``. + default_sound: If set to ``True``, use the Android framework's default sound for the + notification (optional). Default values are specified in ``config.xml`` + https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml + light_settings: Settings to control the notification's LED blinking rate and color if LED is + available on the device. The total blinking time is controlled by the OS (optional). + default_light_settings: If set to ``True``, use the Android framework's default LED light + settings for the notification. Default values are specified in ``config.xml`` + https://android.googlesource.com/platform/frameworks/base/+/master/core/res/res/values/config.xml. 
+ If ``default_light_settings`` is set to ``True`` and ``light_settings`` is also set, the + user-specified ``light_settings`` is used instead of the default value. + visibility: Sets the visibility of the notification. Must be either ``private``, ``public``, + or ``secret``. If unspecified, it remains undefined in the Admin SDK, and defers to + the FCM backend's default mapping. + notification_count: Sets the number of items this notification represents. May be displayed + as a badge count for Launchers that support badging. See ``NotificationBadge`` + https://developer.android.com/training/notify-user/badges. For example, this might be + useful if you're using just one notification to represent multiple new messages but you + want the count here to represent the number of total new messages. If zero or + unspecified, systems that support badging use the default, which is to increment a + number displayed on the long-press menu each time a new notification arrives. + proxy: Sets if the notification may be proxied. Must be one of ``allow``, ``deny``, or + ``if_priority_lowered``. If unspecified, it remains undefined in the Admin SDK, and + defers to the FCM backend's default mapping. 
+ + + """ + + def __init__(self, title=None, body=None, icon=None, color=None, sound=None, tag=None, + click_action=None, body_loc_key=None, body_loc_args=None, title_loc_key=None, + title_loc_args=None, channel_id=None, image=None, ticker=None, sticky=None, + event_timestamp=None, local_only=None, priority=None, vibrate_timings_millis=None, + default_vibrate_timings=None, default_sound=None, light_settings=None, + default_light_settings=None, visibility=None, notification_count=None, + proxy=None): + self.title = title + self.body = body + self.icon = icon + self.color = color + self.sound = sound + self.tag = tag + self.click_action = click_action + self.body_loc_key = body_loc_key + self.body_loc_args = body_loc_args + self.title_loc_key = title_loc_key + self.title_loc_args = title_loc_args + self.channel_id = channel_id + self.image = image + self.ticker = ticker + self.sticky = sticky + self.event_timestamp = event_timestamp + self.local_only = local_only + self.priority = priority + self.vibrate_timings_millis = vibrate_timings_millis + self.default_vibrate_timings = default_vibrate_timings + self.default_sound = default_sound + self.light_settings = light_settings + self.default_light_settings = default_light_settings + self.visibility = visibility + self.notification_count = notification_count + self.proxy = proxy + + +class LightSettings: + """Represents settings to control notification LED that can be included in a + ``messaging.AndroidNotification``. + + Args: + color: Sets the color of the LED in ``#rrggbb`` or ``#rrggbbaa`` format. + light_on_duration_millis: Along with ``light_off_duration``, defines the blink rate of LED + flashes. + light_off_duration_millis: Along with ``light_on_duration``, defines the blink rate of LED + flashes. 
+ """ + def __init__(self, color, light_on_duration_millis, + light_off_duration_millis): + self.color = color + self.light_on_duration_millis = light_on_duration_millis + self.light_off_duration_millis = light_off_duration_millis + + +class AndroidFCMOptions: + """Options for features provided by the FCM SDK for Android. + + Args: + analytics_label: contains additional options for features provided by the FCM Android SDK + (optional). + """ + + def __init__(self, analytics_label=None): + self.analytics_label = analytics_label + + +class WebpushConfig: + """Webpush-specific options that can be included in a message. + + Args: + headers: A dictionary of headers (optional). Refer `Webpush Specification`_ for supported + headers. + data: A dictionary of data fields (optional). All keys and values in the dictionary must be + strings. When specified, overrides any data fields set via ``Message.data``. + notification: A ``messaging.WebpushNotification`` to be included in the message (optional). + fcm_options: A ``messaging.WebpushFCMOptions`` instance to be included in the message + (optional). + + .. _Webpush Specification: https://tools.ietf.org/html/rfc8030#section-5 + """ + + def __init__(self, headers=None, data=None, notification=None, fcm_options=None): + self.headers = headers + self.data = data + self.notification = notification + self.fcm_options = fcm_options + + +class WebpushNotificationAction: + """An action available to the users when the notification is presented. + + Args: + action: Action string. + title: Title string. + icon: Icon URL for the action (optional). + """ + + def __init__(self, action, title, icon=None): + self.action = action + self.title = title + self.icon = icon + + +class WebpushNotification: + """Webpush-specific notification parameters. + + Refer to the `Notification Reference`_ for more information. + + Args: + title: Title of the notification (optional). If specified, overrides the title set via + ``messaging.Notification``. 
+ body: Body of the notification (optional). If specified, overrides the body set via + ``messaging.Notification``. + icon: Icon URL of the notification (optional). + actions: A list of ``messaging.WebpushNotificationAction`` instances (optional). + badge: URL of the image used to represent the notification when there is + not enough space to display the notification itself (optional). + data: Any arbitrary JSON data that should be associated with the notification (optional). + direction: The direction in which to display the notification (optional). Must be either + 'auto', 'ltr' or 'rtl'. + image: The URL of an image to be displayed in the notification (optional). + language: Notification language (optional). + renotify: A boolean indicating whether the user should be notified after a new + notification replaces an old one (optional). + require_interaction: A boolean indicating whether a notification should remain active + until the user clicks or dismisses it, rather than closing automatically (optional). + silent: ``True`` to indicate that the notification should be silent (optional). + tag: An identifying tag on the notification (optional). + timestamp_millis: A timestamp value in milliseconds on the notification (optional). + vibrate: A vibration pattern for the device's vibration hardware to emit when the + notification fires (optional). The pattern is specified as an integer array. + custom_data: A dict of custom key-value pairs to be included in the notification + (optional) + + .. 
_Notification Reference: https://developer.mozilla.org/en-US/docs/Web/API\ + /notification/Notification + """ + + def __init__(self, title=None, body=None, icon=None, actions=None, badge=None, data=None, + direction=None, image=None, language=None, renotify=None, + require_interaction=None, silent=None, tag=None, timestamp_millis=None, + vibrate=None, custom_data=None): + self.title = title + self.body = body + self.icon = icon + self.actions = actions + self.badge = badge + self.data = data + self.direction = direction + self.image = image + self.language = language + self.renotify = renotify + self.require_interaction = require_interaction + self.silent = silent + self.tag = tag + self.timestamp_millis = timestamp_millis + self.vibrate = vibrate + self.custom_data = custom_data + + +class WebpushFCMOptions: + """Options for features provided by the FCM SDK for Web. + + Args: + link: The link to open when the user clicks on the notification. Must be an HTTPS URL + (optional). + """ + + def __init__(self, link=None): + self.link = link + + +class APNSConfig: + """APNS-specific options that can be included in a message. + + Refer to `APNS Documentation`_ for more information. + + Args: + headers: A dictionary of headers (optional). + payload: A ``messaging.APNSPayload`` to be included in the message (optional). + fcm_options: A ``messaging.APNSFCMOptions`` instance to be included in the message + (optional). + live_activity_token: A live activity token string (optional). + + .. _APNS Documentation: https://developer.apple.com/library/content/documentation\ + /NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html + """ + + def __init__(self, headers=None, payload=None, fcm_options=None, live_activity_token=None): + self.headers = headers + self.payload = payload + self.fcm_options = fcm_options + self.live_activity_token = live_activity_token + + +class APNSPayload: + """Payload of an APNS message. 

    Args:
        aps: A ``messaging.Aps`` instance to be included in the payload.
        **kwargs: Arbitrary keyword arguments to be included as custom fields in the payload
            (optional).
    """

    def __init__(self, aps, **kwargs):
        self.aps = aps
        self.custom_data = kwargs


class Aps:
    """Aps dictionary to be included in an APNS payload.

    Args:
        alert: A string or a ``messaging.ApsAlert`` instance (optional).
        badge: A number representing the badge to be displayed with the message (optional).
        sound: Name of the sound file to be played with the message or a
            ``messaging.CriticalSound`` instance (optional).
        content_available: A boolean indicating whether to configure a background update
            notification (optional).
        category: String identifier representing the message type (optional).
        thread_id: An app-specific string identifier for grouping messages (optional).
        mutable_content: A boolean indicating whether to support mutating notifications at
            the client using app extensions (optional).
        custom_data: A dict of custom key-value pairs to be included in the Aps dictionary
            (optional).
    """

    def __init__(self, alert=None, badge=None, sound=None, content_available=None, category=None,
                 thread_id=None, mutable_content=None, custom_data=None):
        self.alert = alert
        self.badge = badge
        self.sound = sound
        self.content_available = content_available
        self.category = category
        self.thread_id = thread_id
        self.mutable_content = mutable_content
        self.custom_data = custom_data


class CriticalSound:
    """Critical alert sound configuration that can be included in ``messaging.Aps``.

    Args:
        name: The name of a sound file in your app's main bundle or in the ``Library/Sounds``
            folder of your app's container directory. Specify the string ``default`` to play the
            system sound.
        critical: Set to ``True`` to set the critical alert flag on the sound configuration
            (optional).
        volume: The volume for the critical alert's sound. Must be a value between 0.0 (silent)
            and 1.0 (full volume) (optional).
    """

    def __init__(self, name, critical=None, volume=None):
        self.name = name
        self.critical = critical
        self.volume = volume


class ApsAlert:
    """An alert that can be included in ``messaging.Aps``.

    Args:
        title: Title of the alert (optional). If specified, overrides the title set via
            ``messaging.Notification``.
        subtitle: Subtitle of the alert (optional).
        body: Body of the alert (optional). If specified, overrides the body set via
            ``messaging.Notification``.
        loc_key: Key of the body string in the app's string resources to use to localize the
            body text (optional).
        loc_args: A list of resource keys that will be used in place of the format specifiers
            in ``loc_key`` (optional).
        title_loc_key: Key of the title string in the app's string resources to use to localize the
            title text (optional).
        title_loc_args: A list of resource keys that will be used in place of the format specifiers
            in ``title_loc_key`` (optional).
        action_loc_key: Key of the text in the app's string resources to use to localize the
            action button text (optional).
        launch_image: Image for the notification action (optional).
        custom_data: A dict of custom key-value pairs to be included in the ApsAlert dictionary
            (optional).
    """

    def __init__(self, title=None, subtitle=None, body=None, loc_key=None, loc_args=None,
                 title_loc_key=None, title_loc_args=None, action_loc_key=None, launch_image=None,
                 custom_data=None):
        self.title = title
        self.subtitle = subtitle
        self.body = body
        self.loc_key = loc_key
        self.loc_args = loc_args
        self.title_loc_key = title_loc_key
        self.title_loc_args = title_loc_args
        self.action_loc_key = action_loc_key
        self.launch_image = launch_image
        self.custom_data = custom_data


class APNSFCMOptions:
    """Options for features provided by the FCM SDK for iOS.

    Args:
        analytics_label: contains additional options for features provided by the FCM iOS SDK
            (optional).
        image: contains the URL of an image that is going to be displayed in a notification
            (optional).
    """

    def __init__(self, analytics_label=None, image=None):
        self.analytics_label = analytics_label
        self.image = image


class FCMOptions:
    """Options for features provided by SDK.

    Args:
        analytics_label: contains additional options to use across all platforms (optional).
    """

    def __init__(self, analytics_label=None):
        self.analytics_label = analytics_label


class ThirdPartyAuthError(exceptions.UnauthenticatedError):
    """APNs certificate or web push auth key was invalid or missing."""

    def __init__(self, message, cause=None, http_response=None):
        exceptions.UnauthenticatedError.__init__(self, message, cause, http_response)


class QuotaExceededError(exceptions.ResourceExhaustedError):
    """Sending limit exceeded for the message target."""

    def __init__(self, message, cause=None, http_response=None):
        exceptions.ResourceExhaustedError.__init__(self, message, cause, http_response)


class SenderIdMismatchError(exceptions.PermissionDeniedError):
    """The authenticated sender ID is different from the sender ID for the registration token."""

    def __init__(self, message, cause=None, http_response=None):
        exceptions.PermissionDeniedError.__init__(self, message, cause, http_response)


class UnregisteredError(exceptions.NotFoundError):
    """App instance was unregistered from FCM.

    This usually means that the token used is no longer valid and a new one must be used."""

    def __init__(self, message, cause=None, http_response=None):
        exceptions.NotFoundError.__init__(self, message, cause, http_response)
diff --git a/firebase_admin/_retry.py b/firebase_admin/_retry.py
new file mode 100644
index 000000000..efd90a743
--- /dev/null
+++ b/firebase_admin/_retry.py
# Copyright 2025 Google Inc.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Internal retry logic module + +This module provides utilities for adding retry logic to HTTPX requests +""" + +from __future__ import annotations +import copy +import email.utils +import random +import re +import time +from typing import Any, Callable, List, Optional, Tuple, Coroutine +import logging +import asyncio +import httpx + +logger = logging.getLogger(__name__) + + +class HttpxRetry: + """HTTPX based retry config""" + # Status codes to be used for respecting `Retry-After` header + RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) + + # Default maximum backoff time. 
+ DEFAULT_BACKOFF_MAX = 120 + + def __init__( + self, + max_retries: int = 10, + status_forcelist: Optional[List[int]] = None, + backoff_factor: float = 0, + backoff_max: float = DEFAULT_BACKOFF_MAX, + backoff_jitter: float = 0, + history: Optional[List[Tuple[ + httpx.Request, + Optional[httpx.Response], + Optional[Exception] + ]]] = None, + respect_retry_after_header: bool = False, + ) -> None: + self.retries_left = max_retries + self.status_forcelist = status_forcelist + self.backoff_factor = backoff_factor + self.backoff_max = backoff_max + self.backoff_jitter = backoff_jitter + if history: + self.history = history + else: + self.history = [] + self.respect_retry_after_header = respect_retry_after_header + + def copy(self) -> HttpxRetry: + """Creates a deep copy of this instance.""" + return copy.deepcopy(self) + + def is_retryable_response(self, response: httpx.Response) -> bool: + """Determine if a response implies that the request should be retried if possible.""" + if self.status_forcelist and response.status_code in self.status_forcelist: + return True + + has_retry_after = bool(response.headers.get("Retry-After")) + if ( + self.respect_retry_after_header + and has_retry_after + and response.status_code in self.RETRY_AFTER_STATUS_CODES + ): + return True + + return False + + def is_exhausted(self) -> bool: + """Determine if there are anymore more retires.""" + # retries_left is negative + return self.retries_left < 0 + + # Identical implementation of `urllib3.Retry.parse_retry_after()` + def _parse_retry_after(self, retry_after_header: str) -> float | None: + """Parses Retry-After string into a float with unit seconds.""" + seconds: float + # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 + if re.match(r"^\s*[0-9]+\s*$", retry_after_header): + seconds = int(retry_after_header) + else: + retry_date_tuple = email.utils.parsedate_tz(retry_after_header) + if retry_date_tuple is None: + raise httpx.RemoteProtocolError(f"Invalid Retry-After header: 
{retry_after_header}") + + retry_date = email.utils.mktime_tz(retry_date_tuple) + seconds = retry_date - time.time() + + seconds = max(seconds, 0) + + return seconds + + def get_retry_after(self, response: httpx.Response) -> float | None: + """Determine the Retry-After time needed before sending the next request.""" + retry_after_header = response.headers.get('Retry-After', None) + if retry_after_header: + # Convert retry header to a float in seconds + return self._parse_retry_after(retry_after_header) + return None + + def get_backoff_time(self): + """Determine the backoff time needed before sending the next request.""" + # attempt_count is the number of previous request attempts + attempt_count = len(self.history) + # Backoff should be set to 0 until after first retry. + if attempt_count <= 1: + return 0 + backoff = self.backoff_factor * (2 ** (attempt_count-1)) + if self.backoff_jitter: + backoff += random.random() * self.backoff_jitter + return float(max(0, min(self.backoff_max, backoff))) + + async def sleep_for_backoff(self) -> None: + """Determine and wait the backoff time needed before sending the next request.""" + backoff = self.get_backoff_time() + logger.debug('Sleeping for backoff of %f seconds following failed request', backoff) + await asyncio.sleep(backoff) + + async def sleep(self, response: httpx.Response) -> None: + """Determine and wait the time needed before sending the next request.""" + if self.respect_retry_after_header: + retry_after = self.get_retry_after(response) + if retry_after: + logger.debug( + 'Sleeping for Retry-After header of %f seconds following failed request', + retry_after + ) + await asyncio.sleep(retry_after) + return + await self.sleep_for_backoff() + + def increment( + self, + request: httpx.Request, + response: Optional[httpx.Response] = None, + error: Optional[Exception] = None + ) -> None: + """Update the retry state based on request attempt.""" + self.retries_left -= 1 + self.history.append((request, response, error)) 
+ + +class HttpxRetryTransport(httpx.AsyncBaseTransport): + """HTTPX transport with retry logic.""" + + DEFAULT_RETRY = HttpxRetry(max_retries=4, status_forcelist=[500, 503], backoff_factor=0.5) + + def __init__(self, retry: HttpxRetry = DEFAULT_RETRY, **kwargs: Any) -> None: + self._retry = retry + + transport_kwargs = kwargs.copy() + transport_kwargs.update({'retries': 0, 'http2': True}) + # We use a full AsyncHTTPTransport under the hood that is already + # set up to handle requests. We also insure that that transport's internal + # retries are not allowed. + self._wrapped_transport = httpx.AsyncHTTPTransport(**transport_kwargs) + + async def handle_async_request(self, request: httpx.Request) -> httpx.Response: + return await self._dispatch_with_retry( + request, self._wrapped_transport.handle_async_request) + + async def _dispatch_with_retry( + self, + request: httpx.Request, + dispatch_method: Callable[[httpx.Request], Coroutine[Any, Any, httpx.Response]] + ) -> httpx.Response: + """Sends a request with retry logic using a provided dispatch method.""" + # This request config is used across all requests that use this transport and therefore + # needs to be copied to be used for just this request and it's retries. + retry = self._retry.copy() + # First request + response, error = None, None + + while not retry.is_exhausted(): + + # First retry + if response: + await retry.sleep(response) + + # Need to reset here so only last attempt's error or response is saved. 
+ response, error = None, None + + try: + logger.debug('Sending request in _dispatch_with_retry(): %r', request) + response = await dispatch_method(request) + logger.debug('Received response: %r', response) + except httpx.HTTPError as err: + logger.debug('Received error: %r', err) + error = err + + if response and not retry.is_retryable_response(response): + return response + + if error: + raise error + + retry.increment(request, response, error) + + if response: + return response + if error: + raise error + raise AssertionError('_dispatch_with_retry() ended with no response or exception') + + async def aclose(self) -> None: + await self._wrapped_transport.aclose() diff --git a/firebase_admin/_rfc3339.py b/firebase_admin/_rfc3339.py new file mode 100644 index 000000000..8489bdcb9 --- /dev/null +++ b/firebase_admin/_rfc3339.py @@ -0,0 +1,87 @@ +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Parse RFC3339 date strings""" + +from datetime import datetime, timezone +import re + +def parse_to_epoch(datestr): + """Parse an RFC3339 date string and return the number of seconds since the + epoch (as a float). 
+ + In particular, this method is meant to parse the strings returned by the + JSON mapping of protobuf google.protobuf.timestamp.Timestamp instances: + https://github.com/protocolbuffers/protobuf/blob/4cf5bfee9546101d98754d23ff378ff718ba8438/src/google/protobuf/timestamp.proto#L99 + + This method has microsecond precision; nanoseconds will be truncated. + + Args: + datestr: A string in RFC3339 format. + Returns: + Float: The number of seconds since the Unix epoch. + Raises: + ValueError: Raised if the `datestr` is not a valid RFC3339 date string. + """ + return _parse_to_datetime(datestr).timestamp() + + +def _parse_to_datetime(datestr): + """Parse an RFC3339 date string and return a python datetime instance. + + Args: + datestr: A string in RFC3339 format. + Returns: + datetime: The corresponding `datetime` (with timezone information). + Raises: + ValueError: Raised if the `datestr` is not a valid RFC3339 date string. + """ + # If more than 6 digits appear in the fractional seconds position, truncate + # to just the most significant 6. (i.e. we only have microsecond precision; + # nanos are truncated.) + datestr_modified = re.sub(r'(\.\d{6})\d*', r'\1', datestr) + + # This format is the one we actually expect to occur from our backend. The + # others are only present because the spec says we *should* accept them. + try: + return datetime.strptime( + datestr_modified, '%Y-%m-%dT%H:%M:%S.%fZ' + ).replace(tzinfo=timezone.utc) + except ValueError: + pass + + try: + return datetime.strptime( + datestr_modified, '%Y-%m-%dT%H:%M:%SZ' + ).replace(tzinfo=timezone.utc) + except ValueError: + pass + + # Note: %z parses timezone offsets, but requires the timezone offset *not* + # include a separating ':'. As of python 3.7, this was relaxed. + # TODO(rsgowman): Once python3.7 becomes our floor, we can drop the regex + # replacement. 
+ datestr_modified = re.sub(r'(\d\d):(\d\d)$', r'\1\2', datestr_modified) + + try: + return datetime.strptime(datestr_modified, '%Y-%m-%dT%H:%M:%S.%f%z') + except ValueError: + pass + + try: + return datetime.strptime(datestr_modified, '%Y-%m-%dT%H:%M:%S%z') + except ValueError: + pass + + raise ValueError(f'time data {datestr} does not match RFC3339 format') diff --git a/firebase_admin/_sseclient.py b/firebase_admin/_sseclient.py new file mode 100644 index 000000000..3372fe5f2 --- /dev/null +++ b/firebase_admin/_sseclient.py @@ -0,0 +1,205 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""SSEClient module to stream realtime updates from the Firebase Database. + +Based on a similar implementation from Pyrebase. +""" + +import re +import time +import warnings + +from google.auth import transport +import requests + + +# Technically, we should support streams that mix line endings. This regex, +# however, assumes that a system will provide consistent line endings. 
+end_of_field = re.compile(r'\r\n\r\n|\r\r|\n\n') + + +class KeepAuthSession(transport.requests.AuthorizedSession): + """A session that does not drop authentication on redirects between domains.""" + + def __init__(self, credential): + super().__init__(credential) + + def rebuild_auth(self, prepared_request, response): + pass + + +class _EventBuffer: + """A helper class for buffering and parsing raw SSE data.""" + + def __init__(self): + self._buffer = [] + self._tail = '' + + def append(self, char): + self._buffer.append(char) + self._tail += char + self._tail = self._tail[-4:] + + def truncate(self): + head, sep, _ = self.buffer_string.rpartition('\n') + rem = head + sep + self._buffer = list(rem) + self._tail = rem[-4:] + + @property + def is_end_of_field(self): + last_two_chars = self._tail[-2:] + return last_two_chars == '\n\n' or last_two_chars == '\r\r' or self._tail == '\r\n\r\n' + + @property + def buffer_string(self): + return ''.join(self._buffer) + + +class SSEClient: + """SSE client implementation.""" + + def __init__(self, url, session, retry=3000, **kwargs): + """Initializes the SSEClient. + + Args: + url: The remote url to connect to. + session: The requests session. + retry: The retry interval in milliseconds (optional). + **kwargs: Extra kwargs that will be sent to ``requests.get()`` (optional). 
+ """ + self.url = url + self.session = session + self.retry = retry + self.requests_kwargs = kwargs + self.should_connect = True + self.last_id = None + self.buf = '' # Keep data here as it streams in + + headers = self.requests_kwargs.get('headers', {}) + # The SSE spec requires making requests with Cache-Control: no-cache + headers['Cache-Control'] = 'no-cache' + # The 'Accept' header is not required, but explicit > implicit + headers['Accept'] = 'text/event-stream' + self.requests_kwargs['headers'] = headers + self._connect() + + def close(self): + """Closes the SSEClient instance.""" + self.should_connect = False + self.retry = 0 + self.resp.close() + + def _connect(self): + """Connects to the server using requests.""" + if self.should_connect: + if self.last_id: + self.requests_kwargs['headers']['Last-Event-ID'] = self.last_id + self.resp = self.session.get(self.url, stream=True, **self.requests_kwargs) + self.resp_iterator = self.resp.iter_content(decode_unicode=True) + self.resp.raise_for_status() + else: + raise StopIteration() + + def __iter__(self): + return self + + def __next__(self): + if not re.search(end_of_field, self.buf): + temp_buffer = _EventBuffer() + while not temp_buffer.is_end_of_field: + try: + nextchar = next(self.resp_iterator) + temp_buffer.append(nextchar) + except (StopIteration, requests.RequestException): + time.sleep(self.retry / 1000.0) + self._connect() + # The SSE spec only supports resuming from a whole message, so + # if we have half a message we should throw it out. + temp_buffer.truncate() + continue + self.buf = temp_buffer.buffer_string + + split = re.split(end_of_field, self.buf) + head = split[0] + self.buf = '\n\n'.join(split[1:]) + event = Event.parse(head) + + if event.data == 'credential is no longer valid': + self._connect() + return None + if event.data == 'null': + return None + + # If the server requests a specific retry delay, we need to honor it. 
+ if event.retry: + self.retry = event.retry + + # last_id should only be set if included in the message. It's not + # forgotten if a message omits it. + if event.event_id: + self.last_id = event.event_id + return event + + +class Event: + """Event represents the events fired by SSE.""" + + sse_line_pattern = re.compile('(?P[^:]*):?( ?(?P.*))?') + + def __init__(self, data='', event_type='message', event_id=None, retry=None): + self.data = data + self.event_type = event_type + self.event_id = event_id + self.retry = retry + + @classmethod + def parse(cls, raw): + """Given a possibly-multiline string representing an SSE message, parses it + and returns an Event object. + + Args: + raw: the raw data to parse. + + Returns: + Event: A new ``Event`` with the parameters initialized. + """ + event = cls() + for line in raw.split('\n'): + match = cls.sse_line_pattern.match(line) + if match is None: + # Malformed line. Discard but warn. + warnings.warn(f'Invalid SSE line: "{line}"', SyntaxWarning) + continue + + name = match.groupdict()['name'] + value = match.groupdict()['value'] + if name == '': + # line began with a ":", so is a comment. Ignore + continue + if name == 'data': + # If we already have some data, then join to it with a newline. + # Else this is it. + if event.data: + event.data = f'{event.data}\n{value}' + else: + event.data = value + elif name == 'event': + event.event_type = value + elif name == 'id': + event.event_id = value + elif name == 'retry': + event.retry = int(value) + return event diff --git a/firebase_admin/_token_gen.py b/firebase_admin/_token_gen.py new file mode 100644 index 000000000..1607ef0ba --- /dev/null +++ b/firebase_admin/_token_gen.py @@ -0,0 +1,466 @@ +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase token minting and validation sub module.""" + +import datetime +import time + +import cachecontrol +import requests +from google.auth import credentials +from google.auth import iam +from google.auth import jwt +from google.auth import transport +import google.auth.exceptions +import google.oauth2.id_token +import google.oauth2.service_account + +from firebase_admin import exceptions +from firebase_admin import _auth_utils +from firebase_admin import _http_client + + +# ID token constants +ID_TOKEN_ISSUER_PREFIX = 'https://securetoken.google.com/' +ID_TOKEN_CERT_URI = ('https://www.googleapis.com/robot/v1/metadata/x509/' + 'securetoken@system.gserviceaccount.com') + +# Session cookie constants +COOKIE_ISSUER_PREFIX = 'https://session.firebase.google.com/' +COOKIE_CERT_URI = 'https://www.googleapis.com/identitytoolkit/v3/relyingparty/publicKeys' +MIN_SESSION_COOKIE_DURATION_SECONDS = int(datetime.timedelta(minutes=5).total_seconds()) +MAX_SESSION_COOKIE_DURATION_SECONDS = int(datetime.timedelta(days=14).total_seconds()) + +# Custom token constants +MAX_TOKEN_LIFETIME_SECONDS = int(datetime.timedelta(hours=1).total_seconds()) +FIREBASE_AUDIENCE = ('https://identitytoolkit.googleapis.com/google.' 
+ 'identity.identitytoolkit.v1.IdentityToolkit') +RESERVED_CLAIMS = set([ + 'acr', 'amr', 'at_hash', 'aud', 'auth_time', 'azp', 'cnf', 'c_hash', + 'exp', 'firebase', 'iat', 'iss', 'jti', 'nbf', 'nonce', 'sub' +]) +METADATA_SERVICE_URL = ('http://metadata.google.internal/computeMetadata/v1/instance/' + 'service-accounts/default/email') +ALGORITHM_RS256 = 'RS256' +ALGORITHM_NONE = 'none' + +# Emulator fake account +AUTH_EMULATOR_EMAIL = 'firebase-auth-emulator@example.com' + + +class _EmulatedSigner(google.auth.crypt.Signer): + key_id = None + + def __init__(self): + pass + + def sign(self, message): + return b'' + + +class _SigningProvider: + """Stores a reference to a google.auth.crypto.Signer.""" + + def __init__(self, signer, signer_email, alg=ALGORITHM_RS256): + self._signer = signer + self._signer_email = signer_email + self._alg = alg + + @property + def signer(self): + return self._signer + + @property + def signer_email(self): + return self._signer_email + + @property + def alg(self): + return self._alg + + @classmethod + def from_credential(cls, google_cred): + return _SigningProvider(google_cred.signer, google_cred.signer_email) + + @classmethod + def from_iam(cls, request, google_cred, service_account): + signer = iam.Signer(request, google_cred, service_account) + return _SigningProvider(signer, service_account) + + @classmethod + def for_emulator(cls): + return _SigningProvider(_EmulatedSigner(), AUTH_EMULATOR_EMAIL, ALGORITHM_NONE) + + +class TokenGenerator: + """Generates custom tokens and session cookies.""" + + ID_TOOLKIT_URL = 'https://identitytoolkit.googleapis.com/v1' + + def __init__(self, app, http_client, url_override=None): + self.app = app + self.http_client = http_client + self.request = transport.requests.Request() + url_prefix = url_override or self.ID_TOOLKIT_URL + self.base_url = f'{url_prefix}/projects/{app.project_id}' + self._signing_provider = None + + def _init_signing_provider(self): + """Initializes a signing provider by 
following the go/firebase-admin-sign protocol.""" + if _auth_utils.is_emulated(): + return _SigningProvider.for_emulator() + # If the SDK was initialized with a service account, use it to sign bytes. + google_cred = self.app.credential.get_credential() + if isinstance(google_cred, google.oauth2.service_account.Credentials): + return _SigningProvider.from_credential(google_cred) + + # If the SDK was initialized with a service account email, use it with the IAM service + # to sign bytes. + service_account = self.app.options.get('serviceAccountId') + if service_account: + return _SigningProvider.from_iam(self.request, google_cred, service_account) + + # If the SDK was initialized with some other credential type that supports signing + # (e.g. GAE credentials), use it to sign bytes. + if isinstance(google_cred, credentials.Signing): + return _SigningProvider.from_credential(google_cred) + + # Attempt to discover a service account email from the local Metadata service. Use it + # with the IAM service to sign bytes. + resp = self.request(url=METADATA_SERVICE_URL, headers={'Metadata-Flavor': 'Google'}) + if resp.status != 200: + raise ValueError( + f'Failed to contact the local metadata service: {resp.data.decode()}.') + service_account = resp.data.decode() + return _SigningProvider.from_iam(self.request, google_cred, service_account) + + @property + def signing_provider(self): + """Initializes and returns the SigningProvider instance to be used.""" + if not self._signing_provider: + try: + self._signing_provider = self._init_signing_provider() + except Exception as error: + url = 'https://firebase.google.com/docs/auth/admin/create-custom-tokens' + raise ValueError( + f'Failed to determine service account: {error}. Make sure to initialize the ' + 'SDK with service account credentials or specify a service account ID with ' + f'iam.serviceAccounts.signBlob permission. 
Please refer to {url} for more ' + 'details on creating custom tokens.') from error + return self._signing_provider + + def create_custom_token(self, uid, developer_claims=None, tenant_id=None): + """Builds and signs a Firebase custom auth token.""" + if developer_claims is not None: + if not isinstance(developer_claims, dict): + raise ValueError('developer_claims must be a dictionary') + + disallowed_keys = set(developer_claims.keys()) & RESERVED_CLAIMS + if disallowed_keys: + if len(disallowed_keys) > 1: + error_message = ( + f'Developer claims {", ".join(disallowed_keys)} are reserved and cannot be ' + 'specified.') + else: + error_message = ( + f'Developer claim {", ".join(disallowed_keys)} is reserved and cannot be ' + 'specified.') + raise ValueError(error_message) + + if not uid or not isinstance(uid, str) or len(uid) > 128: + raise ValueError('uid must be a string between 1 and 128 characters.') + + signing_provider = self.signing_provider + now = int(time.time()) + payload = { + 'iss': signing_provider.signer_email, + 'sub': signing_provider.signer_email, + 'aud': FIREBASE_AUDIENCE, + 'uid': uid, + 'iat': now, + 'exp': now + MAX_TOKEN_LIFETIME_SECONDS, + } + if tenant_id: + payload['tenant_id'] = tenant_id + + if developer_claims is not None: + payload['claims'] = developer_claims + + header = {'alg': signing_provider.alg} + try: + return jwt.encode(signing_provider.signer, payload, header=header) + except google.auth.exceptions.TransportError as error: + msg = f'Failed to sign custom token. {error}' + raise TokenSignError(msg, error) from error + + + def create_session_cookie(self, id_token, expires_in): + """Creates a session cookie from the provided ID token.""" + id_token = id_token.decode('utf-8') if isinstance(id_token, bytes) else id_token + if not isinstance(id_token, str) or not id_token: + raise ValueError( + f'Illegal ID token provided: {id_token}. 
ID token must be a non-empty string.') + + if isinstance(expires_in, datetime.timedelta): + expires_in = int(expires_in.total_seconds()) + if isinstance(expires_in, bool) or not isinstance(expires_in, int): + raise ValueError(f'Illegal expiry duration: {expires_in}.') + if expires_in < MIN_SESSION_COOKIE_DURATION_SECONDS: + raise ValueError( + f'Illegal expiry duration: {expires_in}. Duration must be at least ' + f'{MIN_SESSION_COOKIE_DURATION_SECONDS} seconds.') + if expires_in > MAX_SESSION_COOKIE_DURATION_SECONDS: + raise ValueError( + f'Illegal expiry duration: {expires_in}. Duration must be at most ' + f'{MAX_SESSION_COOKIE_DURATION_SECONDS} seconds.') + + url = f'{self.base_url}:createSessionCookie' + payload = { + 'idToken': id_token, + 'validDuration': expires_in, + } + try: + body, http_resp = self.http_client.body_and_response('post', url, json=payload) + except requests.exceptions.RequestException as error: + raise _auth_utils.handle_auth_backend_error(error) + if not body or not body.get('sessionCookie'): + raise _auth_utils.UnexpectedResponseError( + 'Failed to create session cookie.', http_response=http_resp) + return body.get('sessionCookie') + + +class CertificateFetchRequest(transport.Request): + """A google-auth transport that supports HTTP cache-control. + + Also injects a timeout to each outgoing HTTP request. 
+ """ + + def __init__(self, timeout_seconds=None): + self._session = cachecontrol.CacheControl(requests.Session()) + self._delegate = transport.requests.Request(self.session) + self._timeout_seconds = timeout_seconds + + @property + def session(self): + return self._session + + @property + def timeout_seconds(self): + return self._timeout_seconds + + def __call__(self, url, method='GET', body=None, headers=None, timeout=None, **kwargs): + timeout = timeout or self.timeout_seconds + return self._delegate( + url, method=method, body=body, headers=headers, timeout=timeout, **kwargs) + + +class TokenVerifier: + """Verifies ID tokens and session cookies.""" + + def __init__(self, app): + timeout = app.options.get('httpTimeout', _http_client.DEFAULT_TIMEOUT_SECONDS) + self.request = CertificateFetchRequest(timeout) + self.id_token_verifier = _JWTVerifier( + project_id=app.project_id, short_name='ID token', + operation='verify_id_token()', + doc_url='https://firebase.google.com/docs/auth/admin/verify-id-tokens', + cert_url=ID_TOKEN_CERT_URI, + issuer=ID_TOKEN_ISSUER_PREFIX, + invalid_token_error=_auth_utils.InvalidIdTokenError, + expired_token_error=ExpiredIdTokenError) + self.cookie_verifier = _JWTVerifier( + project_id=app.project_id, short_name='session cookie', + operation='verify_session_cookie()', + doc_url='https://firebase.google.com/docs/auth/admin/verify-id-tokens', + cert_url=COOKIE_CERT_URI, + issuer=COOKIE_ISSUER_PREFIX, + invalid_token_error=InvalidSessionCookieError, + expired_token_error=ExpiredSessionCookieError) + + def verify_id_token(self, id_token, clock_skew_seconds=0): + return self.id_token_verifier.verify(id_token, self.request, clock_skew_seconds) + + def verify_session_cookie(self, cookie, clock_skew_seconds=0): + return self.cookie_verifier.verify(cookie, self.request, clock_skew_seconds) + + +class _JWTVerifier: + """Verifies Firebase JWTs (ID tokens or session cookies).""" + + def __init__(self, **kwargs): + self.project_id = 
kwargs.pop('project_id') + self.short_name = kwargs.pop('short_name') + self.operation = kwargs.pop('operation') + self.url = kwargs.pop('doc_url') + self.cert_url = kwargs.pop('cert_url') + self.issuer = kwargs.pop('issuer') + if self.short_name[0].lower() in 'aeiou': + self.articled_short_name = f'an {self.short_name}' + else: + self.articled_short_name = f'a {self.short_name}' + self._invalid_token_error = kwargs.pop('invalid_token_error') + self._expired_token_error = kwargs.pop('expired_token_error') + + def verify(self, token, request, clock_skew_seconds=0): + """Verifies the signature and data for the provided JWT.""" + token = token.encode('utf-8') if isinstance(token, str) else token + if not isinstance(token, bytes) or not token: + raise ValueError( + f'Illegal {self.short_name} provided: {token}. {self.short_name} must be a ' + 'non-empty string.') + + if not self.project_id: + raise ValueError( + 'Failed to ascertain project ID from the credential or the environment. Project ' + f'ID is required to call {self.operation}. Initialize the app with a ' + 'credentials.Certificate or set your Firebase project ID as an app option. ' + 'Alternatively set the GOOGLE_CLOUD_PROJECT environment variable.') + + if clock_skew_seconds < 0 or clock_skew_seconds > 60: + raise ValueError( + f'Illegal clock_skew_seconds value: {clock_skew_seconds}. 
Must be between 0 and 60' + ', inclusive.') + + header, payload = self._decode_unverified(token) + issuer = payload.get('iss') + audience = payload.get('aud') + subject = payload.get('sub') + expected_issuer = self.issuer + self.project_id + + project_id_match_msg = ( + f'Make sure the {self.short_name} comes from the same Firebase project as the service ' + 'account used to authenticate this SDK.') + verify_id_token_msg = ( + f'See {self.url} for details on how to retrieve {self.short_name}.') + + emulated = _auth_utils.is_emulated() + + error_message = None + if audience == FIREBASE_AUDIENCE: + error_message = ( + f'{self.operation} expects {self.articled_short_name}, but was given a custom ' + 'token.') + elif not emulated and not header.get('kid'): + if header.get('alg') == 'HS256' and payload.get( + 'v') == 0 and 'uid' in payload.get('d', {}): + error_message = ( + f'{self.operation} expects {self.articled_short_name}, but was given a legacy ' + 'custom token.') + else: + error_message = f'Firebase {self.short_name} has no "kid" claim.' + elif not emulated and header.get('alg') != 'RS256': + error_message = ( + f'Firebase {self.short_name} has incorrect algorithm. Expected "RS256" but got ' + f'"{header.get("alg")}". {verify_id_token_msg}') + elif audience != self.project_id: + error_message = ( + f'Firebase {self.short_name} has incorrect "aud" (audience) claim. Expected ' + f'"{self.project_id}" but got "{audience}". {project_id_match_msg} ' + f'{verify_id_token_msg}') + elif issuer != expected_issuer: + error_message = ( + f'Firebase {self.short_name} has incorrect "iss" (issuer) claim. Expected ' + f'"{expected_issuer}" but got "{issuer}". {project_id_match_msg} ' + f'{verify_id_token_msg}') + elif subject is None or not isinstance(subject, str): + error_message = ( + f'Firebase {self.short_name} has no "sub" (subject) claim. 
{verify_id_token_msg}') + elif not subject: + error_message = ( + f'Firebase {self.short_name} has an empty string "sub" (subject) claim. ' + f'{verify_id_token_msg}') + elif len(subject) > 128: + error_message = ( + f'Firebase {self.short_name} has a "sub" (subject) claim longer than 128 ' + f'characters. {verify_id_token_msg}') + + if error_message: + raise self._invalid_token_error(error_message) + + try: + if emulated: + verified_claims = payload + else: + verified_claims = google.oauth2.id_token.verify_token( + token, + request=request, + audience=self.project_id, + certs_url=self.cert_url, + clock_skew_in_seconds=clock_skew_seconds) + verified_claims['uid'] = verified_claims['sub'] + return verified_claims + except google.auth.exceptions.TransportError as error: + raise CertificateFetchError(str(error), cause=error) from error + except ValueError as error: + if 'Token expired' in str(error): + raise self._expired_token_error(str(error), cause=error) + raise self._invalid_token_error(str(error), cause=error) + + def _decode_unverified(self, token): + try: + header = jwt.decode_header(token) + payload = jwt.decode(token, verify=False) + return header, payload + except ValueError as error: + raise self._invalid_token_error(str(error), cause=error) + + +class TokenSignError(exceptions.UnknownError): + """Unexpected error while signing a Firebase custom token.""" + + def __init__(self, message, cause): + exceptions.UnknownError.__init__(self, message, cause) + + +class CertificateFetchError(exceptions.UnknownError): + """Failed to fetch some public key certificates required to verify a token.""" + + def __init__(self, message, cause): + exceptions.UnknownError.__init__(self, message, cause) + + +class ExpiredIdTokenError(_auth_utils.InvalidIdTokenError): + """The provided ID token is expired.""" + + def __init__(self, message, cause): + _auth_utils.InvalidIdTokenError.__init__(self, message, cause) + + +class RevokedIdTokenError(_auth_utils.InvalidIdTokenError): + 
"""The provided ID token has been revoked.""" + + def __init__(self, message): + _auth_utils.InvalidIdTokenError.__init__(self, message) + + +class InvalidSessionCookieError(exceptions.InvalidArgumentError): + """The provided string is not a valid Firebase session cookie.""" + + def __init__(self, message, cause=None): + exceptions.InvalidArgumentError.__init__(self, message, cause) + + +class ExpiredSessionCookieError(InvalidSessionCookieError): + """The provided session cookie is expired.""" + + def __init__(self, message, cause): + InvalidSessionCookieError.__init__(self, message, cause) + + +class RevokedSessionCookieError(InvalidSessionCookieError): + """The provided session cookie has been revoked.""" + + def __init__(self, message): + InvalidSessionCookieError.__init__(self, message) diff --git a/firebase_admin/_user_identifier.py b/firebase_admin/_user_identifier.py new file mode 100644 index 000000000..85a224e0b --- /dev/null +++ b/firebase_admin/_user_identifier.py @@ -0,0 +1,103 @@ +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Classes to uniquely identify a user.""" + +from firebase_admin import _auth_utils + +class UserIdentifier: + """Identifies a user to be looked up.""" + + +class UidIdentifier(UserIdentifier): + """Used for looking up an account by uid. + + See ``auth.get_user()``. + """ + + def __init__(self, uid): + """Constructs a new `UidIdentifier` object. + + Args: + uid: A user ID string. 
+ """ + self._uid = _auth_utils.validate_uid(uid, required=True) + + @property + def uid(self): + return self._uid + + +class EmailIdentifier(UserIdentifier): + """Used for looking up an account by email. + + See ``auth.get_user()``. + """ + + def __init__(self, email): + """Constructs a new `EmailIdentifier` object. + + Args: + email: A user email address string. + """ + self._email = _auth_utils.validate_email(email, required=True) + + @property + def email(self): + return self._email + + +class PhoneIdentifier(UserIdentifier): + """Used for looking up an account by phone number. + + See ``auth.get_user()``. + """ + + def __init__(self, phone_number): + """Constructs a new `PhoneIdentifier` object. + + Args: + phone_number: A phone number string. + """ + self._phone_number = _auth_utils.validate_phone(phone_number, required=True) + + @property + def phone_number(self): + return self._phone_number + + +class ProviderIdentifier(UserIdentifier): + """Used for looking up an account by provider. + + See ``auth.get_user()``. + """ + + def __init__(self, provider_id, provider_uid): + """Constructs a new `ProviderIdentifier` object. + +   Args: +     provider_id: A provider ID string. +     provider_uid: A provider UID string. + """ + self._provider_id = _auth_utils.validate_provider_id(provider_id, required=True) + self._provider_uid = _auth_utils.validate_provider_uid( + provider_uid, required=True) + + @property + def provider_id(self): + return self._provider_id + + @property + def provider_uid(self): + return self._provider_uid diff --git a/firebase_admin/_user_import.py b/firebase_admin/_user_import.py new file mode 100644 index 000000000..7c7a9e70b --- /dev/null +++ b/firebase_admin/_user_import.py @@ -0,0 +1,520 @@ +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase user import sub module.""" + +import base64 +import json + +from firebase_admin import _auth_utils + + +def b64_encode(bytes_value): + return base64.urlsafe_b64encode(bytes_value).decode() + + +class UserProvider: + """Represents a user identity provider that can be associated with a Firebase user. + + One or more providers can be specified in an ``ImportUserRecord`` when importing users via + ``auth.import_users()``. + + Args: + uid: User's unique ID assigned by the identity provider. + provider_id: ID of the identity provider. This can be a short domain name or the identifier + of an OpenID identity provider. + email: User's email address (optional). + display_name: User's display name (optional). + photo_url: User's photo URL (optional). 
+ """ + + def __init__(self, uid, provider_id, email=None, display_name=None, photo_url=None): + self.uid = uid + self.provider_id = provider_id + self.email = email + self.display_name = display_name + self.photo_url = photo_url + + @property + def uid(self): + return self._uid + + @uid.setter + def uid(self, uid): + self._uid = _auth_utils.validate_uid(uid, required=True) + + @property + def provider_id(self): + return self._provider_id + + @provider_id.setter + def provider_id(self, provider_id): + self._provider_id = _auth_utils.validate_provider_id(provider_id, required=True) + + @property + def email(self): + return self._email + + @email.setter + def email(self, email): + self._email = _auth_utils.validate_email(email) + + @property + def display_name(self): + return self._display_name + + @display_name.setter + def display_name(self, display_name): + self._display_name = _auth_utils.validate_display_name(display_name) + + @property + def photo_url(self): + return self._photo_url + + @photo_url.setter + def photo_url(self, photo_url): + self._photo_url = _auth_utils.validate_photo_url(photo_url) + + def to_dict(self): + payload = { + 'rawId': self.uid, + 'providerId': self.provider_id, + 'displayName': self.display_name, + 'email': self.email, + 'photoUrl': self.photo_url, + } + return {k: v for k, v in payload.items() if v is not None} + + +class ImportUserRecord: + """Represents a user account to be imported to Firebase Auth. + + Must specify the ``uid`` field at a minimum. A sequence of ``ImportUserRecord`` objects can be + passed to the ``auth.import_users()`` function, in order to import those users into Firebase + Auth in bulk. If the ``password_hash`` is set on a user, a hash configuration must be + specified when calling ``import_users()``. + + Args: + uid: User's unique ID. Must be a non-empty string not longer than 128 characters. + email: User's email address (optional). 
+ email_verified: A boolean indicating whether the user's email has been verified (optional). + display_name: User's display name (optional). + phone_number: User's phone number (optional). + photo_url: User's photo URL (optional). + disabled: A boolean indicating whether this user account has been disabled (optional). + user_metadata: An ``auth.UserMetadata`` instance with additional user metadata (optional). + provider_data: A list of ``auth.UserProvider`` instances (optional). + custom_claims: A ``dict`` of custom claims to be set on the user account (optional). + password_hash: User's password hash as a ``bytes`` sequence (optional). + password_salt: User's password salt as a ``bytes`` sequence (optional). + + Raises: + ValueError: If provided arguments are invalid. + """ + + def __init__(self, uid, email=None, email_verified=None, display_name=None, phone_number=None, + photo_url=None, disabled=None, user_metadata=None, provider_data=None, + custom_claims=None, password_hash=None, password_salt=None): + self.uid = uid + self.email = email + self.display_name = display_name + self.phone_number = phone_number + self.photo_url = photo_url + self.password_hash = password_hash + self.password_salt = password_salt + self.email_verified = email_verified + self.disabled = disabled + self.user_metadata = user_metadata + self.provider_data = provider_data + self.custom_claims = custom_claims + + @property + def uid(self): + return self._uid + + @uid.setter + def uid(self, uid): + self._uid = _auth_utils.validate_uid(uid, required=True) + + @property + def email(self): + return self._email + + @email.setter + def email(self, email): + self._email = _auth_utils.validate_email(email) + + @property + def display_name(self): + return self._display_name + + @display_name.setter + def display_name(self, display_name): + self._display_name = _auth_utils.validate_display_name(display_name) + + @property + def phone_number(self): + return self._phone_number + + 
@phone_number.setter + def phone_number(self, phone_number): + self._phone_number = _auth_utils.validate_phone(phone_number) + + @property + def photo_url(self): + return self._photo_url + + @photo_url.setter + def photo_url(self, photo_url): + self._photo_url = _auth_utils.validate_photo_url(photo_url) + + @property + def password_hash(self): + return self._password_hash + + @password_hash.setter + def password_hash(self, password_hash): + self._password_hash = _auth_utils.validate_bytes(password_hash, 'password_hash') + + @property + def password_salt(self): + return self._password_salt + + @password_salt.setter + def password_salt(self, password_salt): + self._password_salt = _auth_utils.validate_bytes(password_salt, 'password_salt') + + @property + def user_metadata(self): + return self._user_metadata + + @user_metadata.setter + def user_metadata(self, user_metadata): + created_at = user_metadata.creation_timestamp if user_metadata is not None else None + last_login_at = user_metadata.last_sign_in_timestamp if user_metadata is not None else None + self._created_at = _auth_utils.validate_timestamp(created_at, 'creation_timestamp') + self._last_login_at = _auth_utils.validate_timestamp( + last_login_at, 'last_sign_in_timestamp') + self._user_metadata = user_metadata + + @property + def provider_data(self): + return self._provider_data + + @provider_data.setter + def provider_data(self, provider_data): + if provider_data is not None: + try: + if any(not isinstance(p, UserProvider) for p in provider_data): + raise ValueError('One or more provider data instances are invalid.') + except TypeError as err: + raise ValueError('provider_data must be iterable.') from err + self._provider_data = provider_data + + @property + def custom_claims(self): + return self._custom_claims + + @custom_claims.setter + def custom_claims(self, custom_claims): + json_claims = json.dumps(custom_claims) if isinstance( + custom_claims, dict) else custom_claims + self._custom_claims_str = 
_auth_utils.validate_custom_claims(json_claims) + self._custom_claims = custom_claims + + def to_dict(self): + """Returns a dict representation of the user. For internal use only.""" + payload = { + 'localId': self.uid, + 'email': self.email, + 'displayName': self.display_name, + 'phoneNumber': self.phone_number, + 'photoUrl': self.photo_url, + 'emailVerified': (bool(self.email_verified) + if self.email_verified is not None else None), + 'disabled': bool(self.disabled) if self.disabled is not None else None, + 'customAttributes': self._custom_claims_str, + 'createdAt': self._created_at, + 'lastLoginAt': self._last_login_at, + 'passwordHash': b64_encode(self.password_hash) if self.password_hash else None, + 'salt': b64_encode(self.password_salt) if self.password_salt else None, + } + if self.provider_data: + payload['providerUserInfo'] = [p.to_dict() for p in self.provider_data] + return {k: v for k, v in payload.items() if v is not None} + + +class UserImportHash: + """Represents a hash algorithm used to hash user passwords. + + An instance of this class must be specified when importing users with passwords via the + ``auth.import_users()`` API. Use one of the provided class methods to obtain new + instances when required. Refer to `documentation`_ for more details. + + .. _documentation: https://firebase.google.com/docs/auth/admin/import-users + """ + + def __init__(self, name, data=None): + self._name = name + self._data = data + + def to_dict(self): + payload = {'hashAlgorithm': self._name} + if self._data: + payload.update(self._data) + return payload + + @classmethod + def _hmac(cls, name, key): + data = { + 'signerKey': b64_encode(_auth_utils.validate_bytes(key, 'key', required=True)) + } + return UserImportHash(name, data) + + @classmethod + def hmac_sha512(cls, key): + """Creates a new HMAC SHA512 algorithm instance. + + Args: + key: Signer key as a byte sequence. + + Returns: + UserImportHash: A new ``UserImportHash``. 
+ """ + return cls._hmac('HMAC_SHA512', key) + + @classmethod + def hmac_sha256(cls, key): + """Creates a new HMAC SHA256 algorithm instance. + + Args: + key: Signer key as a byte sequence. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + return cls._hmac('HMAC_SHA256', key) + + @classmethod + def hmac_sha1(cls, key): + """Creates a new HMAC SHA1 algorithm instance. + + Args: + key: Signer key as a byte sequence. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + return cls._hmac('HMAC_SHA1', key) + + @classmethod + def hmac_md5(cls, key): + """Creates a new HMAC MD5 algorithm instance. + + Args: + key: Signer key as a byte sequence. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + return cls._hmac('HMAC_MD5', key) + + @classmethod + def md5(cls, rounds): + """Creates a new MD5 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 0 and 8192. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + return UserImportHash( + 'MD5', + {'rounds': _auth_utils.validate_int(rounds, 'rounds', 0, 8192)}) + + @classmethod + def sha1(cls, rounds): + """Creates a new SHA1 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 1 and 8192. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + return UserImportHash( + 'SHA1', + {'rounds': _auth_utils.validate_int(rounds, 'rounds', 1, 8192)}) + + @classmethod + def sha256(cls, rounds): + """Creates a new SHA256 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 1 and 8192. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + return UserImportHash( + 'SHA256', + {'rounds': _auth_utils.validate_int(rounds, 'rounds', 1, 8192)}) + + @classmethod + def sha512(cls, rounds): + """Creates a new SHA512 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 1 and 8192. + + Returns: + UserImportHash: A new ``UserImportHash``. 
+ """ + return UserImportHash( + 'SHA512', + {'rounds': _auth_utils.validate_int(rounds, 'rounds', 1, 8192)}) + + @classmethod + def pbkdf_sha1(cls, rounds): + """Creates a new PBKDF SHA1 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 0 and 120000. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + return UserImportHash( + 'PBKDF_SHA1', + {'rounds': _auth_utils.validate_int(rounds, 'rounds', 0, 120000)}) + + @classmethod + def pbkdf2_sha256(cls, rounds): + """Creates a new PBKDF2 SHA256 algorithm instance. + + Args: + rounds: Number of rounds. Must be an integer between 0 and 120000. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + return UserImportHash( + 'PBKDF2_SHA256', + {'rounds': _auth_utils.validate_int(rounds, 'rounds', 0, 120000)}) + + @classmethod + def scrypt(cls, key, rounds, memory_cost, salt_separator=None): + """Creates a new Scrypt algorithm instance. + + This is the modified Scrypt algorithm used by Firebase Auth. See ``standard_scrypt()`` + function for the standard Scrypt algorith, + + Args: + key: Signer key as a byte sequence. + rounds: Number of rounds. Must be an integer between 1 and 8. + memory_cost: Memory cost as an integer between 1 and 14. + salt_separator: Salt separator as a byte sequence (optional). + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + data = { + 'signerKey': b64_encode(_auth_utils.validate_bytes(key, 'key', required=True)), + 'rounds': _auth_utils.validate_int(rounds, 'rounds', 1, 8), + 'memoryCost': _auth_utils.validate_int(memory_cost, 'memory_cost', 1, 14), + } + if salt_separator: + data['saltSeparator'] = b64_encode(_auth_utils.validate_bytes( + salt_separator, 'salt_separator')) + return UserImportHash('SCRYPT', data) + + @classmethod + def bcrypt(cls): + """Creates a new Bcrypt algorithm instance. + + Returns: + UserImportHash: A new ``UserImportHash``. 
+ """ + return UserImportHash('BCRYPT') + + @classmethod + def standard_scrypt(cls, memory_cost, parallelization, block_size, derived_key_length): + """Creates a new standard Scrypt algorithm instance. + + Args: + memory_cost: CPU Memory cost as a non-negative integer. + parallelization: Parallelization as a non-negative integer. + block_size: Block size as a non-negative integer. + derived_key_length: Derived key length as a non-negative integer. + + Returns: + UserImportHash: A new ``UserImportHash``. + """ + data = { + 'cpuMemCost': _auth_utils.validate_int(memory_cost, 'memory_cost', low=0), + 'parallelization': _auth_utils.validate_int(parallelization, 'parallelization', low=0), + 'blockSize': _auth_utils.validate_int(block_size, 'block_size', low=0), + 'dkLen': _auth_utils.validate_int(derived_key_length, 'derived_key_length', low=0), + } + return UserImportHash('STANDARD_SCRYPT', data) + + +class ErrorInfo: + """Represents an error encountered while performing a batch operation such + as importing users or deleting multiple user accounts. + """ + # TODO(rsgowman): This class used to be specific to importing users (hence + # it's home in _user_import.py). It's now also used by bulk deletion of + # users. Move this to a more common location. + + def __init__(self, error): + self._index = error['index'] + self._reason = error['message'] + + @property + def index(self): + return self._index + + @property + def reason(self): + return self._reason + + +class UserImportResult: + """Represents the result of a bulk user import operation. + + See ``auth.import_users()`` API for more details. 
+ """ + + def __init__(self, result, total): + errors = result.get('error', []) + self._success_count = total - len(errors) + self._failure_count = len(errors) + self._errors = [ErrorInfo(err) for err in errors] + + @property + def success_count(self): + """Returns the number of users successfully imported.""" + return self._success_count + + @property + def failure_count(self): + """Returns the number of users that failed to be imported.""" + return self._failure_count + + @property + def errors(self): + """Returns a list of ``auth.ErrorInfo`` instances describing the errors encountered.""" + return self._errors diff --git a/firebase_admin/_user_mgt.py b/firebase_admin/_user_mgt.py new file mode 100644 index 000000000..e7825499c --- /dev/null +++ b/firebase_admin/_user_mgt.py @@ -0,0 +1,874 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Firebase user management sub module.""" + +import base64 +from collections import defaultdict +import json +from typing import Optional +from urllib import parse +import warnings + +import requests + +from firebase_admin import _auth_utils +from firebase_admin import _rfc3339 +from firebase_admin import _user_identifier +from firebase_admin import _user_import +from firebase_admin._user_import import ErrorInfo + + +MAX_LIST_USERS_RESULTS = 1000 +MAX_IMPORT_USERS_SIZE = 1000 +B64_REDACTED = base64.b64encode(b'REDACTED') + + +class Sentinel: + + def __init__(self, description): + self.description = description + + +DELETE_ATTRIBUTE = Sentinel('Value used to delete an attribute from a user profile') + + +class UserMetadata: + """Contains additional metadata associated with a user account.""" + + def __init__(self, creation_timestamp=None, last_sign_in_timestamp=None, + last_refresh_timestamp=None): + self._creation_timestamp = _auth_utils.validate_timestamp( + creation_timestamp, 'creation_timestamp') + self._last_sign_in_timestamp = _auth_utils.validate_timestamp( + last_sign_in_timestamp, 'last_sign_in_timestamp') + self._last_refresh_timestamp = _auth_utils.validate_timestamp( + last_refresh_timestamp, 'last_refresh_timestamp') + + @property + def creation_timestamp(self): + """ Creation timestamp in milliseconds since the epoch. + + Returns: + integer: The user creation timestamp in milliseconds since the epoch. + """ + return self._creation_timestamp + + @property + def last_sign_in_timestamp(self): + """ Last sign in timestamp in milliseconds since the epoch. + + Returns: + integer: The last sign in timestamp in milliseconds since the epoch. + """ + return self._last_sign_in_timestamp + + @property + def last_refresh_timestamp(self): + """The time at which the user was last active (ID token refreshed). + + Returns: + integer: Milliseconds since epoch timestamp, or `None` if the user was + never active. 
+ """ + return self._last_refresh_timestamp + + +class UserInfo: + """A collection of standard profile information for a user. + + Used to expose profile information returned by an identity provider. + """ + + @property + def uid(self): + """Returns the user ID of this user.""" + raise NotImplementedError + + @property + def display_name(self): + """Returns the display name of this user.""" + raise NotImplementedError + + @property + def email(self): + """Returns the email address associated with this user.""" + raise NotImplementedError + + @property + def phone_number(self): + """Returns the phone number associated with this user.""" + raise NotImplementedError + + @property + def photo_url(self): + """Returns the photo URL of this user.""" + raise NotImplementedError + + @property + def provider_id(self): + """Returns the ID of the identity provider. + + This can be a short domain name (e.g. google.com), or the identity of an OpenID + identity provider. + """ + raise NotImplementedError + + +class UserRecord(UserInfo): + """Contains metadata associated with a Firebase user account.""" + + def __init__(self, data): + super().__init__() + if not isinstance(data, dict): + raise ValueError(f'Invalid data argument: {data}. Must be a dictionary.') + if not data.get('localId'): + raise ValueError('User ID must not be None or empty.') + self._data = data + + @property + def uid(self): + """Returns the user ID of this user. + + Returns: + string: A user ID string. This value is never None or empty. + """ + return self._data.get('localId') + + @property + def display_name(self): + """Returns the display name of this user. + + Returns: + string: A display name string or None. + """ + return self._data.get('displayName') + + @property + def email(self): + """Returns the email address associated with this user. + + Returns: + string: An email address string or None. 
+ """ + return self._data.get('email') + + @property + def phone_number(self): + """Returns the phone number associated with this user. + + Returns: + string: A phone number string or None. + """ + return self._data.get('phoneNumber') + + @property + def photo_url(self): + """Returns the photo URL of this user. + + Returns: + string: A URL string or None. + """ + return self._data.get('photoUrl') + + @property + def provider_id(self): + """Returns the provider ID of this user. + + Returns: + string: A constant provider ID value. + """ + return 'firebase' + + @property + def email_verified(self): + """Returns whether the email address of this user has been verified. + + Returns: + bool: True if the email has been verified, and False otherwise. + """ + return bool(self._data.get('emailVerified')) + + @property + def disabled(self): + """Returns whether this user account is disabled. + + Returns: + bool: True if the user account is disabled, and False otherwise. + """ + return bool(self._data.get('disabled')) + + @property + def tokens_valid_after_timestamp(self): + """Returns the time, in milliseconds since the epoch, before which tokens are invalid. + + Note: this is truncated to 1 second accuracy. + + Returns: + int: Timestamp in milliseconds since the epoch, truncated to the second. + All tokens issued before that time are considered revoked. + """ + valid_since = self._data.get('validSince') + if valid_since is not None: + return 1000 * int(valid_since) + return 0 + + @property + def user_metadata(self): + """Returns additional metadata associated with this user. + + Returns: + UserMetadata: A UserMetadata instance. Does not return None. 
+ """ + def _int_or_none(key): + if key in self._data: + return int(self._data[key]) + return None + last_refresh_at_millis = None + last_refresh_at_rfc3339 = self._data.get('lastRefreshAt', None) + if last_refresh_at_rfc3339: + last_refresh_at_millis = int(_rfc3339.parse_to_epoch(last_refresh_at_rfc3339) * 1000) + return UserMetadata( + _int_or_none('createdAt'), _int_or_none('lastLoginAt'), last_refresh_at_millis) + + @property + def provider_data(self): + """Returns a list of UserInfo instances. + + Each object represents an identity from an identity provider that is linked to this user. + + Returns: + list: A list of UserInfo objects, which may be empty. + """ + providers = self._data.get('providerUserInfo', []) + return [ProviderUserInfo(entry) for entry in providers] + + @property + def custom_claims(self): + """Returns any custom claims set on this user account. + + Returns: + dict: A dictionary of claims or None. + """ + claims = self._data.get('customAttributes') + if claims: + parsed = json.loads(claims) + if parsed != {}: + return parsed + return None + + @property + def tenant_id(self): + """Returns the tenant ID of this user. + + Returns: + string: A tenant ID string or None. + """ + return self._data.get('tenantId') + + +class ExportedUserRecord(UserRecord): + """Contains metadata associated with a user including password hash and salt.""" + + @property + def password_hash(self): + """The user's password hash as a base64-encoded string. + + If the Firebase Auth hashing algorithm (SCRYPT) was used to create the user account, this + is the base64-encoded password hash of the user. If a different hashing algorithm was + used to create this user, as is typical when migrating from another Auth system, this + is an empty string. If no password is set, or if the service account doesn't have permission + to read the password, then this is ``None``. 
+ """ + password_hash = self._data.get('passwordHash') + + # If the password hash is redacted (probably due to missing permissions) then clear it out, + # similar to how the salt is returned. (Otherwise, it *looks* like a b64-encoded hash is + # present, which is confusing.) + if password_hash == B64_REDACTED: + return None + return password_hash + + @property + def password_salt(self): + """The user's password salt as a base64-encoded string. + + If the Firebase Auth hashing algorithm (SCRYPT) was used to create the user account, this + is the base64-encoded password salt of the user. If a different hashing algorithm was + used to create this user, as is typical when migrating from another Auth system, this is + an empty string. If no password is set, or if the service account doesn't have permission to + read the password, then this is ``None``. + """ + return self._data.get('salt') + + +class GetUsersResult: + """Represents the result of the ``auth.get_users()`` API.""" + + def __init__(self, users, not_found): + """Constructs a `GetUsersResult` object. + + Args: + users: List of `UserRecord` instances. + not_found: List of `UserIdentifier` instances. + """ + self._users = users + self._not_found = not_found + + @property + def users(self): + """Set of `UserRecord` instances, corresponding to the set of users + that were requested. Only users that were found are listed here. The + result set is unordered. + """ + return self._users + + @property + def not_found(self): + """Set of `UserIdentifier` instances that were requested, but not + found. + """ + return self._not_found + + +class ListUsersPage: + """Represents a page of user records exported from a Firebase project. + + Provides methods for traversing the user accounts included in this page, as well as retrieving + subsequent pages of users. The iterator returned by ``iterate_all()`` can be used to iterate + through all users in the Firebase project starting from this page. 
+ """ + + def __init__(self, download, page_token, max_results): + self._download = download + self._max_results = max_results + self._current = download(page_token, max_results) + + @property + def users(self): + """A list of ``ExportedUserRecord`` instances available in this page.""" + return [ExportedUserRecord(user) for user in self._current.get('users', [])] + + @property + def next_page_token(self): + """Page token string for the next page (empty string indicates no more pages).""" + return self._current.get('nextPageToken', '') + + @property + def has_next_page(self): + """A boolean indicating whether more pages are available.""" + return bool(self.next_page_token) + + def get_next_page(self): + """Retrieves the next page of user accounts, if available. + + Returns: + ListUsersPage: Next page of users, or None if this is the last page. + """ + if self.has_next_page: + return ListUsersPage(self._download, self.next_page_token, self._max_results) + return None + + def iterate_all(self): + """Retrieves an iterator for user accounts. + + Returned iterator will iterate through all the user accounts in the Firebase project + starting from this page. The iterator will never buffer more than one page of users + in memory at a time. + + Returns: + iterator: An iterator of ExportedUserRecord instances. + """ + return _UserIterator(self) + + +class DeleteUsersResult: + """Represents the result of the ``auth.delete_users()`` API.""" + + def __init__(self, result, total): + """Constructs a `DeleteUsersResult` object. + + Args: + result: The proto response, wrapped in a + `BatchDeleteAccountsResponse` instance. + total: Total integer number of deletion attempts. + """ + errors = result.errors + self._success_count = total - len(errors) + self._failure_count = len(errors) + self._errors = errors + + @property + def success_count(self): + """Returns the number of users that were deleted successfully (possibly + zero). 
+ + Users that did not exist prior to calling `delete_users()` are + considered to be successfully deleted. + """ + return self._success_count + + @property + def failure_count(self): + """Returns the number of users that failed to be deleted (possibly + zero). + """ + return self._failure_count + + @property + def errors(self): + """A list of `auth.ErrorInfo` instances describing the errors that + were encountered during the deletion. Length of this list is equal to + `failure_count`. + """ + return self._errors + + +class BatchDeleteAccountsResponse: + """Represents the results of a `delete_users()` call.""" + + def __init__(self, errors=None): + """Constructs a `BatchDeleteAccountsResponse` instance, corresponding to + the JSON representing the `BatchDeleteAccountsResponse` proto. + + Args: + errors: List of dictionaries, with each dictionary representing an + `ErrorInfo` instance as returned by the server. `None` implies + an empty list. + """ + self.errors = [ErrorInfo(err) for err in errors] if errors else [] + + +class ProviderUserInfo(UserInfo): + """Contains metadata regarding how a user is known by a particular identity provider.""" + + def __init__(self, data): + super().__init__() + if not isinstance(data, dict): + raise ValueError(f'Invalid data argument: {data}. Must be a dictionary.') + if not data.get('rawId'): + raise ValueError('User ID must not be None or empty.') + self._data = data + + @property + def uid(self): + return self._data.get('rawId') + + @property + def display_name(self): + return self._data.get('displayName') + + @property + def email(self): + return self._data.get('email') + + @property + def phone_number(self): + return self._data.get('phoneNumber') + + @property + def photo_url(self): + return self._data.get('photoUrl') + + @property + def provider_id(self): + return self._data.get('providerId') + + +class ActionCodeSettings: + """Contains required continue/state URL with optional Android and iOS settings. 
+ Used when invoking the email action link generation APIs. + """ + + def __init__( + self, + url: str, + handle_code_in_app: Optional[bool] = None, + dynamic_link_domain: Optional[str] = None, + ios_bundle_id: Optional[str] = None, + android_package_name: Optional[str] = None, + android_install_app: Optional[str] = None, + android_minimum_version: Optional[str] = None, + link_domain: Optional[str] = None, + ): + if dynamic_link_domain is not None: + warnings.warn( + 'dynamic_link_domain is deprecated, use link_domain instead', + DeprecationWarning + ) + self.url = url + self.handle_code_in_app = handle_code_in_app + self.dynamic_link_domain = dynamic_link_domain + self.ios_bundle_id = ios_bundle_id + self.android_package_name = android_package_name + self.android_install_app = android_install_app + self.android_minimum_version = android_minimum_version + self.link_domain = link_domain + + +def encode_action_code_settings(settings): + """ Validates the provided action code settings for email link generation and + populates the REST api parameters. + + settings - ``ActionCodeSettings`` object provided to be encoded + returns - dict of parameters to be passed for link gereration. 
+ """ + + parameters = {} + # url + if not settings.url: + raise ValueError("Dynamic action links url is mandatory") + + try: + parsed = parse.urlparse(settings.url) + if not parsed.netloc: + raise ValueError(f'Malformed dynamic action links url: "{settings.url}".') + parameters['continueUrl'] = settings.url + except Exception as err: + raise ValueError(f'Malformed dynamic action links url: "{settings.url}".') from err + + # handle_code_in_app + if settings.handle_code_in_app is not None: + if not isinstance(settings.handle_code_in_app, bool): + raise ValueError( + f'Invalid value provided for handle_code_in_app: {settings.handle_code_in_app}') + parameters['canHandleCodeInApp'] = settings.handle_code_in_app + + # dynamic_link_domain + if settings.dynamic_link_domain is not None: + if not isinstance(settings.dynamic_link_domain, str): + raise ValueError( + f'Invalid value provided for dynamic_link_domain: {settings.dynamic_link_domain}') + parameters['dynamicLinkDomain'] = settings.dynamic_link_domain + + # link_domain + if settings.link_domain is not None: + if not isinstance(settings.link_domain, str): + raise ValueError( + f'Invalid value provided for link_domain: {settings.link_domain}') + parameters['linkDomain'] = settings.link_domain + + # ios_bundle_id + if settings.ios_bundle_id is not None: + if not isinstance(settings.ios_bundle_id, str): + raise ValueError( + f'Invalid value provided for ios_bundle_id: {settings.ios_bundle_id}') + parameters['iOSBundleId'] = settings.ios_bundle_id + + # android_* attributes + if (settings.android_minimum_version or settings.android_install_app) \ + and not settings.android_package_name: + raise ValueError("Android package name is required when specifying other Android settings") + + if settings.android_package_name is not None: + if not isinstance(settings.android_package_name, str): + raise ValueError( + f'Invalid value provided for android_package_name: {settings.android_package_name}') + 
parameters['androidPackageName'] = settings.android_package_name + + if settings.android_minimum_version is not None: + if not isinstance(settings.android_minimum_version, str): + raise ValueError( + 'Invalid value provided for android_minimum_version: ' + f'{settings.android_minimum_version}') + parameters['androidMinimumVersion'] = settings.android_minimum_version + + if settings.android_install_app is not None: + if not isinstance(settings.android_install_app, bool): + raise ValueError( + f'Invalid value provided for android_install_app: {settings.android_install_app}') + parameters['androidInstallApp'] = settings.android_install_app + + return parameters + + +class UserManager: + """Provides methods for interacting with the Google Identity Toolkit.""" + + ID_TOOLKIT_URL = 'https://identitytoolkit.googleapis.com/v1' + + def __init__(self, http_client, project_id, tenant_id=None, url_override=None): + self.http_client = http_client + url_prefix = url_override or self.ID_TOOLKIT_URL + self.base_url = f'{url_prefix}/projects/{project_id}' + if tenant_id: + self.base_url += f'/tenants/{tenant_id}' + + def get_user(self, **kwargs): + """Gets the user data corresponding to the provided key.""" + if 'uid' in kwargs: + key, key_type = kwargs.pop('uid'), 'user ID' + payload = {'localId' : [_auth_utils.validate_uid(key, required=True)]} + elif 'email' in kwargs: + key, key_type = kwargs.pop('email'), 'email' + payload = {'email' : [_auth_utils.validate_email(key, required=True)]} + elif 'phone_number' in kwargs: + key, key_type = kwargs.pop('phone_number'), 'phone number' + payload = {'phoneNumber' : [_auth_utils.validate_phone(key, required=True)]} + else: + raise TypeError(f'Unsupported keyword arguments: {kwargs}.') + + body, http_resp = self._make_request('post', '/accounts:lookup', json=payload) + if not body or not body.get('users'): + raise _auth_utils.UserNotFoundError( + f'No user record found for the provided {key_type}: {key}.', + http_response=http_resp) + 
return body['users'][0] + + def get_users(self, identifiers): + """Looks up multiple users by their identifiers (uid, email, etc.) + + Args: + identifiers: UserIdentifier[]: The identifiers indicating the user + to be looked up. Must have <= 100 entries. + + Returns: + list[dict[string, string]]: List of dicts representing the JSON + `UserInfo` responses from the server. + + Raises: + ValueError: If any of the identifiers are invalid or if more than + 100 identifiers are specified. + UnexpectedResponseError: If the backend server responds with an + unexpected message. + """ + if not identifiers: + return [] + if len(identifiers) > 100: + raise ValueError('`identifiers` parameter must have <= 100 entries.') + + payload = defaultdict(list) + for identifier in identifiers: + if isinstance(identifier, _user_identifier.UidIdentifier): + payload['localId'].append(identifier.uid) + elif isinstance(identifier, _user_identifier.EmailIdentifier): + payload['email'].append(identifier.email) + elif isinstance(identifier, _user_identifier.PhoneIdentifier): + payload['phoneNumber'].append(identifier.phone_number) + elif isinstance(identifier, _user_identifier.ProviderIdentifier): + payload['federatedUserId'].append({ + 'providerId': identifier.provider_id, + 'rawId': identifier.provider_uid + }) + else: + raise ValueError( + f'Invalid entry in "identifiers" list. 
Unsupported type: {type(identifier)}') + + body, http_resp = self._make_request( + 'post', '/accounts:lookup', json=payload) + if not http_resp.ok: + raise _auth_utils.UnexpectedResponseError( + 'Failed to get users.', http_response=http_resp) + return body.get('users', []) + + def list_users(self, page_token=None, max_results=MAX_LIST_USERS_RESULTS): + """Retrieves a batch of users.""" + if page_token is not None: + if not isinstance(page_token, str) or not page_token: + raise ValueError('Page token must be a non-empty string.') + if not isinstance(max_results, int): + raise ValueError('Max results must be an integer.') + if max_results < 1 or max_results > MAX_LIST_USERS_RESULTS: + raise ValueError( + f'Max results must be a positive integer less than {MAX_LIST_USERS_RESULTS}.') + + payload = {'maxResults': max_results} + if page_token: + payload['nextPageToken'] = page_token + body, _ = self._make_request('get', '/accounts:batchGet', params=payload) + return body + + def create_user(self, uid=None, display_name=None, email=None, phone_number=None, + photo_url=None, password=None, disabled=None, email_verified=None): + """Creates a new user account with the specified properties.""" + payload = { + 'localId': _auth_utils.validate_uid(uid), + 'displayName': _auth_utils.validate_display_name(display_name), + 'email': _auth_utils.validate_email(email), + 'phoneNumber': _auth_utils.validate_phone(phone_number), + 'photoUrl': _auth_utils.validate_photo_url(photo_url), + 'password': _auth_utils.validate_password(password), + 'emailVerified': bool(email_verified) if email_verified is not None else None, + 'disabled': bool(disabled) if disabled is not None else None, + } + payload = {k: v for k, v in payload.items() if v is not None} + body, http_resp = self._make_request('post', '/accounts', json=payload) + if not body or not body.get('localId'): + raise _auth_utils.UnexpectedResponseError( + 'Failed to create new user.', http_response=http_resp) + return 
body.get('localId') + + def update_user(self, uid, display_name=None, email=None, phone_number=None, + photo_url=None, password=None, disabled=None, email_verified=None, + valid_since=None, custom_claims=None, providers_to_delete=None): + """Updates an existing user account with the specified properties""" + payload = { + 'localId': _auth_utils.validate_uid(uid, required=True), + 'email': _auth_utils.validate_email(email), + 'password': _auth_utils.validate_password(password), + 'validSince': _auth_utils.validate_timestamp(valid_since, 'valid_since'), + 'emailVerified': bool(email_verified) if email_verified is not None else None, + 'disableUser': bool(disabled) if disabled is not None else None, + } + + remove = [] + remove_provider = _auth_utils.validate_provider_ids(providers_to_delete) + if display_name is not None: + if display_name is DELETE_ATTRIBUTE: + remove.append('DISPLAY_NAME') + else: + payload['displayName'] = _auth_utils.validate_display_name(display_name) + if photo_url is not None: + if photo_url is DELETE_ATTRIBUTE: + remove.append('PHOTO_URL') + else: + payload['photoUrl'] = _auth_utils.validate_photo_url(photo_url) + if remove: + payload['deleteAttribute'] = remove + + if phone_number is not None: + if phone_number is DELETE_ATTRIBUTE: + remove_provider.append('phone') + else: + payload['phoneNumber'] = _auth_utils.validate_phone(phone_number) + + if custom_claims is not None: + if custom_claims is DELETE_ATTRIBUTE: + custom_claims = {} + json_claims = json.dumps(custom_claims) if isinstance( + custom_claims, dict) else custom_claims + payload['customAttributes'] = _auth_utils.validate_custom_claims(json_claims) + + if remove_provider: + payload['deleteProvider'] = list(set(remove_provider)) + + payload = {k: v for k, v in payload.items() if v is not None} + body, http_resp = self._make_request('post', '/accounts:update', json=payload) + if not body or not body.get('localId'): + raise _auth_utils.UnexpectedResponseError( + f'Failed to update 
user: {uid}.', http_response=http_resp) + return body.get('localId') + + def delete_user(self, uid): + """Deletes the user identified by the specified user ID.""" + _auth_utils.validate_uid(uid, required=True) + body, http_resp = self._make_request('post', '/accounts:delete', json={'localId' : uid}) + if not body or not body.get('kind'): + raise _auth_utils.UnexpectedResponseError( + f'Failed to delete user: {uid}.', http_response=http_resp) + + def delete_users(self, uids, force_delete=False): + """Deletes the users identified by the specified user ids. + + Args: + uids: A list of strings indicating the uids of the users to be deleted. + Must have <= 1000 entries. + force_delete: Optional parameter that indicates if users should be + deleted, even if they're not disabled. Defaults to False. + + + Returns: + BatchDeleteAccountsResponse: Server's proto response, wrapped in a + python object. + + Raises: + ValueError: If any of the identifiers are invalid or if more than 1000 + identifiers are specified. + UnexpectedResponseError: If the backend server responds with an + unexpected message. 
+ """ + if not uids: + return BatchDeleteAccountsResponse() + + if len(uids) > 1000: + raise ValueError("`uids` paramter must have <= 1000 entries.") + for uid in uids: + _auth_utils.validate_uid(uid, required=True) + + body, http_resp = self._make_request('post', '/accounts:batchDelete', + json={'localIds': uids, 'force': force_delete}) + if not isinstance(body, dict): + raise _auth_utils.UnexpectedResponseError( + 'Unexpected response from server while attempting to delete users.', + http_response=http_resp) + return BatchDeleteAccountsResponse(body.get('errors', [])) + + def import_users(self, users, hash_alg=None): + """Imports the given list of users to Firebase Auth.""" + try: + if not users or len(users) > MAX_IMPORT_USERS_SIZE: + raise ValueError( + 'Users must be a non-empty list with no more than ' + f'{MAX_IMPORT_USERS_SIZE} elements.') + if any(not isinstance(u, _user_import.ImportUserRecord) for u in users): + raise ValueError('One or more user objects are invalid.') + except TypeError as err: + raise ValueError('users must be iterable') from err + + payload = {'users': [u.to_dict() for u in users]} + if any('passwordHash' in u for u in payload['users']): + if not isinstance(hash_alg, _user_import.UserImportHash): + raise ValueError('A UserImportHash is required to import users with passwords.') + payload.update(hash_alg.to_dict()) + body, http_resp = self._make_request('post', '/accounts:batchCreate', json=payload) + if not isinstance(body, dict): + raise _auth_utils.UnexpectedResponseError( + 'Failed to import users.', http_response=http_resp) + return body + + def generate_email_action_link(self, action_type, email, action_code_settings=None): + """Fetches the email action links for types + + Args: + action_type: String. Valid values ['VERIFY_EMAIL', 'EMAIL_SIGNIN', 'PASSWORD_RESET'] + email: Email of the user for which the action is performed + action_code_settings: ``ActionCodeSettings`` object or dict (optional). 
Defines whether + the link is to be handled by a mobile app and the additional state information to be + passed in the deep link, etc. + Returns: + link_url: action url to be emailed to the user + + Raises: + UnexpectedResponseError: If the backend server responds with an unexpected message + FirebaseError: If an error occurs while generating the link + ValueError: If the provided arguments are invalid + """ + payload = { + 'requestType': _auth_utils.validate_action_type(action_type), + 'email': _auth_utils.validate_email(email), + 'returnOobLink': True + } + + if action_code_settings: + payload.update(encode_action_code_settings(action_code_settings)) + + body, http_resp = self._make_request('post', '/accounts:sendOobCode', json=payload) + if not body or not body.get('oobLink'): + raise _auth_utils.UnexpectedResponseError( + 'Failed to generate email action link.', http_response=http_resp) + return body.get('oobLink') + + def _make_request(self, method, path, **kwargs): + url = f'{self.base_url}{path}' + try: + return self.http_client.body_and_response(method, url, **kwargs) + except requests.exceptions.RequestException as error: + raise _auth_utils.handle_auth_backend_error(error) + + +class _UserIterator(_auth_utils.PageIterator): + + @property + def items(self): + return self._current_page.users diff --git a/firebase_admin/_utils.py b/firebase_admin/_utils.py new file mode 100644 index 000000000..0277b9e5f --- /dev/null +++ b/firebase_admin/_utils.py @@ -0,0 +1,347 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Internal utilities common to all modules.""" + +import json +from platform import python_version +from typing import Callable, Optional + +import google.auth +import requests +import httpx + +import firebase_admin +from firebase_admin import exceptions + + +_ERROR_CODE_TO_EXCEPTION_TYPE = { + exceptions.INVALID_ARGUMENT: exceptions.InvalidArgumentError, + exceptions.FAILED_PRECONDITION: exceptions.FailedPreconditionError, + exceptions.OUT_OF_RANGE: exceptions.OutOfRangeError, + exceptions.UNAUTHENTICATED: exceptions.UnauthenticatedError, + exceptions.PERMISSION_DENIED: exceptions.PermissionDeniedError, + exceptions.NOT_FOUND: exceptions.NotFoundError, + exceptions.ABORTED: exceptions.AbortedError, + exceptions.ALREADY_EXISTS: exceptions.AlreadyExistsError, + exceptions.CONFLICT: exceptions.ConflictError, + exceptions.RESOURCE_EXHAUSTED: exceptions.ResourceExhaustedError, + exceptions.CANCELLED: exceptions.CancelledError, + exceptions.DATA_LOSS: exceptions.DataLossError, + exceptions.UNKNOWN: exceptions.UnknownError, + exceptions.INTERNAL: exceptions.InternalError, + exceptions.UNAVAILABLE: exceptions.UnavailableError, + exceptions.DEADLINE_EXCEEDED: exceptions.DeadlineExceededError, +} + + +_HTTP_STATUS_TO_ERROR_CODE = { + 400: exceptions.INVALID_ARGUMENT, + 401: exceptions.UNAUTHENTICATED, + 403: exceptions.PERMISSION_DENIED, + 404: exceptions.NOT_FOUND, + 409: exceptions.CONFLICT, + 412: exceptions.FAILED_PRECONDITION, + 429: exceptions.RESOURCE_EXHAUSTED, + 500: exceptions.INTERNAL, + 503: exceptions.UNAVAILABLE, +} + + +# See https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto +_RPC_CODE_TO_ERROR_CODE = { + 1: exceptions.CANCELLED, + 2: exceptions.UNKNOWN, + 3: exceptions.INVALID_ARGUMENT, + 4: exceptions.DEADLINE_EXCEEDED, + 5: exceptions.NOT_FOUND, + 6: exceptions.ALREADY_EXISTS, + 7: exceptions.PERMISSION_DENIED, + 8: 
exceptions.RESOURCE_EXHAUSTED, + 9: exceptions.FAILED_PRECONDITION, + 10: exceptions.ABORTED, + 11: exceptions.OUT_OF_RANGE, + 13: exceptions.INTERNAL, + 14: exceptions.UNAVAILABLE, + 15: exceptions.DATA_LOSS, + 16: exceptions.UNAUTHENTICATED, +} + +def get_metrics_header(): + return f'gl-python/{python_version()} fire-admin/{firebase_admin.__version__}' + +def _get_initialized_app(app): + """Returns a reference to an initialized App instance.""" + if app is None: + return firebase_admin.get_app() + + if isinstance(app, firebase_admin.App): + initialized_app = firebase_admin.get_app(app.name) + if app is not initialized_app: + raise ValueError('Illegal app argument. App instance not ' + 'initialized via the firebase module.') + return app + + raise ValueError( + 'Illegal app argument. Argument must be of type firebase_admin.App, but given ' + f'"{type(app)}".') + + + +def get_app_service(app, name, initializer): + app = _get_initialized_app(app) + return app._get_service(name, initializer) # pylint: disable=protected-access + + +def handle_platform_error_from_requests(error, handle_func=None): + """Constructs a ``FirebaseError`` from the given requests error. + + This can be used to handle errors returned by Google Cloud Platform (GCP) APIs. + + Args: + error: An error raised by the requests module while making an HTTP call to a GCP API. + handle_func: A function that can be used to handle platform errors in a custom way. When + specified, this function will be called with three arguments. It has the same + signature as ```_handle_func_requests``, but may return ``None``. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code. 
+ """ + if error.response is None: + return handle_requests_error(error) + + response = error.response + content = response.content.decode() + status_code = response.status_code + error_dict, message = _parse_platform_error(content, status_code) + exc = None + if handle_func: + exc = handle_func(error, message, error_dict) + + return exc if exc else _handle_func_requests(error, message, error_dict) + +def handle_platform_error_from_httpx( + error: httpx.HTTPError, + handle_func: Optional[Callable[..., Optional[exceptions.FirebaseError]]] = None +) -> exceptions.FirebaseError: + """Constructs a ``FirebaseError`` from the given httpx error. + + This can be used to handle errors returned by Google Cloud Platform (GCP) APIs. + + Args: + error: An error raised by the httpx module while making an HTTP call to a GCP API. + handle_func: A function that can be used to handle platform errors in a custom way. When + specified, this function will be called with three arguments. It has the same + signature as ```_handle_func_httpx``, but may return ``None``. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code. + """ + + if isinstance(error, httpx.HTTPStatusError): + response = error.response + content = response.content.decode() + status_code = response.status_code + error_dict, message = _parse_platform_error(content, status_code) + exc = None + if handle_func: + exc = handle_func(error, message, error_dict) + + return exc if exc else _handle_func_httpx(error, message, error_dict) + return handle_httpx_error(error) + + +def handle_operation_error(error): + """Constructs a ``FirebaseError`` from the given operation error. + + Args: + error: An error returned by a long running operation. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code. 
+ """ + if not isinstance(error, dict): + return exceptions.UnknownError( + message=f'Unknown error while making a remote service call: {error}', + cause=error) + + rpc_code = error.get('code') + message = error.get('message') + error_code = _rpc_code_to_error_code(rpc_code) + err_type = _error_code_to_exception_type(error_code) + return err_type(message=message) + + +def _handle_func_requests(error, message, error_dict): + """Constructs a ``FirebaseError`` from the given GCP error. + + Args: + error: An error raised by the requests module while making an HTTP call. + message: A message to be included in the resulting ``FirebaseError``. + error_dict: Parsed GCP error response. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code or None. + """ + code = error_dict.get('status') + return handle_requests_error(error, message, code) + + +def handle_requests_error(error, message=None, code=None): + """Constructs a ``FirebaseError`` from the given requests error. + + This method is agnostic of the remote service that produced the error, whether it is a GCP + service or otherwise. Therefore, this method does not attempt to parse the error response in + any way. + + Args: + error: An error raised by the requests module while making an HTTP call. + message: A message to be included in the resulting ``FirebaseError`` (optional). If not + specified the string representation of the ``error`` argument is used as the message. + code: A GCP error code that will be used to determine the resulting error type (optional). + If not specified the HTTP status code on the error response is used to determine a + suitable error code. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code. 
+ """ + if isinstance(error, requests.exceptions.Timeout): + return exceptions.DeadlineExceededError( + message=f'Timed out while making an API call: {error}', + cause=error) + if isinstance(error, requests.exceptions.ConnectionError): + return exceptions.UnavailableError( + message=f'Failed to establish a connection: {error}', + cause=error) + if error.response is None: + return exceptions.UnknownError( + message=f'Unknown error while making a remote service call: {error}', + cause=error) + + if not code: + code = _http_status_to_error_code(error.response.status_code) + if not message: + message = str(error) + + err_type = _error_code_to_exception_type(code) + return err_type(message=message, cause=error, http_response=error.response) + +def _handle_func_httpx(error: httpx.HTTPError, message, error_dict) -> exceptions.FirebaseError: + """Constructs a ``FirebaseError`` from the given GCP error. + + Args: + error: An error raised by the httpx module while making an HTTP call. + message: A message to be included in the resulting ``FirebaseError``. + error_dict: Parsed GCP error response. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code or None. + """ + code = error_dict.get('status') + return handle_httpx_error(error, message, code) + + +def handle_httpx_error(error: httpx.HTTPError, message=None, code=None) -> exceptions.FirebaseError: + """Constructs a ``FirebaseError`` from the given httpx error. + + This method is agnostic of the remote service that produced the error, whether it is a GCP + service or otherwise. Therefore, this method does not attempt to parse the error response in + any way. + + Args: + error: An error raised by the httpx module while making an HTTP call. + message: A message to be included in the resulting ``FirebaseError`` (optional). If not + specified the string representation of the ``error`` argument is used as the message. 
+ code: A GCP error code that will be used to determine the resulting error type (optional). + If not specified the HTTP status code on the error response is used to determine a + suitable error code. + + Returns: + FirebaseError: A ``FirebaseError`` that can be raised to the user code. + """ + if isinstance(error, httpx.TimeoutException): + return exceptions.DeadlineExceededError( + message=f'Timed out while making an API call: {error}', + cause=error) + if isinstance(error, httpx.ConnectError): + return exceptions.UnavailableError( + message=f'Failed to establish a connection: {error}', + cause=error) + if isinstance(error, httpx.HTTPStatusError): + if not code: + code = _http_status_to_error_code(error.response.status_code) + if not message: + message = str(error) + + err_type = _error_code_to_exception_type(code) + return err_type(message=message, cause=error, http_response=error.response) + + return exceptions.UnknownError( + message=f'Unknown error while making a remote service call: {error}', + cause=error) + +def _http_status_to_error_code(status): + """Maps an HTTP status to a platform error code.""" + return _HTTP_STATUS_TO_ERROR_CODE.get(status, exceptions.UNKNOWN) + +def _rpc_code_to_error_code(rpc_code): + """Maps an RPC code to a platform error code.""" + return _RPC_CODE_TO_ERROR_CODE.get(rpc_code, exceptions.UNKNOWN) + +def _error_code_to_exception_type(code): + """Maps a platform error code to an exception type.""" + return _ERROR_CODE_TO_EXCEPTION_TYPE.get(code, exceptions.UnknownError) + + +def _parse_platform_error(content, status_code): + """Parses an HTTP error response from a Google Cloud Platform API and extracts the error code + and message fields. + + Args: + content: Decoded content of the response body. + status_code: HTTP status code. + + Returns: + tuple: A tuple containing error code and message. 
+ """ + data = {} + try: + parsed_body = json.loads(content) + if isinstance(parsed_body, dict): + data = parsed_body + except ValueError: + pass + + error_dict = data.get('error', {}) + msg = error_dict.get('message') + if not msg: + msg = f'Unexpected HTTP response with status: {status_code}; body: {content}' + return error_dict, msg + + +# Temporarily disable the lint rule. For more information see: +# https://github.com/googleapis/google-auth-library-python/pull/561 +# pylint: disable=abstract-method +class EmulatorAdminCredentials(google.auth.credentials.Credentials): + """ Credentials for use with the firebase local emulator. + + This is used instead of user-supplied credentials or ADC. It will silently do nothing when + asked to refresh credentials. + """ + def __init__(self): + google.auth.credentials.Credentials.__init__(self) + self.token = 'owner' + + def refresh(self, request): + pass diff --git a/firebase_admin/app_check.py b/firebase_admin/app_check.py new file mode 100644 index 000000000..40d857f4e --- /dev/null +++ b/firebase_admin/app_check.py @@ -0,0 +1,161 @@ +# Copyright 2022 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Firebase App Check module.""" + +from typing import Any, Dict +import jwt +from jwt import PyJWKClient, ExpiredSignatureError, InvalidTokenError, DecodeError +from jwt import InvalidAudienceError, InvalidIssuerError, InvalidSignatureError +from firebase_admin import _utils + +_APP_CHECK_ATTRIBUTE = '_app_check' + +def _get_app_check_service(app) -> Any: + return _utils.get_app_service(app, _APP_CHECK_ATTRIBUTE, _AppCheckService) + +def verify_token(token: str, app=None) -> Dict[str, Any]: + """Verifies a Firebase App Check token. + + Args: + token: A token from App Check. + app: An App instance (optional). + + Returns: + Dict[str, Any]: The token's decoded claims. + + Raises: + ValueError: If the app's ``project_id`` is invalid or unspecified, + or if the token's headers or payload are invalid. + PyJWKClientError: If PyJWKClient fails to fetch a valid signing key. + """ + return _get_app_check_service(app).verify_token(token) + +class _AppCheckService: + """Service class that implements Firebase App Check functionality.""" + + _APP_CHECK_ISSUER = 'https://firebaseappcheck.googleapis.com/' + _JWKS_URL = 'https://firebaseappcheck.googleapis.com/v1/jwks' + _project_id = None + _scoped_project_id = None + _jwks_client = None + + _APP_CHECK_HEADERS = { + 'x-goog-api-client': _utils.get_metrics_header(), + } + + def __init__(self, app): + # Validate and store the project_id to validate the JWT claims + self._project_id = app.project_id + if not self._project_id: + raise ValueError( + 'A project ID must be specified to access the App Check ' + 'service. Either set the projectId option, use service ' + 'account credentials, or set the ' + 'GOOGLE_CLOUD_PROJECT environment variable.') + self._scoped_project_id = 'projects/' + app.project_id + # Default lifespan is 300 seconds (5 minutes) so we change it to 21600 seconds (6 hours). 
+ self._jwks_client = PyJWKClient( + self._JWKS_URL, lifespan=21600, headers=self._APP_CHECK_HEADERS) + + + def verify_token(self, token: str) -> Dict[str, Any]: + """Verifies a Firebase App Check token.""" + _Validators.check_string("app check token", token) + + # Obtain the Firebase App Check Public Keys + # Note: It is not recommended to hard code these keys as they rotate, + # but you should cache them for up to 6 hours. + try: + signing_key = self._jwks_client.get_signing_key_from_jwt(token) + self._has_valid_token_headers(jwt.get_unverified_header(token)) + verified_claims = self._decode_and_verify(token, signing_key.key) + except (InvalidTokenError, DecodeError) as exception: + raise ValueError( + f'Verifying App Check token failed. Error: {exception}' + ) from exception + + verified_claims['app_id'] = verified_claims.get('sub') + return verified_claims + + def _has_valid_token_headers(self, headers: Any) -> None: + """Checks whether the token has valid headers for App Check.""" + # Ensure the token's header has type JWT + if headers.get('typ') != 'JWT': + raise ValueError("The provided App Check token has an incorrect type header") + # Ensure the token's header uses the algorithm RS256 + algorithm = headers.get('alg') + if algorithm != 'RS256': + raise ValueError( + 'The provided App Check token has an incorrect alg header. ' + f'Expected RS256 but got {algorithm}.' + ) + + def _decode_and_verify(self, token: str, signing_key: str): + """Decodes and verifies the token from App Check.""" + payload = {} + try: + payload = jwt.decode( + token, + signing_key, + algorithms=["RS256"], + audience=self._scoped_project_id + ) + except InvalidSignatureError as exception: + raise ValueError( + 'The provided App Check token has an invalid signature.' + ) from exception + except InvalidAudienceError as exception: + raise ValueError( + 'The provided App Check token has an incorrect "aud" (audience) claim. ' + f'Expected payload to include {self._scoped_project_id}.' 
+ ) from exception + except InvalidIssuerError as exception: + raise ValueError( + 'The provided App Check token has an incorrect "iss" (issuer) claim. ' + f'Expected claim to include {self._APP_CHECK_ISSUER}' + ) from exception + except ExpiredSignatureError as exception: + raise ValueError( + 'The provided App Check token has expired.' + ) from exception + except InvalidTokenError as exception: + raise ValueError( + f'Decoding App Check token failed. Error: {exception}' + ) from exception + + audience = payload.get('aud') + if not isinstance(audience, list) or self._scoped_project_id not in audience: + raise ValueError('Firebase App Check token has incorrect "aud" (audience) claim.') + if not payload.get('iss').startswith(self._APP_CHECK_ISSUER): + raise ValueError('Token does not contain the correct "iss" (issuer).') + _Validators.check_string( + 'The provided App Check token "sub" (subject) claim', + payload.get('sub')) + + return payload + +class _Validators: + """A collection of data validation utilities. + + Methods provided in this class raise ``ValueErrors`` if any validations fail. + """ + + @classmethod + def check_string(cls, label: str, value: Any): + """Checks if the given value is a string.""" + if value is None: + raise ValueError(f'{label} "{value}" must be a non-empty string.') + if not isinstance(value, str): + raise ValueError(f'{label} "{value}" must be a string.') diff --git a/firebase_admin/auth.py b/firebase_admin/auth.py index cfba9c939..cb63ab7f0 100644 --- a/firebase_admin/auth.py +++ b/firebase_admin/auth.py @@ -14,285 +14,913 @@ """Firebase Authentication module. -This module contains helper methods and utilities for minting and verifying -JWTs used for authenticating against Firebase services. +This module contains functions for minting and verifying JWTs used for +authenticating against Firebase services. It also provides functions for +creating and managing user accounts in Firebase projects. 
""" -import os -import threading -import time +from firebase_admin import _auth_client +from firebase_admin import _auth_providers +from firebase_admin import _auth_utils +from firebase_admin import _user_identifier +from firebase_admin import _token_gen +from firebase_admin import _user_import +from firebase_admin import _user_mgt +from firebase_admin import _utils -from google.auth import jwt -from google.auth.transport import requests -import google.oauth2.id_token -import six - -import firebase_admin -from firebase_admin import credentials - -_auth_lock = threading.Lock() - -"""Provided for overriding during tests.""" -_request = requests.Request() _AUTH_ATTRIBUTE = '_auth' -GCLOUD_PROJECT_ENV_VAR = 'GCLOUD_PROJECT' - -def _get_initialized_app(app): - if app is None: - return firebase_admin.get_app() - elif isinstance(app, firebase_admin.App): - initialized_app = firebase_admin.get_app(app.name) - if app is not initialized_app: - raise ValueError('Illegal app argument. App instance not ' - 'initialized via the firebase module.') - return app - else: - raise ValueError('Illegal app argument. Argument must be of type ' - ' firebase_admin.App, but given "{0}".'.format(type(app))) - -def _get_token_generator(app): - """Returns a _TokenGenerator instance for an App. - - If the App already has a _TokenGenerator associated with it, simply returns - it. 
Otherwise creates a new _TokenGenerator, and adds it to the App before +__all__ = [ + 'ActionCodeSettings', + 'CertificateFetchError', + 'Client', + 'ConfigurationNotFoundError', + 'DELETE_ATTRIBUTE', + 'EmailAlreadyExistsError', + 'EmailNotFoundError', + 'ErrorInfo', + 'ExpiredIdTokenError', + 'ExpiredSessionCookieError', + 'ExportedUserRecord', + 'DeleteUsersResult', + 'GetUsersResult', + 'ImportUserRecord', + 'InsufficientPermissionError', + 'InvalidDynamicLinkDomainError', + 'InvalidHostingLinkDomainError', + 'InvalidIdTokenError', + 'InvalidSessionCookieError', + 'ListProviderConfigsPage', + 'ListUsersPage', + 'OIDCProviderConfig', + 'PhoneNumberAlreadyExistsError', + 'ProviderConfig', + 'ResetPasswordExceedLimitError', + 'RevokedIdTokenError', + 'RevokedSessionCookieError', + 'SAMLProviderConfig', + 'TokenSignError', + 'TooManyAttemptsTryLaterError', + 'UidAlreadyExistsError', + 'UnexpectedResponseError', + 'UserDisabledError', + 'UserImportHash', + 'UserImportResult', + 'UserInfo', + 'UserMetadata', + 'UserNotFoundError', + 'UserProvider', + 'UserRecord', + + 'UserIdentifier', + 'UidIdentifier', + 'EmailIdentifier', + 'PhoneIdentifier', + 'ProviderIdentifier', + + 'create_custom_token', + 'create_oidc_provider_config', + 'create_saml_provider_config', + 'create_session_cookie', + 'create_user', + 'delete_oidc_provider_config', + 'delete_saml_provider_config', + 'delete_user', + 'delete_users', + 'generate_email_verification_link', + 'generate_password_reset_link', + 'generate_sign_in_with_email_link', + 'get_oidc_provider_config', + 'get_saml_provider_config', + 'get_user', + 'get_user_by_email', + 'get_user_by_phone_number', + 'get_users', + 'import_users', + 'list_saml_provider_configs', + 'list_users', + 'revoke_refresh_tokens', + 'set_custom_user_claims', + 'update_oidc_provider_config', + 'update_saml_provider_config', + 'update_user', + 'verify_id_token', + 'verify_session_cookie', +] + +ActionCodeSettings = _user_mgt.ActionCodeSettings 
+CertificateFetchError = _token_gen.CertificateFetchError +Client = _auth_client.Client +ConfigurationNotFoundError = _auth_utils.ConfigurationNotFoundError +DELETE_ATTRIBUTE = _user_mgt.DELETE_ATTRIBUTE +DeleteUsersResult = _user_mgt.DeleteUsersResult +EmailAlreadyExistsError = _auth_utils.EmailAlreadyExistsError +EmailNotFoundError = _auth_utils.EmailNotFoundError +ErrorInfo = _user_import.ErrorInfo +ExpiredIdTokenError = _token_gen.ExpiredIdTokenError +ExpiredSessionCookieError = _token_gen.ExpiredSessionCookieError +ExportedUserRecord = _user_mgt.ExportedUserRecord +GetUsersResult = _user_mgt.GetUsersResult +ImportUserRecord = _user_import.ImportUserRecord +InsufficientPermissionError = _auth_utils.InsufficientPermissionError +InvalidDynamicLinkDomainError = _auth_utils.InvalidDynamicLinkDomainError +InvalidHostingLinkDomainError = _auth_utils.InvalidHostingLinkDomainError +InvalidIdTokenError = _auth_utils.InvalidIdTokenError +InvalidSessionCookieError = _token_gen.InvalidSessionCookieError +ListProviderConfigsPage = _auth_providers.ListProviderConfigsPage +ListUsersPage = _user_mgt.ListUsersPage +OIDCProviderConfig = _auth_providers.OIDCProviderConfig +PhoneNumberAlreadyExistsError = _auth_utils.PhoneNumberAlreadyExistsError +ProviderConfig = _auth_providers.ProviderConfig +ResetPasswordExceedLimitError = _auth_utils.ResetPasswordExceedLimitError +RevokedIdTokenError = _token_gen.RevokedIdTokenError +RevokedSessionCookieError = _token_gen.RevokedSessionCookieError +SAMLProviderConfig = _auth_providers.SAMLProviderConfig +TokenSignError = _token_gen.TokenSignError +TooManyAttemptsTryLaterError = _auth_utils.TooManyAttemptsTryLaterError +UidAlreadyExistsError = _auth_utils.UidAlreadyExistsError +UnexpectedResponseError = _auth_utils.UnexpectedResponseError +UserDisabledError = _auth_utils.UserDisabledError +UserImportHash = _user_import.UserImportHash +UserImportResult = _user_import.UserImportResult +UserInfo = _user_mgt.UserInfo +UserMetadata = 
_user_mgt.UserMetadata +UserNotFoundError = _auth_utils.UserNotFoundError +UserProvider = _user_import.UserProvider +UserRecord = _user_mgt.UserRecord + +UserIdentifier = _user_identifier.UserIdentifier +UidIdentifier = _user_identifier.UidIdentifier +EmailIdentifier = _user_identifier.EmailIdentifier +PhoneIdentifier = _user_identifier.PhoneIdentifier +ProviderIdentifier = _user_identifier.ProviderIdentifier + + +def _get_client(app): + """Returns a client instance for an App. + + If the App already has a client associated with it, simply returns + it. Otherwise creates a new client, and adds it to the App before returning it. Args: - app: A Firebase App instance (or None to use the default App). + app: A Firebase App instance (or ``None`` to use the default App). Returns: - _TokenGenerator: A _TokenGenerator for the specified App instance. + Client: A client for the specified App instance. Raises: - ValueError: If the app argument is invalid. + ValueError: If the app argument is invalid. """ - app = _get_initialized_app(app) - with _auth_lock: - if not hasattr(app, _AUTH_ATTRIBUTE): - setattr(app, _AUTH_ATTRIBUTE, _TokenGenerator(app)) - return getattr(app, _AUTH_ATTRIBUTE) + return _utils.get_app_service(app, _AUTH_ATTRIBUTE, Client) def create_custom_token(uid, developer_claims=None, app=None): """Builds and signs a Firebase custom auth token. Args: - uid: ID of the user for whom the token is created. - developer_claims: A dictionary of claims to be included in the token - (optional). - app: An App instance (optional). + uid: ID of the user for whom the token is created. + developer_claims: A dictionary of claims to be included in the token + (optional). + app: An App instance (optional). Returns: - string: A token minted from the input parameters. + bytes: A token minted from the input parameters. Raises: - ValueError: If input parameters are invalid. + ValueError: If input parameters are invalid. 
+ TokenSignError: If an error occurs while signing the token using the remote IAM service. """ - token_generator = _get_token_generator(app) - return token_generator.create_custom_token(uid, developer_claims) + client = _get_client(app) + return client.create_custom_token(uid, developer_claims) -def verify_id_token(id_token, app=None): +def verify_id_token(id_token, app=None, check_revoked=False, clock_skew_seconds=0): """Verifies the signature and data for the provided JWT. Accepts a signed token string, verifies that it is current, and issued to this project, and that it was correctly signed by Google. Args: - id_token: A string of the encoded JWT. - app: An App instance (optional). + id_token: A string of the encoded JWT. + app: An App instance (optional). + check_revoked: Boolean, If true, checks whether the token has been revoked or + the user disabled (optional). + clock_skew_seconds: The number of seconds to tolerate when checking the token. + Must be between 0-60. Defaults to 0. + Returns: + dict: A dictionary of key-value pairs parsed from the decoded JWT. + + Raises: + ValueError: If ``id_token`` is a not a string or is empty. + InvalidIdTokenError: If ``id_token`` is not a valid Firebase ID token. + ExpiredIdTokenError: If the specified ID token has expired. + RevokedIdTokenError: If ``check_revoked`` is ``True`` and the ID token has been revoked. + CertificateFetchError: If an error occurs while fetching the public key certificates + required to verify the ID token. + UserDisabledError: If ``check_revoked`` is ``True`` and the corresponding user + record is disabled. + """ + client = _get_client(app) + return client.verify_id_token( + id_token, check_revoked=check_revoked, clock_skew_seconds=clock_skew_seconds) + + +def create_session_cookie(id_token, expires_in, app=None): + """Creates a new Firebase session cookie from the given ID token and options. + + The returned JWT can be set as a server-side session cookie with a custom cookie policy. 
+
+    Args:
+        id_token: The Firebase ID token to exchange for a session cookie.
+        expires_in: Duration until the cookie is expired. This can be specified
+            as a numeric seconds value or a ``datetime.timedelta`` instance.
+        app: An App instance (optional).
+
+    Returns:
+        bytes: A session cookie generated from the input parameters.
+
+    Raises:
+        ValueError: If input parameters are invalid.
+        FirebaseError: If an error occurs while creating the cookie.
+    """
+    client = _get_client(app)
+    # pylint: disable=protected-access
+    return client._token_generator.create_session_cookie(id_token, expires_in)
+
+
+def verify_session_cookie(session_cookie, check_revoked=False, app=None, clock_skew_seconds=0):
+    """Verifies a Firebase session cookie.
+
+    Accepts a session cookie string, verifies that it is current, and issued
+    to this project, and that it was correctly signed by Google.
+
+    Args:
+        session_cookie: A session cookie string to verify.
+        check_revoked: Boolean, if true, checks whether the cookie has been revoked or the
+            user disabled (optional).
+        app: An App instance (optional).
+        clock_skew_seconds: The number of seconds to tolerate when checking the cookie.
+
+    Returns:
+        dict: A dictionary of key-value pairs parsed from the decoded JWT.
+
+    Raises:
+        ValueError: If ``session_cookie`` is not a string or is empty.
+        InvalidSessionCookieError: If ``session_cookie`` is not a valid Firebase session cookie.
+        ExpiredSessionCookieError: If the specified session cookie has expired.
+        RevokedSessionCookieError: If ``check_revoked`` is ``True`` and the cookie has been revoked.
+        CertificateFetchError: If an error occurs while fetching the public key certificates
+            required to verify the session cookie.
+        UserDisabledError: If ``check_revoked`` is ``True`` and the corresponding user
+            record is disabled.
+ """ + client = _get_client(app) + # pylint: disable=protected-access + verified_claims = client._token_verifier.verify_session_cookie( + session_cookie, clock_skew_seconds) + if check_revoked: + client._check_jwt_revoked_or_disabled( + verified_claims, RevokedSessionCookieError, 'session cookie') + return verified_claims + + +def revoke_refresh_tokens(uid, app=None): + """Revokes all refresh tokens for an existing user. + + This function updates the user's ``tokens_valid_after_timestamp`` to the current UTC + in seconds since the epoch. It is important that the server on which this is called has its + clock set correctly and synchronized. + + While this revokes all sessions for a specified user and disables any new ID tokens for + existing sessions from getting minted, existing ID tokens may remain active until their + natural expiration (one hour). To verify that ID tokens are revoked, use + ``verify_id_token(idToken, check_revoked=True)``. + + Args: + uid: A user ID string. + app: An App instance (optional). + + Raises: + ValueError: If the user ID is None, empty or malformed. + FirebaseError: If an error occurs while revoking the refresh token. + """ + client = _get_client(app) + client.revoke_refresh_tokens(uid) + + +def get_user(uid, app=None): + """Gets the user data corresponding to the specified user ID. + + Args: + uid: A user ID string. + app: An App instance (optional). + + Returns: + UserRecord: A user record instance. + + Raises: + ValueError: If the user ID is None, empty or malformed. + UserNotFoundError: If the specified user ID does not exist. + FirebaseError: If an error occurs while retrieving the user. + """ + client = _get_client(app) + return client.get_user(uid=uid) + + +def get_user_by_email(email, app=None): + """Gets the user data corresponding to the specified user email. + + Args: + email: A user email address string. + app: An App instance (optional). + + Returns: + UserRecord: A user record instance. 
+ + Raises: + ValueError: If the email is None, empty or malformed. + UserNotFoundError: If no user exists by the specified email address. + FirebaseError: If an error occurs while retrieving the user. + """ + client = _get_client(app) + return client.get_user_by_email(email=email) + + +def get_user_by_phone_number(phone_number, app=None): + """Gets the user data corresponding to the specified phone number. + + Args: + phone_number: A phone number string. + app: An App instance (optional). + + Returns: + UserRecord: A user record instance. + + Raises: + ValueError: If the phone number is None, empty or malformed. + UserNotFoundError: If no user exists by the specified phone number. + FirebaseError: If an error occurs while retrieving the user. + """ + client = _get_client(app) + return client.get_user_by_phone_number(phone_number=phone_number) + + +def get_users(identifiers, app=None): + """Gets the user data corresponding to the specified identifiers. + + There are no ordering guarantees; in particular, the nth entry in the + result list is not guaranteed to correspond to the nth entry in the input + parameters list. + + A maximum of 100 identifiers may be supplied. If more than 100 + identifiers are supplied, this method raises a `ValueError`. + + Args: + identifiers (list[UserIdentifier]): A list of ``UserIdentifier`` + instances used to indicate which user records should be returned. + Must have <= 100 entries. + app: An App instance (optional). + + Returns: + GetUsersResult: A ``GetUsersResult`` instance corresponding to the + specified identifiers. + + Raises: + ValueError: If any of the identifiers are invalid or if more than 100 + identifiers are specified. + """ + client = _get_client(app) + return client.get_users(identifiers) + + +def list_users(page_token=None, max_results=_user_mgt.MAX_LIST_USERS_RESULTS, app=None): + """Retrieves a page of user accounts from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. 
The ``max_results`` + argument governs the maximum number of user accounts that may be included in the returned page. + This function never returns None. If there are no user accounts in the Firebase project, this + returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the page + (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in the + returned page (optional). Defaults to 1000, which is also the maximum number allowed. + app: An App instance (optional). + + Returns: + ListUsersPage: A page of user accounts. + + Raises: + ValueError: If ``max_results`` or ``page_token`` are invalid. + FirebaseError: If an error occurs while retrieving the user accounts. + """ + client = _get_client(app) + return client.list_users(page_token=page_token, max_results=max_results) + + +def create_user(**kwargs): # pylint: disable=differing-param-doc + """Creates a new user account with the specified properties. + + Args: + **kwargs: A series of keyword arguments (optional). + + Keyword Args: + uid: User ID to assign to the newly created user (optional). + display_name: The user's display name (optional). + email: The user's primary email (optional). + email_verified: A boolean indicating whether or not the user's primary email is + verified (optional). + phone_number: The user's primary phone number (optional). + photo_url: The user's photo URL (optional). + password: The user's raw, unhashed password. (optional). + disabled: A boolean indicating whether or not the user account is disabled (optional). + app: An App instance (optional). + + Returns: + UserRecord: A user record instance for the newly created user. + + Raises: + ValueError: If the specified user properties are invalid. + FirebaseError: If an error occurs while creating the user account. 
+ """ + app = kwargs.pop('app', None) + client = _get_client(app) + return client.create_user(**kwargs) + + +def update_user(uid, **kwargs): # pylint: disable=differing-param-doc + """Updates an existing user account with the specified properties. + + Args: + uid: A user ID string. + **kwargs: A series of keyword arguments (optional). + + Keyword Args: + display_name: The user's display name (optional). Can be removed by explicitly passing + ``auth.DELETE_ATTRIBUTE``. + email: The user's primary email (optional). + email_verified: A boolean indicating whether or not the user's primary email is + verified (optional). + phone_number: The user's primary phone number (optional). Can be removed by explicitly + passing ``auth.DELETE_ATTRIBUTE``. + photo_url: The user's photo URL (optional). Can be removed by explicitly passing + ``auth.DELETE_ATTRIBUTE``. + password: The user's raw, unhashed password. (optional). + disabled: A boolean indicating whether or not the user account is disabled (optional). + custom_claims: A dictionary or a JSON string containing the custom claims to be set on the + user account (optional). To remove all custom claims, pass ``auth.DELETE_ATTRIBUTE``. + valid_since: An integer signifying the seconds since the epoch (optional). This field is + set by ``revoke_refresh_tokens`` and it is discouraged to set this field directly. + app: An App instance (optional). + + Returns: + UserRecord: An updated user record instance for the user. + + Raises: + ValueError: If the specified user ID or properties are invalid. + FirebaseError: If an error occurs while updating the user account. + """ + app = kwargs.pop('app', None) + client = _get_client(app) + return client.update_user(uid, **kwargs) + + +def set_custom_user_claims(uid, custom_claims, app=None): + """Sets additional claims on an existing user account. + + Custom claims set via this function can be used to define user roles and privilege levels. 
+    These claims propagate to all the devices where the user is already signed in (after token
+    expiration or when token refresh is forced), and next time the user signs in. The claims
+    can be accessed via the user's ID token JWT. If a reserved OIDC claim is specified (sub, iat,
+    iss, etc), an error is thrown. Claims payload must also not be larger than 1000 characters
+    when serialized into a JSON string.
+
+    Args:
+        uid: A user ID string.
+        custom_claims: A dictionary or a JSON string of custom claims. Pass None to unset any
+            claims set previously.
+        app: An App instance (optional).
+
+    Raises:
+        ValueError: If the specified user ID or the custom claims are invalid.
+        FirebaseError: If an error occurs while updating the user account.
+    """
+    client = _get_client(app)
+    client.set_custom_user_claims(uid, custom_claims=custom_claims)
+
+
+def delete_user(uid, app=None):
+    """Deletes the user identified by the specified user ID.
+
+    Args:
+        uid: A user ID string.
+        app: An App instance (optional).
+
+    Raises:
+        ValueError: If the user ID is None, empty or malformed.
+        FirebaseError: If an error occurs while deleting the user account.
+    """
+    client = _get_client(app)
+    client.delete_user(uid)
+
+
+def delete_users(uids, app=None):
+    """Deletes the users specified by the given identifiers.
+
+    Deleting a non-existing user does not generate an error (the method is
+    idempotent). Non-existing users are considered to be successfully deleted
+    and are therefore included in the `DeleteUsersResult.success_count` value.
+
+    A maximum of 1000 identifiers may be supplied. If more than 1000
+    identifiers are supplied, this method raises a `ValueError`.
+
+    Args:
+        uids: A list of strings indicating the uids of the users to be deleted.
+            Must have <= 1000 entries.
+        app: An App instance (optional).
+
+    Returns:
+        DeleteUsersResult: The total number of successful/failed deletions, as
+        well as the array of errors that correspond to the failed deletions.
+ + Raises: + ValueError: If any of the identifiers are invalid or if more than 1000 + identifiers are specified. + """ + client = _get_client(app) + return client.delete_users(uids) + + +def import_users(users, hash_alg=None, app=None): + """Imports the specified list of users into Firebase Auth. + + At most 1000 users can be imported at a time. This operation is optimized for bulk imports and + will ignore checks on identifier uniqueness which could result in duplications. The + ``hash_alg`` parameter must be specified when importing users with passwords. Refer to the + ``UserImportHash`` class for supported hash algorithms. + + Args: + users: A list of ``ImportUserRecord`` instances to import. Length of the list must not + exceed 1000. + hash_alg: A ``UserImportHash`` object (optional). Required when importing users with + passwords. + app: An App instance (optional). + + Returns: + UserImportResult: An object summarizing the result of the import operation. + + Raises: + ValueError: If the provided arguments are invalid. + FirebaseError: If an error occurs while importing users. + """ + client = _get_client(app) + return client.import_users(users, hash_alg) + + +def generate_password_reset_link(email, action_code_settings=None, app=None): + """Generates the out-of-band email action link for password reset flows for the specified email + address. + + Args: + email: The email of the user whose password is to be reset. + action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether + the link is to be handled by a mobile app and the additional state information to be + passed in the deep link. + app: An App instance (optional). 
+ Returns: + link: The password reset link created by the API + + Raises: + ValueError: If the provided arguments are invalid + FirebaseError: If an error occurs while generating the link + """ + client = _get_client(app) + return client.generate_password_reset_link(email, action_code_settings=action_code_settings) + + +def generate_email_verification_link(email, action_code_settings=None, app=None): + """Generates the out-of-band email action link for email verification flows for the specified + email address. + + Args: + email: The email of the user to be verified. + action_code_settings: ``ActionCodeSettings`` instance (optional). Defines whether + the link is to be handled by a mobile app and the additional state information to be + passed in the deep link. + app: An App instance (optional). + Returns: + link: The email verification link created by the API + + Raises: + ValueError: If the provided arguments are invalid + FirebaseError: If an error occurs while generating the link + """ + client = _get_client(app) + return client.generate_email_verification_link( + email, action_code_settings=action_code_settings) + + +def generate_sign_in_with_email_link(email, action_code_settings, app=None): + """Generates the out-of-band email action link for email link sign-in flows, using the action + code settings provided. + + Args: + email: The email of the user signing in. + action_code_settings: ``ActionCodeSettings`` instance. Defines whether + the link is to be handled by a mobile app and the additional state information to be + passed in the deep link. + app: An App instance (optional). 
+ + Returns: + link: The email sign-in link created by the API + + Raises: + ValueError: If the provided arguments are invalid + FirebaseError: If an error occurs while generating the link + """ + client = _get_client(app) + return client.generate_sign_in_with_email_link( + email, action_code_settings=action_code_settings) + + +def get_oidc_provider_config(provider_id, app=None): + """Returns the ``OIDCProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + app: An App instance (optional). + + Returns: + OIDCProviderConfig: An OIDC provider config instance. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. + ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. + FirebaseError: If an error occurs while retrieving the OIDC provider. + """ + client = _get_client(app) + return client.get_oidc_provider_config(provider_id) + +def create_oidc_provider_config( + provider_id, client_id, issuer, display_name=None, enabled=None, client_secret=None, + id_token_response_type=None, code_response_type=None, app=None): + """Creates a new OIDC provider config from the given parameters. + + OIDC provider support requires Google Cloud's Identity Platform (GCIP). To learn more about + GCIP, including pricing and features, see https://cloud.google.com/identity-platform. + + Args: + provider_id: Provider ID string. Must have the prefix ``oidc.``. + client_id: Client ID of the new config. + issuer: Issuer of the new config. Must be a valid URL. + display_name: The user-friendly display name to the current configuration (optional). + This name is also used as the provider label in the Cloud Console. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). A user cannot sign in using a disabled provider. + app: An App instance (optional). + client_secret: A string which sets the client secret for the new provider. 
+ This is required for the code flow. + code_response_type: A boolean which sets whether to enable the code response flow for the + new provider. By default, this is not enabled if no response type is specified. + A client secret must be set for this response type. + Having both the code and ID token response flows is currently not supported. + id_token_response_type: A boolean which sets whether to enable the ID token response flow + for the new provider. By default, this is enabled if no response type is specified. + Having both the code and ID token response flows is currently not supported. + + Returns: + OIDCProviderConfig: The newly created OIDC provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while creating the new OIDC provider config. + """ + client = _get_client(app) + return client.create_oidc_provider_config( + provider_id, client_id=client_id, issuer=issuer, display_name=display_name, + enabled=enabled, client_secret=client_secret, id_token_response_type=id_token_response_type, + code_response_type=code_response_type) + + +def update_oidc_provider_config( + provider_id, client_id=None, issuer=None, display_name=None, enabled=None, + client_secret=None, id_token_response_type=None, code_response_type=None, app=None): + """Updates an existing OIDC provider config with the given parameters. + + Args: + provider_id: Provider ID string. Must have the prefix ``oidc.``. + client_id: Client ID of the new config (optional). + issuer: Issuer of the new config (optional). Must be a valid URL. + display_name: The user-friendly display name of the current configuration (optional). + Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). + app: An App instance (optional). + client_secret: A string which sets the client secret for the new provider. 
+ This is required for the code flow. + code_response_type: A boolean which sets whether to enable the code response flow for the + new provider. By default, this is not enabled if no response type is specified. + A client secret must be set for this response type. + Having both the code and ID token response flows is currently not supported. + id_token_response_type: A boolean which sets whether to enable the ID token response flow + for the new provider. By default, this is enabled if no response type is specified. + Having both the code and ID token response flows is currently not supported. + + Returns: + OIDCProviderConfig: The updated OIDC provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while updating the OIDC provider config. + """ + client = _get_client(app) + return client.update_oidc_provider_config( + provider_id, client_id=client_id, issuer=issuer, display_name=display_name, + enabled=enabled, client_secret=client_secret, id_token_response_type=id_token_response_type, + code_response_type=code_response_type) + + +def delete_oidc_provider_config(provider_id, app=None): + """Deletes the ``OIDCProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + app: An App instance (optional). + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``oidc.`` prefix. + ConfigurationNotFoundError: If no OIDC provider is available with the given identifier. + FirebaseError: If an error occurs while deleting the OIDC provider. + """ + client = _get_client(app) + client.delete_oidc_provider_config(provider_id) + + +def list_oidc_provider_configs( + page_token=None, max_results=_auth_providers.MAX_LIST_CONFIGS_RESULTS, app=None): + """Retrieves a page of OIDC provider configs from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. 
The ``max_results`` + argument governs the maximum number of configs that may be included in the returned + page. This function never returns ``None``. If there are no OIDC configs in the Firebase + project, this returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the + page (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in + the returned page (optional). Defaults to 100, which is also the maximum number + allowed. + app: An App instance (optional). + + Returns: + ListProviderConfigsPage: A page of OIDC provider config instances. + + Raises: + ValueError: If ``max_results`` or ``page_token`` are invalid. + FirebaseError: If an error occurs while retrieving the OIDC provider configs. + """ + client = _get_client(app) + return client.list_oidc_provider_configs(page_token, max_results) + + +def get_saml_provider_config(provider_id, app=None): + """Returns the ``SAMLProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + app: An App instance (optional). + + Returns: + SAMLProviderConfig: A SAML provider config instance. + + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. + ConfigurationNotFoundError: If no SAML provider is available with the given identifier. + FirebaseError: If an error occurs while retrieving the SAML provider. + """ + client = _get_client(app) + return client.get_saml_provider_config(provider_id) + + +def create_saml_provider_config( + provider_id, idp_entity_id, sso_url, x509_certificates, rp_entity_id, callback_url, + display_name=None, enabled=None, app=None): + """Creates a new SAML provider config from the given parameters. + + SAML provider support requires Google Cloud's Identity Platform (GCIP). 
To learn more about + GCIP, including pricing and features, see https://cloud.google.com/identity-platform. + + Args: + provider_id: Provider ID string. Must have the prefix ``saml.``. + idp_entity_id: The SAML IdP entity identifier. + sso_url: The SAML IdP SSO URL. Must be a valid URL. + x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this provider. + Multiple certificates are accepted to prevent outages during IdP key rotation (for + example ADFS rotates every 10 days). When the Auth server receives a SAML response, it + will match the SAML response with the certificate on record. Otherwise the response is + rejected. Developers are expected to manage the certificate updates as keys are + rotated. + rp_entity_id: The SAML relying party (service provider) entity ID. This is defined by the + developer but needs to be provided to the SAML IdP. + callback_url: Callback URL string. This is fixed and must always be the same as the OAuth + redirect URL provisioned by Firebase Auth, unless a custom authDomain is used. + display_name: The user-friendly display name to the current configuration (optional). This + name is also used as the provider label in the Cloud Console. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). A user cannot sign in using a disabled provider. + app: An App instance (optional). + + Returns: + SAMLProviderConfig: The newly created SAML provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while creating the new SAML provider config. 
+ """ + client = _get_client(app) + return client.create_saml_provider_config( + provider_id, idp_entity_id=idp_entity_id, sso_url=sso_url, + x509_certificates=x509_certificates, rp_entity_id=rp_entity_id, callback_url=callback_url, + display_name=display_name, enabled=enabled) + + +def update_saml_provider_config( + provider_id, idp_entity_id=None, sso_url=None, x509_certificates=None, + rp_entity_id=None, callback_url=None, display_name=None, enabled=None, app=None): + """Updates an existing SAML provider config with the given parameters. + + Args: + provider_id: Provider ID string. Must have the prefix ``saml.``. + idp_entity_id: The SAML IdP entity identifier (optional). + sso_url: The SAML IdP SSO URL. Must be a valid URL (optional). + x509_certificates: The list of SAML IdP X.509 certificates issued by CA for this + provider (optional). + rp_entity_id: The SAML relying party entity ID (optional). + callback_url: Callback URL string (optional). + display_name: The user-friendly display name of the current configuration (optional). + Pass ``auth.DELETE_ATTRIBUTE`` to delete the current display name. + enabled: A boolean indicating whether the provider configuration is enabled or disabled + (optional). + app: An App instance (optional). + + Returns: + SAMLProviderConfig: The updated SAML provider config instance. + + Raises: + ValueError: If any of the specified input parameters are invalid. + FirebaseError: If an error occurs while updating the SAML provider config. + """ + client = _get_client(app) + return client.update_saml_provider_config( + provider_id, idp_entity_id=idp_entity_id, sso_url=sso_url, + x509_certificates=x509_certificates, rp_entity_id=rp_entity_id, + callback_url=callback_url, display_name=display_name, enabled=enabled) + + +def delete_saml_provider_config(provider_id, app=None): + """Deletes the ``SAMLProviderConfig`` with the given ID. + + Args: + provider_id: Provider ID string. + app: An App instance (optional). 
+ + Raises: + ValueError: If the provider ID is invalid, empty or does not have ``saml.`` prefix. + ConfigurationNotFoundError: If no SAML provider is available with the given identifier. + FirebaseError: If an error occurs while deleting the SAML provider. + """ + client = _get_client(app) + client.delete_saml_provider_config(provider_id) + + +def list_saml_provider_configs( + page_token=None, max_results=_auth_providers.MAX_LIST_CONFIGS_RESULTS, app=None): + """Retrieves a page of SAML provider configs from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. The ``max_results`` + argument governs the maximum number of configs that may be included in the returned + page. This function never returns ``None``. If there are no SAML configs in the Firebase + project, this returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the + page (optional). Defaults to ``None``, which will retrieve the first page of users. + max_results: A positive integer indicating the maximum number of users to include in + the returned page (optional). Defaults to 100, which is also the maximum number + allowed. + app: An App instance (optional). Returns: - dict: A dictionary of key-value pairs parsed from the decoded JWT. + ListProviderConfigsPage: A page of SAML provider config instances. Raises: - ValueError: If the input parameters are invalid, or if the App was not - initialized with a credentials.Certificate. - AppIdenityError: The JWT was found to be invalid, the message will contain details. + ValueError: If ``max_results`` or ``page_token`` are invalid. + FirebaseError: If an error occurs while retrieving the SAML provider configs. 
""" - token_generator = _get_token_generator(app) - return token_generator.verify_id_token(id_token) - - -class _TokenGenerator(object): - """Generates custom tokens, and validates ID tokens.""" - - FIREBASE_CERT_URI = ('https://www.googleapis.com/robot/v1/metadata/x509/' - 'securetoken@system.gserviceaccount.com') - - ISSUER_PREFIX = 'https://securetoken.google.com/' - - MAX_TOKEN_LIFETIME_SECONDS = 3600 # One Hour, in Seconds - FIREBASE_AUDIENCE = ('https://identitytoolkit.googleapis.com/google.' - 'identity.identitytoolkit.v1.IdentityToolkit') - - # Key names we don't allow to appear in the developer_claims. - _RESERVED_CLAIMS_ = set([ - 'acr', 'amr', 'at_hash', 'aud', 'auth_time', 'azp', 'cnf', 'c_hash', - 'exp', 'firebase', 'iat', 'iss', 'jti', 'nbf', 'nonce', 'sub' - ]) - - - def __init__(self, app): - """Initializes FirebaseAuth from a FirebaseApp instance. - - Args: - app: A FirebaseApp instance. - """ - self._app = app - - def create_custom_token(self, uid, developer_claims=None): - """Builds and signs a FirebaseCustomAuthToken. - - Args: - uid: ID of the user for whom the token is created. - developer_claims: A dictionary of claims to be included in the token. - - Returns: - string: A token string minted from the input parameters. - - Raises: - ValueError: If input parameters are invalid. 
- """ - if not isinstance(self._app.credential, credentials.Certificate): - raise ValueError( - 'Must initialize Firebase App with a certificate credential ' - 'to call create_custom_token().') - - if developer_claims is not None: - if not isinstance(developer_claims, dict): - raise ValueError('developer_claims must be a dictionary') - - disallowed_keys = set(developer_claims.keys() - ) & self._RESERVED_CLAIMS_ - if disallowed_keys: - if len(disallowed_keys) > 1: - error_message = ('Developer claims {0} are reserved and ' - 'cannot be specified.'.format( - ', '.join(disallowed_keys))) - else: - error_message = ('Developer claim {0} is reserved and ' - 'cannot be specified.'.format( - ', '.join(disallowed_keys))) - raise ValueError(error_message) - - if not uid or not isinstance(uid, six.string_types) or len(uid) > 128: - raise ValueError('uid must be a string between 1 and 128 characters.') - - now = int(time.time()) - payload = { - 'iss': self._app.credential.service_account_email, - 'sub': self._app.credential.service_account_email, - 'aud': self.FIREBASE_AUDIENCE, - 'uid': uid, - 'iat': now, - 'exp': now + self.MAX_TOKEN_LIFETIME_SECONDS, - } - - if developer_claims is not None: - payload['claims'] = developer_claims - - return jwt.encode(self._app.credential.signer, payload) - - def verify_id_token(self, id_token): - """Verifies the signature and data for the provided JWT. - - Accepts a signed token string, verifies that is the current, and issued - to this project, and that it was correctly signed by Google. - - Args: - id_token: A string of the encoded JWT. - - Returns: - dict: A dictionary of key-value pairs parsed from the decoded JWT. - - Raises: - ValueError: The app was not initialized with a credentials.Certificate instance. - AppIdenityError: The JWT was found to be invalid, the message will contain details. - """ - if not id_token: - raise ValueError('Illegal ID token provided: {0}. 
ID token must be a non-empty ' - 'string.'.format(id_token)) - - if isinstance(id_token, six.text_type): - id_token = id_token.encode('ascii') - if not isinstance(id_token, six.binary_type): - raise ValueError('Illegal ID token provided: {0}. ID token must be a non-empty ' - 'string.'.format(id_token)) - - try: - project_id = self._app.credential.project_id - if project_id is None: - project_id = os.environ.get(GCLOUD_PROJECT_ENV_VAR) - except AttributeError: - project_id = os.environ.get(GCLOUD_PROJECT_ENV_VAR) - - if not project_id: - raise ValueError('Failed to ascertain project ID from the credential or the ' - 'environment. Must initialize app with a credentials.Certificate or ' - 'set your Firebase project ID as the GCLOUD_PROJECT environment ' - 'variable to call verify_id_token().') - - header = jwt.decode_header(id_token) - payload = jwt.decode(id_token, verify=False) - issuer = payload.get('iss') - audience = payload.get('aud') - subject = payload.get('sub') - expected_issuer = self.ISSUER_PREFIX + project_id - - project_id_match_msg = ('Make sure the ID token comes from the same' - ' Firebase project as the service account used' - ' to authenticate this SDK.') - verify_id_token_msg = ( - 'See https://firebase.google.com/docs/auth/admin/verify-id-tokens' - ' for details on how to retrieve an ID token.') - error_message = None - if not header.get('kid'): - if audience == self.FIREBASE_AUDIENCE: - error_message = ('verify_id_token() expects an ID token, but ' - 'was given a custom token.') - elif header.get('alg') == 'HS256' and payload.get( - 'v') is 0 and 'uid' in payload.get('d', {}): - error_message = ('verify_id_token() expects an ID token, but ' - 'was given a legacy custom token.') - else: - error_message = 'Firebase ID token has no "kid" claim.' - elif header.get('alg') != 'RS256': - error_message = ('Firebase ID token has incorrect algorithm. ' - 'Expected "RS256" but got "{0}". 
{1}'.format( - header.get('alg'), verify_id_token_msg)) - elif audience != project_id: - error_message = ( - 'Firebase ID token has incorrect "aud" (audience) claim. ' - 'Expected "{0}" but got "{1}". {2} {3}'.format( - project_id, audience, project_id_match_msg, - verify_id_token_msg)) - elif issuer != expected_issuer: - error_message = ('Firebase ID token has incorrect "iss" (issuer) ' - 'claim. Expected "{0}" but got "{1}". {2} {3}' - .format(expected_issuer, issuer, - project_id_match_msg, - verify_id_token_msg)) - elif subject is None or not isinstance(subject, six.string_types): - error_message = ('Firebase ID token has no "sub" (subject) ' - 'claim. ') + verify_id_token_msg - elif not subject: - error_message = ('Firebase ID token has an empty string "sub" ' - '(subject) claim. ') + verify_id_token_msg - elif len(subject) > 128: - error_message = ('Firebase ID token has a "sub" (subject) ' - 'claim longer than 128 ' - 'characters. ') + verify_id_token_msg - - if error_message: - raise ValueError(error_message) - - verified_claims = google.oauth2.id_token.verify_firebase_token( - id_token, - request=_request, - audience=project_id) - verified_claims['uid'] = verified_claims['sub'] - return verified_claims + client = _get_client(app) + return client.list_saml_provider_configs(page_token, max_results) diff --git a/firebase_admin/credentials.py b/firebase_admin/credentials.py index a0c471a85..0edbecaae 100644 --- a/firebase_admin/credentials.py +++ b/firebase_admin/credentials.py @@ -15,8 +15,10 @@ """Firebase credentials module.""" import collections import json +import pathlib import google.auth +from google.auth.credentials import Credentials as GoogleAuthCredentials from google.auth.transport import requests from google.oauth2 import credentials from google.oauth2 import service_account @@ -24,61 +26,97 @@ _request = requests.Request() _scopes = [ + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', + 
'https://www.googleapis.com/auth/devstorage.read_write', 'https://www.googleapis.com/auth/firebase', + 'https://www.googleapis.com/auth/identitytoolkit', 'https://www.googleapis.com/auth/userinfo.email' ] -AccessTokenInfo = collections.namedtuple( - 'AccessTokenInfo', ['access_token', 'expiry']) +AccessTokenInfo = collections.namedtuple('AccessTokenInfo', ['access_token', 'expiry']) +"""Data included in an OAuth2 access token. +Contains the access token string and the expiry time. The expiry time is exposed as a +``datetime`` value. +""" -class Base(object): + +class Base: """Provides OAuth2 access tokens for accessing Firebase services.""" def get_access_token(self): - """Fetches a Google OAuth2 access token using this credential instance.""" - raise NotImplementedError + """Fetches a Google OAuth2 access token using this credential instance. + + Returns: + AccessTokenInfo: An access token obtained using the credential. + """ + google_cred = self.get_credential() + google_cred.refresh(_request) + return AccessTokenInfo(google_cred.token, google_cred.expiry) def get_credential(self): - """Returns the credential instance used for authentication.""" + """Returns the Google credential instance used for authentication.""" raise NotImplementedError +class _ExternalCredentials(Base): + """A wrapper for google.auth.credentials.Credentials typed credential instances""" + + def __init__(self, credential: GoogleAuthCredentials): + super().__init__() + self._g_credential = credential + + def get_credential(self): + """Returns the underlying Google Credential + + Returns: + google.auth.credentials.Credentials: A Google Auth credential instance.""" + return self._g_credential class Certificate(Base): """A credential initialized from a JSON certificate keyfile.""" _CREDENTIAL_TYPE = 'service_account' - def __init__(self, file_path): - """Initializes a credential from a certificate file. 
+ def __init__(self, cert): + """Initializes a credential from a Google service account certificate. - Parses the specified certificate file (service account file), and - creates a credential instance from it. + Service account certificates can be downloaded as JSON files from the Firebase console. + To instantiate a credential from a certificate file, either specify the file path or a + dict representing the parsed contents of the file. Args: - file_path: Path to a service account certificate file. + cert: Path to a certificate file or a dict representing the contents of a certificate. Raises: - IOError: If the specified file doesn't exist or cannot be read. - ValueError: If the certificate file is invalid. + IOError: If the specified certificate file doesn't exist or cannot be read. + ValueError: If the specified certificate is invalid. """ - super(Certificate, self).__init__() - with open(file_path) as json_keyfile: - json_data = json.load(json_keyfile) + super().__init__() + if _is_file_path(cert): + with open(cert, encoding='utf-8') as json_file: + json_data = json.load(json_file) + elif isinstance(cert, dict): + json_data = cert + else: + raise ValueError( + f'Invalid certificate argument: "{cert}". Certificate argument must be a file ' + 'path, or a dict containing the parsed file contents.') + if json_data.get('type') != self._CREDENTIAL_TYPE: - raise ValueError('Invalid certificate file: "{0}". File must contain a ' - '"type" field set to "{1}".'.format(file_path, self._CREDENTIAL_TYPE)) - self._project_id = json_data.get('project_id') + raise ValueError( + 'Invalid service account certificate. Certificate must contain a ' + f'"type" field set to "{self._CREDENTIAL_TYPE}".') try: self._g_credential = service_account.Credentials.from_service_account_info( json_data, scopes=_scopes) except ValueError as error: - raise ValueError('Failed to initialize a certificate credential from file "{0}". 
' - 'Caused by: "{1}"'.format(file_path, error)) + raise ValueError( + f'Failed to initialize a certificate credential. Caused by: "{error}"') from error @property def project_id(self): - return self._project_id + return self._g_credential.project_id @property def signer(self): @@ -88,15 +126,6 @@ def signer(self): def service_account_email(self): return self._g_credential.service_account_email - def get_access_token(self): - """Fetches a Google OAuth2 access token using this certificate credential. - - Returns: - AccessTokenInfo: An access token obtained using the credential. - """ - self._g_credential.refresh(_request) - return AccessTokenInfo(self._g_credential.token, self._g_credential.expiry) - def get_credential(self): """Returns the underlying Google credential. @@ -109,68 +138,78 @@ class ApplicationDefault(Base): """A Google Application Default credential.""" def __init__(self): - """Initializes the Application Default credentials for the current environment. - - Raises: - google.auth.exceptions.DefaultCredentialsError: If Application Default - credentials cannot be initialized in the current environment. - """ - super(ApplicationDefault, self).__init__() - self._g_credential, self._project_id = google.auth.default(scopes=_scopes) - - def get_access_token(self): - """Fetches a Google OAuth2 access token using this application default credential. + """Creates an instance that will use Application Default credentials. - Returns: - AccessTokenInfo: An access token obtained using the credential. + The credentials will be lazily initialized when get_credential() or + project_id() is called. See those methods for possible errors raised. """ - self._g_credential.refresh(_request) - return AccessTokenInfo(self._g_credential.token, self._g_credential.expiry) + super().__init__() + self._g_credential = None # Will be lazily-loaded via _load_credential(). def get_credential(self): """Returns the underlying Google credential. 
+ Raises: + google.auth.exceptions.DefaultCredentialsError: If Application Default + credentials cannot be initialized in the current environment. Returns: google.auth.credentials.Credentials: A Google Auth credential instance.""" + self._load_credential() return self._g_credential @property def project_id(self): + """Returns the project_id from the underlying Google credential. + + Raises: + google.auth.exceptions.DefaultCredentialsError: If Application Default + credentials cannot be initialized in the current environment. + Returns: + str: The project id.""" + self._load_credential() return self._project_id + def _load_credential(self): + if not self._g_credential: + self._g_credential, self._project_id = google.auth.default(scopes=_scopes) class RefreshToken(Base): """A credential initialized from an existing refresh token.""" _CREDENTIAL_TYPE = 'authorized_user' - def __init__(self, file_path): - """Initializes a refresh token credential from the specified JSON file. + def __init__(self, refresh_token): + """Initializes a credential from a refresh token JSON file. + + The JSON must consist of client_id, client_secret and refresh_token fields. Refresh + token files are typically created and managed by the gcloud SDK. To instantiate + a credential from a refresh token file, either specify the file path or a dict + representing the parsed contents of the file. Args: - file_path: File path to a refresh token JSON file. + refresh_token: Path to a refresh token file or a dict representing the contents of a + refresh token file. Raises: IOError: If the specified file doesn't exist or cannot be read. - ValueError: If the refresh token file is invalid. + ValueError: If the refresh token configuration is invalid. 
""" - super(RefreshToken, self).__init__() - with open(file_path) as json_keyfile: - json_data = json.load(json_keyfile) + super().__init__() + if _is_file_path(refresh_token): + with open(refresh_token, encoding='utf-8') as json_file: + json_data = json.load(json_file) + elif isinstance(refresh_token, dict): + json_data = refresh_token + else: + raise ValueError( + f'Invalid refresh token argument: "{refresh_token}". Refresh token argument must ' + 'be a file path, or a dict containing the parsed file contents.') + if json_data.get('type') != self._CREDENTIAL_TYPE: - raise ValueError('Invalid refresh token file: "{0}". File must contain a ' - '"type" field set to "{1}".'.format(file_path, self._CREDENTIAL_TYPE)) - try: - client_id = json_data['client_id'] - client_secret = json_data['client_secret'] - refresh_token = json_data['refresh_token'] - except KeyError as error: - raise ValueError('Failed to initialize a refresh token credential from file "{0}". ' - 'Caused by: "{1}"'.format(file_path, error)) - self._g_credential = credentials.Credentials( - token=None, refresh_token=refresh_token, - token_uri='https://accounts.google.com/o/oauth2/token', - client_id=client_id, client_secret=client_secret, scopes=_scopes) + raise ValueError( + 'Invalid refresh token configuration. JSON must contain a ' + f'"type" field set to "{self._CREDENTIAL_TYPE}".') + self._g_credential = credentials.Credentials.from_authorized_user_info(json_data, _scopes) @property def client_id(self): @@ -184,18 +223,17 @@ def client_secret(self): def refresh_token(self): return self._g_credential.refresh_token - def get_access_token(self): - """Fetches a Google OAuth2 access token using this refresh token credential. - - Returns: - AccessTokenInfo: An access token obtained using the credential. - """ - self._g_credential.refresh(_request) - return AccessTokenInfo(self._g_credential.token, self._g_credential.expiry) - def get_credential(self): """Returns the underlying Google credential. 
Returns: google.auth.credentials.Credentials: A Google Auth credential instance.""" return self._g_credential + + +def _is_file_path(path): + try: + pathlib.Path(path) + return True + except TypeError: + return False diff --git a/firebase_admin/db.py b/firebase_admin/db.py new file mode 100644 index 000000000..800cbf8e3 --- /dev/null +++ b/firebase_admin/db.py @@ -0,0 +1,965 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase Realtime Database module. + +This module contains functions and classes that facilitate interacting with the Firebase Realtime +Database. It supports basic data manipulation operations, as well as complex queries such as +limit queries and range queries. However, it does not support realtime update notifications. This +module uses the Firebase REST API underneath. 
+"""
+
+import collections
+import json
+import os
+import sys
+import threading
+from urllib import parse
+
+import requests
+
+import firebase_admin
+from firebase_admin import exceptions
+from firebase_admin import _http_client
+from firebase_admin import _sseclient
+from firebase_admin import _utils
+
+
+_DB_ATTRIBUTE = '_database'
+_INVALID_PATH_CHARACTERS = '[].?#$'
+_RESERVED_FILTERS = ('$key', '$value', '$priority')
+_USER_AGENT = (
+    f'Firebase/HTTP/{firebase_admin.__version__}/{sys.version_info.major}'
+    f'.{sys.version_info.minor}/AdminPython'
+)
+_TRANSACTION_MAX_RETRIES = 25
+_EMULATOR_HOST_ENV_VAR = 'FIREBASE_DATABASE_EMULATOR_HOST'
+
+
+def reference(path='/', app=None, url=None):
+    """Returns a database ``Reference`` representing the node at the specified path.
+
+    If no path is specified, this function returns a ``Reference`` that represents the database
+    root. By default, the returned References provide access to the Firebase Database specified at
+    app initialization. To connect to a different database instance in the same Firebase project,
+    specify the ``url`` parameter.
+
+    Args:
+        path: Path to a node in the Firebase realtime database (optional).
+        app: An App instance (optional).
+        url: Base URL of the Firebase Database instance (optional). When specified, takes
+            precedence over the ``databaseURL`` option set at app initialization.
+
+    Returns:
+        Reference: A newly initialized Reference.
+
+    Raises:
+        ValueError: If the specified path or app is invalid.
+    """
+    service = _utils.get_app_service(app, _DB_ATTRIBUTE, _DatabaseService)
+    client = service.get_client(url)
+    return Reference(client=client, path=path)
+
+def _parse_path(path):
+    """Parses a path string into a set of segments."""
+    if not isinstance(path, str):
+        raise ValueError(f'Invalid path: "{path}". Path must be a string.')
+    if any(ch in path for ch in _INVALID_PATH_CHARACTERS):
+        raise ValueError(f'Invalid path: "{path}". 
Path contains illegal characters.') + return [seg for seg in path.split('/') if seg] + + +class Event: + """Represents a realtime update event received from the database.""" + + def __init__(self, sse_event): + self._sse_event = sse_event + self._data = json.loads(sse_event.data) + + @property + def data(self): + """Parsed JSON data of this event.""" + return self._data['data'] + + @property + def path(self): + """Path of the database reference that triggered this event.""" + return self._data['path'] + + @property + def event_type(self): + """Event type string (put, patch).""" + return self._sse_event.event_type + + +class ListenerRegistration: + """Represents the addition of an event listener to a database reference.""" + + def __init__(self, callback, sse): + """Initializes a new listener with given parameters. + + This is an internal API. Use the ``db.Reference.listen()`` method to start a + new listener. + + Args: + callback: The callback function to fire in case of event. + sse: A transport session to make requests with. + """ + self._callback = callback + self._sse = sse + self._thread = threading.Thread(target=self._start_listen) + self._thread.start() + + def _start_listen(self): + # iterate the sse client's generator + for sse_event in self._sse: + # only inject data events + if sse_event: + self._callback(Event(sse_event)) + + def close(self): + """Stops the event listener represented by this registration + + This closes the SSE HTTP connection, and joins the background thread. + """ + self._sse.close() + self._thread.join() + + +class Reference: + """Reference represents a node in the Firebase realtime database.""" + + def __init__(self, **kwargs): + """Creates a new Reference using the provided parameters. + + This method is for internal use only. Use db.reference() to obtain an instance of + Reference. 
+ """ + self._client = kwargs.get('client') + if 'segments' in kwargs: + self._segments = kwargs.get('segments') + else: + self._segments = _parse_path(kwargs.get('path')) + self._pathurl = '/' + '/'.join(self._segments) + + @property + def key(self): + if self._segments: + return self._segments[-1] + return None + + @property + def path(self): + return self._pathurl + + @property + def parent(self): + if self._segments: + return Reference(client=self._client, segments=self._segments[:-1]) + return None + + def child(self, path): + """Returns a Reference to the specified child node. + + The path may point to an immediate child of the current Reference, or a deeply nested + child. Child paths must not begin with '/'. + + Args: + path: Path to the child node. + + Returns: + Reference: A database Reference representing the specified child node. + + Raises: + ValueError: If the child path is not a string, not well-formed or begins with '/'. + """ + if not path or not isinstance(path, str): + raise ValueError(f'Invalid path argument: "{path}". Path must be a non-empty string.') + if path.startswith('/'): + raise ValueError(f'Invalid path argument: "{path}". Child path must not start with "/"') + full_path = self._pathurl + '/' + path + return Reference(client=self._client, path=full_path) + + def get(self, etag=False, shallow=False): + """Returns the value, and optionally the ETag, at the current location of the database. + + Args: + etag: A boolean indicating whether the Etag value should be returned or not (optional). + shallow: A boolean indicating whether to execute a shallow read (optional). Shallow + reads do not retrieve the child nodes of the current database location. Cannot be + set to True if ``etag`` is also set to True. + + Returns: + object: If etag is False returns the decoded JSON value of the current database location. + If etag is True, returns a 2-tuple consisting of the decoded JSON value and the Etag + associated with the current database location. 
+ + Raises: + ValueError: If both ``etag`` and ``shallow`` are set to True. + FirebaseError: If an error occurs while communicating with the remote database server. + """ + if etag: + if shallow: + raise ValueError('etag and shallow cannot both be set to True.') + headers, data = self._client.headers_and_body( + 'get', self._add_suffix(), headers={'X-Firebase-ETag' : 'true'}) + return data, headers.get('ETag') + + params = 'shallow=true' if shallow else None + return self._client.body('get', self._add_suffix(), params=params) + + def get_if_changed(self, etag): + """Gets data in this location only if the specified ETag does not match. + + Args: + etag: The ETag value to be checked against the ETag of the current location. + + Returns: + tuple: A 3-tuple consisting of a boolean, a decoded JSON value and an ETag. If the ETag + specified by the caller did not match, the boolen value will be True and the JSON + and ETag values would reflect the corresponding values in the database. If the ETag + matched, the boolean value will be False and the other elements of the tuple will be + None. + + Raises: + ValueError: If the ETag is not a string. + FirebaseError: If an error occurs while communicating with the remote database server. + """ + if not isinstance(etag, str): + raise ValueError('ETag must be a string.') + + resp = self._client.request('get', self._add_suffix(), headers={'if-none-match': etag}) + if resp.status_code == 304: + return False, None, None + + return True, resp.json(), resp.headers.get('ETag') + + def set(self, value): + """Sets the data at this location to the given value. + + The value must be JSON-serializable and not None. + + Args: + value: JSON-serializable value to be set at this location. + + Raises: + ValueError: If the provided value is None. + TypeError: If the value is not JSON-serializable. + FirebaseError: If an error occurs while communicating with the remote database server. 
+ """ + if value is None: + raise ValueError('Value must not be None.') + self._client.request('put', self._add_suffix(), json=value, params='print=silent') + + def set_if_unchanged(self, expected_etag, value): + """Conditonally sets the data at this location to the given value. + + Sets the data at this location to the given value only if ``expected_etag`` is same as the + ETag value in the database. + + Args: + expected_etag: Value of ETag we want to check. + value: JSON-serializable value to be set at this location. + + Returns: + tuple: A 3-tuple consisting of a boolean, a decoded JSON value and an ETag. The boolean + indicates whether the set operation was successful or not. The decoded JSON and the + ETag corresponds to the latest value in this database location. + + Raises: + ValueError: If the value is None, or if expected_etag is not a string. + FirebaseError: If an error occurs while communicating with the remote database server. + """ + # pylint: disable=missing-raises-doc + if not isinstance(expected_etag, str): + raise ValueError('Expected ETag must be a string.') + if value is None: + raise ValueError('Value must not be none.') + + try: + headers = self._client.headers( + 'put', self._add_suffix(), json=value, headers={'if-match': expected_etag}) + return True, value, headers.get('ETag') + except exceptions.FailedPreconditionError as error: + http_response = error.http_response + if http_response is not None and 'ETag' in http_response.headers: + etag = http_response.headers['ETag'] + snapshot = http_response.json() + return False, snapshot, etag + + raise error + + def push(self, value=''): + """Creates a new child node. + + The optional value argument can be used to provide an initial value for the child node. If + no value is provided, child node will have empty string as the default value. + + Args: + value: JSON-serializable initial value for the child node (optional). 
+ + Returns: + Reference: A Reference representing the newly created child node. + + Raises: + ValueError: If the value is None. + TypeError: If the value is not JSON-serializable. + FirebaseError: If an error occurs while communicating with the remote database server. + """ + if value is None: + raise ValueError('Value must not be None.') + output = self._client.body('post', self._add_suffix(), json=value) + push_id = output.get('name') + return self.child(push_id) + + def update(self, value): + """Updates the specified child keys of this Reference to the provided values. + + Args: + value: A dictionary containing the child keys to update, and their new values. + + Raises: + ValueError: If value is empty or not a dictionary. + FirebaseError: If an error occurs while communicating with the remote database server. + """ + if not value or not isinstance(value, dict): + raise ValueError('Value argument must be a non-empty dictionary.') + if None in value.keys(): + raise ValueError('Dictionary must not contain None keys.') + self._client.request('patch', self._add_suffix(), json=value, params='print=silent') + + def delete(self): + """Deletes this node from the database. + + Raises: + FirebaseError: If an error occurs while communicating with the remote database server. + """ + self._client.request('delete', self._add_suffix()) + + def listen(self, callback): + """Registers the ``callback`` function to receive realtime updates. + + The specified callback function will get invoked with ``db.Event`` objects for each + realtime update received from the database. It will also get called whenever the SDK + reconnects to the server due to network issues or credential expiration. In general, + the OAuth2 credentials used to authorize connections to the server expire every hour. + Therefore clients should expect the ``callback`` to fire at least once every hour, even if + there are no updates in the database. 
+ + This API is based on the event streaming support available in the Firebase REST API. Each + call to ``listen()`` starts a new HTTP connection and a background thread. This is an + experimental feature. It currently does not honor the auth overrides and timeout settings. + Cannot be used in thread-constrained environments like Google App Engine. + + Args: + callback: A function to be called when a data change is detected. + + Returns: + ListenerRegistration: An object that can be used to stop the event listener. + + Raises: + FirebaseError: If an error occurs while starting the initial HTTP connection. + """ + return self._listen_with_session(callback) + + def transaction(self, transaction_update): + """Atomically modifies the data at this location. + + Unlike a normal ``set()``, which just overwrites the data regardless of its previous state, + ``transaction()`` is used to modify the existing value to a new value, ensuring there are + no conflicts with other clients simultaneously writing to the same location. + + This is accomplished by passing an update function which is used to transform the current + value of this reference into a new value. If another client writes to this location before + the new value is successfully saved, the update function is called again with the new + current value, and the write will be retried. In case of repeated failures, this method + will retry the transaction up to 25 times before giving up and raising a + TransactionAbortedError. The update function may also force an early abort by raising an + exception instead of returning a value. + + Args: + transaction_update: A function which will be passed the current data stored at this + location. The function should return the new value it would like written. If + an exception is raised, the transaction will be aborted, and the data at this + location will not be modified. The exceptions raised by this function are + propagated to the caller of the transaction method. 
+ + Returns: + object: New value of the current database Reference (only if the transaction commits). + + Raises: + TransactionAbortedError: If the transaction aborts after exhausting all retry attempts. + ValueError: If transaction_update is not a function. + """ + if not callable(transaction_update): + raise ValueError('transaction_update must be a function.') + + tries = 0 + data, etag = self.get(etag=True) + while tries < _TRANSACTION_MAX_RETRIES: + new_data = transaction_update(data) + success, data, etag = self.set_if_unchanged(etag, new_data) + if success: + return new_data + tries += 1 + + raise TransactionAbortedError('Transaction aborted after failed retries.') + + def order_by_child(self, path): + """Returns a Query that orders data by child values. + + Returned Query can be used to set additional parameters, and execute complex database + queries (e.g. limit queries, range queries). + + Args: + path: Path to a valid child of the current Reference. + + Returns: + Query: A database Query instance. + + Raises: + ValueError: If the child path is not a string, not well-formed or None. + """ + if path in _RESERVED_FILTERS: + raise ValueError(f'Illegal child path: {path}') + return Query(order_by=path, client=self._client, pathurl=self._add_suffix()) + + def order_by_key(self): + """Creates a Query that orderes data by key. + + Returned Query can be used to set additional parameters, and execute complex database + queries (e.g. limit queries, range queries). + + Returns: + Query: A database Query instance. + """ + return Query(order_by='$key', client=self._client, pathurl=self._add_suffix()) + + def order_by_value(self): + """Creates a Query that orderes data by value. + + Returned Query can be used to set additional parameters, and execute complex database + queries (e.g. limit queries, range queries). + + Returns: + Query: A database Query instance. 
+ """ + return Query(order_by='$value', client=self._client, pathurl=self._add_suffix()) + + def _add_suffix(self, suffix='.json'): + return self._pathurl + suffix + + def _listen_with_session(self, callback, session=None): + url = self._client.base_url + self._add_suffix() + if not session: + session = self._client.create_listener_session() + + try: + sse = _sseclient.SSEClient(url, session, **{"params": self._client.params}) + return ListenerRegistration(callback, sse) + except requests.exceptions.RequestException as error: + raise _Client.handle_rtdb_error(error) + + +class Query: + """Represents a complex query that can be executed on a Reference. + + Complex queries can consist of up to 2 components: a required ordering constraint, and an + optional filtering constraint. At the server, data is first sorted according to the given + ordering constraint (e.g. order by child). Then the filtering constraint (e.g. limit, range) + is applied on the sorted data to produce the final result. Despite the ordering constraint, + the final result is returned by the server as an unordered collection. Therefore the Query + interface performs another round of sorting at the client-side before returning the results + to the caller. This client-side sorted results are returned to the user as a Python + OrderedDict. + """ + + def __init__(self, **kwargs): + order_by = kwargs.pop('order_by') + if not order_by or not isinstance(order_by, str): + raise ValueError('order_by field must be a non-empty string') + if order_by not in _RESERVED_FILTERS: + if order_by.startswith('/'): + raise ValueError( + f'Invalid path argument: "{order_by}". 
Child path must not start with "/"') + segments = _parse_path(order_by) + order_by = '/'.join(segments) + self._client = kwargs.pop('client') + self._pathurl = kwargs.pop('pathurl') + self._order_by = order_by + self._params = {'orderBy' : json.dumps(order_by)} + if kwargs: + raise ValueError(f'Unexpected keyword arguments: {kwargs}') + + def limit_to_first(self, limit): + """Creates a query with limit, and anchors it to the start of the window. + + Args: + limit: The maximum number of child nodes to return. + + Returns: + Query: The updated Query instance. + + Raises: + ValueError: If the value is not an integer, or set_limit_last() was called previously. + """ + if not isinstance(limit, int) or limit < 0: + raise ValueError('Limit must be a non-negative integer.') + if 'limitToLast' in self._params: + raise ValueError('Cannot set both first and last limits.') + self._params['limitToFirst'] = limit + return self + + def limit_to_last(self, limit): + """Creates a query with limit, and anchors it to the end of the window. + + Args: + limit: The maximum number of child nodes to return. + + Returns: + Query: The updated Query instance. + + Raises: + ValueError: If the value is not an integer, or set_limit_first() was called previously. + """ + if not isinstance(limit, int) or limit < 0: + raise ValueError('Limit must be a non-negative integer.') + if 'limitToFirst' in self._params: + raise ValueError('Cannot set both first and last limits.') + self._params['limitToLast'] = limit + return self + + def start_at(self, start): + """Sets the lower bound for a range query. + + The Query will only return child nodes with a value greater than or equal to the specified + value. + + Args: + start: JSON-serializable value to start at, inclusive. + + Returns: + Query: The updated Query instance. + + Raises: + ValueError: If the value is ``None``. 
+ """ + if start is None: + raise ValueError('Start value must not be None.') + self._params['startAt'] = json.dumps(start) + return self + + def end_at(self, end): + """Sets the upper bound for a range query. + + The Query will only return child nodes with a value less than or equal to the specified + value. + + Args: + end: JSON-serializable value to end at, inclusive. + + Returns: + Query: The updated Query instance. + + Raises: + ValueError: If the value is ``None``. + """ + if end is None: + raise ValueError('End value must not be None.') + self._params['endAt'] = json.dumps(end) + return self + + def equal_to(self, value): + """Sets an equals constraint on the Query. + + The Query will only return child nodes whose value is equal to the specified value. + + Args: + value: JSON-serializable value to query for. + + Returns: + Query: The updated Query instance. + + Raises: + ValueError: If the value is ``None``. + """ + if value is None: + raise ValueError('Equal to value must not be None.') + self._params['equalTo'] = json.dumps(value) + return self + + @property + def _querystr(self): + params = [] + for key in sorted(self._params): + params.append(f'{key}={self._params[key]}') + return '&'.join(params) + + def get(self): + """Executes this Query and returns the results. + + The results will be returned as a sorted list or an OrderedDict. + + Returns: + object: Decoded JSON result of the Query. + + Raises: + FirebaseError: If an error occurs while communicating with the remote database server. 
+ """ + result = self._client.body('get', self._pathurl, params=self._querystr) + if isinstance(result, (dict, list)) and self._order_by != '$priority': + return _Sorter(result, self._order_by).get() + return result + + +class TransactionAbortedError(exceptions.AbortedError): + """A transaction was aborted aftr exceeding the maximum number of retries.""" + + def __init__(self, message): + exceptions.AbortedError.__init__(self, message) + + +class _Sorter: + """Helper class for sorting query results.""" + + def __init__(self, results, order_by): + if isinstance(results, dict): + self.dict_input = True + entries = [_SortEntry(k, v, order_by) for k, v in results.items()] + elif isinstance(results, list): + self.dict_input = False + entries = [_SortEntry(k, v, order_by) for k, v in enumerate(results)] + else: + raise ValueError(f'Sorting not supported for "{type(results)}" object.') + self.sort_entries = sorted(entries) + + def get(self): + if self.dict_input: + return collections.OrderedDict([(e.key, e.value) for e in self.sort_entries]) + + return [e.value for e in self.sort_entries] + + +class _SortEntry: + """A wrapper that is capable of sorting items in a dictionary.""" + + _type_none = 0 + _type_bool_false = 1 + _type_bool_true = 2 + _type_numeric = 3 + _type_string = 4 + _type_object = 5 + + def __init__(self, key, value, order_by): + self._key = key + self._value = value + if order_by in ('$key', '$priority'): + self._index = key + elif order_by == '$value': + self._index = value + else: + self._index = _SortEntry._extract_child(value, order_by) + self._index_type = _SortEntry._get_index_type(self._index) + + @property + def key(self): + return self._key + + @property + def index(self): + return self._index + + @property + def index_type(self): + return self._index_type + + @property + def value(self): + return self._value + + @classmethod + def _get_index_type(cls, index): + """Assigns an integer code to the type of the index. 
+ + The index type determines how differently typed values are sorted. This ordering is based + on https://firebase.google.com/docs/database/rest/retrieve-data#section-rest-ordered-data + """ + if index is None: + return cls._type_none + if isinstance(index, bool) and not index: + return cls._type_bool_false + if isinstance(index, bool) and index: + return cls._type_bool_true + if isinstance(index, (int, float)): + return cls._type_numeric + if isinstance(index, str): + return cls._type_string + + return cls._type_object + + @classmethod + def _extract_child(cls, value, path): + segments = path.split('/') + current = value + for segment in segments: + if isinstance(current, dict): + current = current.get(segment) + else: + return None + return current + + def _compare(self, other): + """Compares two _SortEntry instances. + + If the indices have the same numeric or string type, compare them directly. Ties are + broken by comparing the keys. If the indices have the same type, but are neither numeric + nor string, compare the keys. In all other cases compare based on the ordering provided + by index types. 
+ """ + self_key, other_key = self.index_type, other.index_type + if self_key == other_key: + if self_key in (self._type_numeric, self._type_string) and self.index != other.index: + self_key, other_key = self.index, other.index + else: + self_key, other_key = self.key, other.key + + if self_key < other_key: + return -1 + if self_key > other_key: + return 1 + + return 0 + + def __lt__(self, other): + return self._compare(other) < 0 + + def __le__(self, other): + return self._compare(other) <= 0 + + def __gt__(self, other): + return self._compare(other) > 0 + + def __ge__(self, other): + return self._compare(other) >= 0 + + def __eq__(self, other): + return self._compare(other) == 0 + + +class _DatabaseService: + """Service that maintains a collection of database clients.""" + + _DEFAULT_AUTH_OVERRIDE = '_admin_' + + def __init__(self, app): + self._credential = app.credential + db_url = app.options.get('databaseURL') + if db_url: + self._db_url = db_url + else: + self._db_url = None + + auth_override = _DatabaseService._get_auth_override(app) + if auth_override not in (self._DEFAULT_AUTH_OVERRIDE, {}): + self._auth_override = json.dumps(auth_override, separators=(',', ':')) + else: + self._auth_override = None + self._timeout = app.options.get('httpTimeout', _http_client.DEFAULT_TIMEOUT_SECONDS) + self._clients = {} + + emulator_host = os.environ.get(_EMULATOR_HOST_ENV_VAR) + if emulator_host: + if '//' in emulator_host: + raise ValueError( + f'Invalid {_EMULATOR_HOST_ENV_VAR}: "{emulator_host}". It must follow format ' + '"host:port".') + self._emulator_host = emulator_host + else: + self._emulator_host = None + + def get_client(self, db_url=None): + """Creates a client based on the db_url. Clients may be cached.""" + if db_url is None: + db_url = self._db_url + + if not db_url or not isinstance(db_url, str): + raise ValueError( + f'Invalid database URL: "{db_url}". 
Database URL must be a non-empty URL string.') + + parsed_url = parse.urlparse(db_url) + if not parsed_url.netloc: + raise ValueError( + f'Invalid database URL: "{db_url}". Database URL must be a wellformed URL string.') + + emulator_config = self._get_emulator_config(parsed_url) + if emulator_config: + credential = _utils.EmulatorAdminCredentials() + base_url = emulator_config.base_url + params = {'ns': emulator_config.namespace} + else: + # Defer credential lookup until we are certain it's going to be prod connection. + credential = self._credential.get_credential() + base_url = f'https://{parsed_url.netloc}' + params = {} + + + if self._auth_override: + params['auth_variable_override'] = self._auth_override + + client_cache_key = (base_url, json.dumps(params, sort_keys=True)) + if client_cache_key not in self._clients: + client = _Client(credential, base_url, self._timeout, params) + self._clients[client_cache_key] = client + return self._clients[client_cache_key] + + def _get_emulator_config(self, parsed_url): + """Checks whether the SDK should connect to the RTDB emulator.""" + EmulatorConfig = collections.namedtuple('EmulatorConfig', ['base_url', 'namespace']) + if parsed_url.scheme != 'https': + # Emulator mode enabled by passing http URL via AppOptions + base_url, namespace = _DatabaseService._parse_emulator_url(parsed_url) + return EmulatorConfig(base_url, namespace) + if self._emulator_host: + # Emulator mode enabled via environment variable + base_url = f'http://{self._emulator_host}' + namespace = parsed_url.netloc.split('.')[0] + return EmulatorConfig(base_url, namespace) + + return None + + @classmethod + def _parse_emulator_url(cls, parsed_url): + """Parses emulator URL like http://localhost:8080/?ns=foo-bar""" + query_ns = parse.parse_qs(parsed_url.query).get('ns') + if parsed_url.scheme != 'http' or (not query_ns or len(query_ns) != 1 or not query_ns[0]): + raise ValueError( + f'Invalid database URL: "{parsed_url.geturl()}". 
Database URL must be a valid URL ' + 'to a Firebase Realtime Database instance.') + + namespace = query_ns[0] + base_url = f'{parsed_url.scheme}://{parsed_url.netloc}' + return base_url, namespace + + @classmethod + def _get_auth_override(cls, app): + """Gets and validates the database auth override to be used.""" + auth_override = app.options.get('databaseAuthVariableOverride', cls._DEFAULT_AUTH_OVERRIDE) + if auth_override == cls._DEFAULT_AUTH_OVERRIDE or auth_override is None: + return auth_override + if not isinstance(auth_override, dict): + raise ValueError( + f'Invalid databaseAuthVariableOverride option: "{auth_override}". Override ' + 'value must be a dict or None.') + + return auth_override + + def close(self): + for value in self._clients.values(): + value.close() + self._clients = {} + + +class _Client(_http_client.JsonHttpClient): + """HTTP client used to make REST calls. + + _Client maintains an HTTP session, and handles authenticating HTTP requests along with + marshalling and unmarshalling of JSON data. + """ + + def __init__(self, credential, base_url, timeout, params=None): + """Creates a new _Client from the given parameters. + + This exists primarily to enable testing. For regular use, obtain _Client instances by + calling the from_app() class method. + + Args: + credential: A Google credential that can be used to authenticate requests. + base_url: A URL prefix to be added to all outgoing requests. This is typically the + Firebase Realtime Database URL. + timeout: HTTP request timeout in seconds. If set to None connections will never + timeout, which is the default behavior of the underlying requests library. + params: Dict of query parameters to add to all outgoing requests. 
+ """ + super().__init__( + credential=credential, base_url=base_url, + timeout=timeout, headers={'User-Agent': _USER_AGENT}) + self.credential = credential + self.params = params if params else {} + + def request(self, method, url, **kwargs): + """Makes an HTTP call using the Python requests library. + + Extends the request() method of the parent JsonHttpClient class. Handles default + params like auth overrides, and low-level exceptions. + + Args: + method: HTTP method name as a string (e.g. get, post). + url: URL path of the remote endpoint. This will be appended to the server's base URL. + **kwargs: An additional set of keyword arguments to be passed into requests API + (e.g. json, params). + + Returns: + Response: An HTTP response object. + + Raises: + FirebaseError: If an error occurs while making the HTTP call. + """ + query = '&'.join(f'{key}={value}' for key, value in self.params.items()) + extra_params = kwargs.get('params') + if extra_params: + if query: + query = extra_params + '&' + query + else: + query = extra_params + kwargs['params'] = query + + try: + return super().request(method, url, **kwargs) + except requests.exceptions.RequestException as error: + raise _Client.handle_rtdb_error(error) + + def create_listener_session(self): + return _sseclient.KeepAuthSession(self.credential) + + @classmethod + def handle_rtdb_error(cls, error): + """Converts an error encountered while calling RTDB into a FirebaseError.""" + if error.response is None: + return _utils.handle_requests_error(error) + + message = cls._extract_error_message(error.response) + return _utils.handle_requests_error(error, message=message) + + @classmethod + def _extract_error_message(cls, response): + """Extracts an error message from an error response. + + If the server has sent a JSON response with an 'error' field, which is the typical + behavior of the Realtime Database REST API, parses the response to retrieve the error + message. 
If the server has sent a non-JSON response, returns the full response + as the error message. + """ + message = None + try: + # RTDB error format: {"error": "text message"} + data = response.json() + if isinstance(data, dict): + message = data.get('error') + except ValueError: + pass + + if not message: + message = f'Unexpected response from database: {response.content.decode()}' + + return message diff --git a/firebase_admin/exceptions.py b/firebase_admin/exceptions.py new file mode 100644 index 000000000..947f36806 --- /dev/null +++ b/firebase_admin/exceptions.py @@ -0,0 +1,237 @@ +# Copyright 2019 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase Exceptions module. + +This module defines the base types for exceptions and the platform-wide error codes as outlined in +https://cloud.google.com/apis/design/errors. + +:class:`FirebaseError` is the parent class of all exceptions raised by the Admin SDK. It contains +the ``code``, ``http_response`` and ``cause`` properties common to all Firebase exception types. +Each exception also carries a message that outlines what went wrong. This can be logged for +audit or debugging purposes. + +When calling an Admin SDK API, developers can catch the parent ``FirebaseError`` and +inspect its ``code`` to implement fine-grained error handling. Alternatively, developers can +catch one or more subtypes of ``FirebaseError``. 
Under normal conditions, any given API can raise +only a small subset of the available exception subtypes. However, the SDK also exposes rare error +conditions like connection timeouts and other I/O errors as instances of ``FirebaseError``. +Therefore it is always a good idea to have a handler specified for ``FirebaseError``, after all the +subtype error handlers. +""" + + +#: Error code for ``InvalidArgumentError`` type. +INVALID_ARGUMENT = 'INVALID_ARGUMENT' + +#: Error code for ``FailedPreconditionError`` type. +FAILED_PRECONDITION = 'FAILED_PRECONDITION' + +#: Error code for ``OutOfRangeError`` type. +OUT_OF_RANGE = 'OUT_OF_RANGE' + +#: Error code for ``UnauthenticatedError`` type. +UNAUTHENTICATED = 'UNAUTHENTICATED' + +#: Error code for ``PermissionDeniedError`` type. +PERMISSION_DENIED = 'PERMISSION_DENIED' + +#: Error code for ``NotFoundError`` type. +NOT_FOUND = 'NOT_FOUND' + +#: Error code for ``ConflictError`` type. +CONFLICT = 'CONFLICT' + +#: Error code for ``AbortedError`` type. +ABORTED = 'ABORTED' + +#: Error code for ``AlreadyExistsError`` type. +ALREADY_EXISTS = 'ALREADY_EXISTS' + +#: Error code for ``ResourceExhaustedError`` type. +RESOURCE_EXHAUSTED = 'RESOURCE_EXHAUSTED' + +#: Error code for ``CancelledError`` type. +CANCELLED = 'CANCELLED' + +#: Error code for ``DataLossError`` type. +DATA_LOSS = 'DATA_LOSS' + +#: Error code for ``UnknownError`` type. +UNKNOWN = 'UNKNOWN' + +#: Error code for ``InternalError`` type. +INTERNAL = 'INTERNAL' + +#: Error code for ``UnavailableError`` type. +UNAVAILABLE = 'UNAVAILABLE' + +#: Error code for ``DeadlineExceededError`` type. +DEADLINE_EXCEEDED = 'DEADLINE_EXCEEDED' + + +class FirebaseError(Exception): + """Base class for all errors raised by the Admin SDK. + + Args: + code: A string error code that represents the type of the exception. Possible error + codes are defined in https://cloud.google.com/apis/design/errors#handling_errors. + message: A human-readable error message string. 
+ cause: The exception that caused this error (optional). + http_response: If this error was caused by an HTTP error response, this property is + set to the ``requests.Response`` object that represents the HTTP response (optional). + See https://docs.python-requests.org/en/master/api/#requests.Response for details of + this object. + """ + + def __init__(self, code, message, cause=None, http_response=None): + Exception.__init__(self, message) + self._code = code + self._cause = cause + self._http_response = http_response + + @property + def code(self): + return self._code + + @property + def cause(self): + return self._cause + + @property + def http_response(self): + return self._http_response + + +class InvalidArgumentError(FirebaseError): + """Client specified an invalid argument.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, INVALID_ARGUMENT, message, cause, http_response) + + +class FailedPreconditionError(FirebaseError): + """Request can not be executed in the current system state, such as deleting a non-empty + directory.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, FAILED_PRECONDITION, message, cause, http_response) + + +class OutOfRangeError(FirebaseError): + """Client specified an invalid range.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, OUT_OF_RANGE, message, cause, http_response) + + +class UnauthenticatedError(FirebaseError): + """Request not authenticated due to missing, invalid, or expired OAuth token.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, UNAUTHENTICATED, message, cause, http_response) + + +class PermissionDeniedError(FirebaseError): + """Client does not have sufficient permission. 
+ + This can happen because the OAuth token does not have the right scopes, the client doesn't + have permission, or the API has not been enabled for the client project. + """ + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, PERMISSION_DENIED, message, cause, http_response) + + +class NotFoundError(FirebaseError): + """A specified resource is not found, or the request is rejected by undisclosed reasons, such + as whitelisting.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, NOT_FOUND, message, cause, http_response) + + +class ConflictError(FirebaseError): + """Concurrency conflict, such as read-modify-write conflict.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, CONFLICT, message, cause, http_response) + + +class AbortedError(FirebaseError): + """Concurrency conflict, such as read-modify-write conflict.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, ABORTED, message, cause, http_response) + + +class AlreadyExistsError(FirebaseError): + """The resource that a client tried to create already exists.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, ALREADY_EXISTS, message, cause, http_response) + + +class ResourceExhaustedError(FirebaseError): + """Either out of resource quota or reaching rate limiting.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, RESOURCE_EXHAUSTED, message, cause, http_response) + + +class CancelledError(FirebaseError): + """Request cancelled by the client.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, CANCELLED, message, cause, http_response) + + +class DataLossError(FirebaseError): + """Unrecoverable data loss or data corruption.""" + + def __init__(self, message, cause=None, 
http_response=None): + FirebaseError.__init__(self, DATA_LOSS, message, cause, http_response) + + +class UnknownError(FirebaseError): + """Unknown server error.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, UNKNOWN, message, cause, http_response) + + +class InternalError(FirebaseError): + """Internal server error.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, INTERNAL, message, cause, http_response) + + +class UnavailableError(FirebaseError): + """Service unavailable. Typically the server is down.""" + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, UNAVAILABLE, message, cause, http_response) + + +class DeadlineExceededError(FirebaseError): + """Request deadline exceeded. + + This will happen only if the caller sets a deadline that is shorter than the method's + default deadline (i.e. requested deadline is not enough for the server to process the + request) and the request did not finish within the deadline. + """ + + def __init__(self, message, cause=None, http_response=None): + FirebaseError.__init__(self, DEADLINE_EXCEEDED, message, cause, http_response) diff --git a/firebase_admin/firestore.py b/firebase_admin/firestore.py new file mode 100644 index 000000000..52ea90671 --- /dev/null +++ b/firebase_admin/firestore.py @@ -0,0 +1,92 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Cloud Firestore module. + +This module contains utilities for accessing the Google Cloud Firestore databases associated with +Firebase apps. This requires the ``google-cloud-firestore`` Python module. +""" + +from __future__ import annotations +from typing import Optional, Dict +from firebase_admin import App +from firebase_admin import _utils + +try: + from google.cloud import firestore + from google.cloud.firestore_v1.base_client import DEFAULT_DATABASE + existing = globals().keys() + for key, value in firestore.__dict__.items(): + if not key.startswith('_') and key not in existing: + globals()[key] = value +except ImportError as error: + raise ImportError('Failed to import the Cloud Firestore library for Python. Make sure ' + 'to install the "google-cloud-firestore" module.') from error + + +_FIRESTORE_ATTRIBUTE = '_firestore' + + +def client(app: Optional[App] = None, database_id: Optional[str] = None) -> firestore.Client: + """Returns a client that can be used to interact with Google Cloud Firestore. + + Args: + app: An App instance (optional). + database_id: The database ID of the Google Cloud Firestore database to be used. + Defaults to the default Firestore database ID if not specified or an empty string + (optional). + + Returns: + google.cloud.firestore.Firestore: A `Firestore Client`_. + + Raises: + ValueError: If the specified database ID is not a valid string, or if a project ID is not + specified either via options, credentials or environment variables, or if the specified + project ID is not a valid string. + + .. 
_Firestore Client: https://cloud.google.com/python/docs/reference/firestore/latest/\ + google.cloud.firestore_v1.client.Client + """ + # Validate database_id + if database_id is not None and not isinstance(database_id, str): + raise ValueError(f'database_id "{database_id}" must be a string or None.') + fs_service = _utils.get_app_service(app, _FIRESTORE_ATTRIBUTE, _FirestoreService) + return fs_service.get_client(database_id) + + +class _FirestoreService: + """Service that maintains a collection of firestore clients.""" + + def __init__(self, app: App) -> None: + self._app: App = app + self._clients: Dict[str, firestore.Client] = {} + + def get_client(self, database_id: Optional[str]) -> firestore.Client: + """Creates a client based on the database_id. These clients are cached.""" + database_id = database_id or DEFAULT_DATABASE + if database_id not in self._clients: + # Create a new client and cache it in _clients + credentials = self._app.credential.get_credential() + project = self._app.project_id + if not project: + raise ValueError( + 'Project ID is required to access Firestore. Either set the projectId option, ' + 'or use service account credentials. Alternatively, set the ' + 'GOOGLE_CLOUD_PROJECT environment variable.') + + fs_client = firestore.Client( + credentials=credentials, project=project, database=database_id) + self._clients[database_id] = fs_client + + return self._clients[database_id] diff --git a/firebase_admin/firestore_async.py b/firebase_admin/firestore_async.py new file mode 100644 index 000000000..4a197e9df --- /dev/null +++ b/firebase_admin/firestore_async.py @@ -0,0 +1,92 @@ +# Copyright 2022 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Cloud Firestore Async module.
+
+This module contains utilities for asynchronously accessing the Google Cloud Firestore databases
+associated with Firebase apps. This requires the ``google-cloud-firestore`` Python module.
+"""
+
+from __future__ import annotations
+from typing import Optional, Dict
+from firebase_admin import App
+from firebase_admin import _utils
+
+try:
+    from google.cloud import firestore
+    from google.cloud.firestore_v1.base_client import DEFAULT_DATABASE
+    existing = globals().keys()
+    for key, value in firestore.__dict__.items():
+        if not key.startswith('_') and key not in existing:
+            globals()[key] = value
+except ImportError as error:
+    raise ImportError('Failed to import the Cloud Firestore library for Python. Make sure '
+                      'to install the "google-cloud-firestore" module.') from error
+
+
+_FIRESTORE_ASYNC_ATTRIBUTE: str = '_firestore_async'
+
+
+def client(app: Optional[App] = None, database_id: Optional[str] = None) -> firestore.AsyncClient:
+    """Returns an async client that can be used to interact with Google Cloud Firestore.
+
+    Args:
+        app: An App instance (optional).
+        database_id: The database ID of the Google Cloud Firestore database to be used.
+            Defaults to the default Firestore database ID if not specified or an empty string
+            (optional).
+
+    Returns:
+        google.cloud.firestore.Firestore_Async: A `Firestore Async Client`_.
+ + Raises: + ValueError: If the specified database ID is not a valid string, or if a project ID is not + specified either via options, credentials or environment variables, or if the specified + project ID is not a valid string. + + .. _Firestore Async Client: https://cloud.google.com/python/docs/reference/firestore/latest/\ + google.cloud.firestore_v1.async_client.AsyncClient + """ + # Validate database_id + if database_id is not None and not isinstance(database_id, str): + raise ValueError(f'database_id "{database_id}" must be a string or None.') + + fs_service = _utils.get_app_service(app, _FIRESTORE_ASYNC_ATTRIBUTE, _FirestoreAsyncService) + return fs_service.get_client(database_id) + +class _FirestoreAsyncService: + """Service that maintains a collection of firestore async clients.""" + + def __init__(self, app: App) -> None: + self._app: App = app + self._clients: Dict[str, firestore.AsyncClient] = {} + + def get_client(self, database_id: Optional[str]) -> firestore.AsyncClient: + """Creates an async client based on the database_id. These clients are cached.""" + database_id = database_id or DEFAULT_DATABASE + if database_id not in self._clients: + # Create a new client and cache it in _clients + credentials = self._app.credential.get_credential() + project = self._app.project_id + if not project: + raise ValueError( + 'Project ID is required to access Firestore. Either set the projectId option, ' + 'or use service account credentials. Alternatively, set the ' + 'GOOGLE_CLOUD_PROJECT environment variable.') + + fs_client = firestore.AsyncClient( + credentials=credentials, project=project, database=database_id) + self._clients[database_id] = fs_client + + return self._clients[database_id] diff --git a/firebase_admin/functions.py b/firebase_admin/functions.py new file mode 100644 index 000000000..66ba700b3 --- /dev/null +++ b/firebase_admin/functions.py @@ -0,0 +1,518 @@ +# Copyright 2024 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase Functions module.""" + +from __future__ import annotations +from datetime import datetime, timedelta, timezone +from urllib import parse +import re +import os +import json +from base64 import b64encode +from typing import Any, Optional, Dict +from dataclasses import dataclass + +from google.auth.compute_engine import Credentials as ComputeEngineCredentials +from google.auth.credentials import TokenState +from google.auth.exceptions import RefreshError +from google.auth.transport import requests as google_auth_requests + +import requests +import firebase_admin +from firebase_admin import App +from firebase_admin import _http_client +from firebase_admin import _utils + +_FUNCTIONS_ATTRIBUTE = '_functions' + +__all__ = [ + 'TaskOptions', + + 'task_queue', +] + + +_CLOUD_TASKS_API_RESOURCE_PATH = \ + 'projects/{project_id}/locations/{location_id}/queues/{resource_id}/tasks' +_CLOUD_TASKS_API_URL_FORMAT = \ + 'https://cloudtasks.googleapis.com/v2/' + _CLOUD_TASKS_API_RESOURCE_PATH +_FIREBASE_FUNCTION_URL_FORMAT = \ + 'https://{location_id}-{project_id}.cloudfunctions.net/{resource_id}' +_EMULATOR_HOST_ENV_VAR = 'CLOUD_TASKS_EMULATOR_HOST' +_EMULATED_SERVICE_ACCOUNT_DEFAULT = 'emulated-service-acct@email.com' + +_FUNCTIONS_HEADERS = { + 'X-GOOG-API-FORMAT-VERSION': '2', + 'X-FIREBASE-CLIENT': f'fire-admin-python/{firebase_admin.__version__}', +} + +# Default canonical location ID of the task queue. 
+_DEFAULT_LOCATION = 'us-central1' + +def _get_emulator_host() -> Optional[str]: + emulator_host = os.environ.get(_EMULATOR_HOST_ENV_VAR) + if emulator_host: + if '//' in emulator_host: + raise ValueError( + f'Invalid {_EMULATOR_HOST_ENV_VAR}: "{emulator_host}". It must follow format ' + '"host:port".') + return emulator_host + return None + + +def _get_functions_service(app) -> _FunctionsService: + return _utils.get_app_service(app, _FUNCTIONS_ATTRIBUTE, _FunctionsService) + +def task_queue( + function_name: str, + extension_id: Optional[str] = None, + app: Optional[App] = None + ) -> TaskQueue: + """Creates a reference to a TaskQueue for a given function name. + + The function name can be either: + 1. A fully qualified function resource name: + `projects/{project-id}/locations/{location-id}/functions/{function-name}` + + 2. A partial resource name with location and function name, in which case + the runtime project ID is used: + `locations/{location-id}/functions/{function-name}` + + 3. A partial function name, in which case the runtime project ID and the + default location, `us-central1`, is used: + `{function-name}` + + Args: + function_name: Name of the function. + extension_id: Firebase extension ID (optional). + app: An App instance (optional). + + Returns: + TaskQueue: A TaskQueue instance. + + Raises: + ValueError: If the input arguments are invalid. + """ + return _get_functions_service(app).task_queue(function_name, extension_id) + +class _FunctionsService: + """Service class that implements Firebase Functions functionality.""" + def __init__(self, app: App): + self._project_id = app.project_id + if not self._project_id: + raise ValueError( + 'Project ID is required to access the Cloud Functions service. Either set the ' + 'projectId option, or use service account credentials. 
Alternatively, set the ' + 'GOOGLE_CLOUD_PROJECT environment variable.') + + self._emulator_host = _get_emulator_host() + if self._emulator_host: + self._credential = _utils.EmulatorAdminCredentials() + else: + self._credential = app.credential.get_credential() + + self._http_client = _http_client.JsonHttpClient(credential=self._credential) + + def task_queue(self, function_name: str, extension_id: Optional[str] = None) -> TaskQueue: + """Creates a TaskQueue instance.""" + return TaskQueue( + function_name, extension_id, self._project_id, self._credential, self._http_client, + self._emulator_host) + + @classmethod + def handle_functions_error(cls, error: Any): + """Handles errors received from the Cloud Functions API.""" + + return _utils.handle_platform_error_from_requests(error) + +class TaskQueue: + """TaskQueue class that implements Firebase Cloud Tasks Queues functionality.""" + def __init__( + self, + function_name: str, + extension_id: Optional[str], + project_id, + credential, + http_client, + emulator_host: Optional[str] = None + ) -> None: + + # Validate function_name + _Validators.check_non_empty_string('function_name', function_name) + + self._project_id = project_id + self._credential = credential + self._http_client = http_client + self._emulator_host = emulator_host + self._function_name = function_name + self._extension_id = extension_id + # Parse resources from function_name + self._resource = self._parse_resource_name(self._function_name, 'functions') + + # Apply defaults and validate resource_id + self._resource.project_id = self._resource.project_id or self._project_id + self._resource.location_id = self._resource.location_id or _DEFAULT_LOCATION + _Validators.check_non_empty_string('resource.resource_id', self._resource.resource_id) + # Validate extension_id if provided and edit resources depending + if self._extension_id is not None: + _Validators.check_non_empty_string('extension_id', self._extension_id) + self._resource.resource_id = 
f'ext-{self._extension_id}-{self._resource.resource_id}' + + + def enqueue(self, task_data: Any, opts: Optional[TaskOptions] = None) -> str: + """Creates a task and adds it to the queue. Tasks cannot be updated after creation. + + This action requires `cloudtasks.tasks.create` IAM permission on the service account. + + Args: + task_data: The data payload of the task. + opts: Options when enqueuing a new task (optional). + + Raises: + FirebaseError: If an error occurs while requesting the task to be queued by + the Cloud Functions service. + ValueError: If the input arguments are invalid. + + Returns: + str: The ID of the task relative to this queue. + """ + task = self._validate_task_options(task_data, self._resource, opts) + emulator_url = self._get_emulator_url(self._resource) + service_url = emulator_url or self._get_url(self._resource, _CLOUD_TASKS_API_URL_FORMAT) + task_payload = self._update_task_payload(task, self._resource, self._extension_id) + try: + resp = self._http_client.body( + 'post', + url=service_url, + headers=_FUNCTIONS_HEADERS, + json={'task': task_payload.to_api_dict()} + ) + if self._is_emulated(): + # Emulator returns a response with format {task: {name: }} + # The task name also has an extra '/' at the start compared to prod + task_info = resp.get('task') or {} + task_name = task_info.get('name') + if task_name: + task_name = task_name[1:] + else: + # Production returns a response with format {name: } + task_name = resp.get('name') + task_resource = \ + self._parse_resource_name(task_name, f'queues/{self._resource.resource_id}/tasks') + return task_resource.resource_id + except requests.exceptions.RequestException as error: + raise _FunctionsService.handle_functions_error(error) + + def delete(self, task_id: str) -> None: + """Deletes an enqueued task if it has not yet started. + + This action requires `cloudtasks.tasks.delete` IAM permission on the service account. + + Args: + task_id: The ID of the task relative to this queue. 
+ + Raises: + FirebaseError: If an error occurs while requesting the task to be deleted by + the Cloud Functions service. + ValueError: If the input arguments are invalid. + """ + _Validators.check_non_empty_string('task_id', task_id) + emulator_url = self._get_emulator_url(self._resource) + if emulator_url: + service_url = emulator_url + f'/{task_id}' + else: + service_url = self._get_url(self._resource, _CLOUD_TASKS_API_URL_FORMAT + f'/{task_id}') + try: + self._http_client.body( + 'delete', + url=service_url, + headers=_FUNCTIONS_HEADERS, + ) + except requests.exceptions.RequestException as error: + raise _FunctionsService.handle_functions_error(error) + + + def _parse_resource_name(self, resource_name: str, resource_id_key: str) -> Resource: + """Parses a full or partial resource path into a ``Resource``.""" + if '/' not in resource_name: + return Resource(resource_id=resource_name) + + reg = f'^(projects/([^/]+)/)?locations/([^/]+)/{resource_id_key}/([^/]+)$' + match = re.search(reg, resource_name) + if match is None: + raise ValueError('Invalid resource name format.') + return Resource(project_id=match[2], location_id=match[3], resource_id=match[4]) + + def _get_url(self, resource: Resource, url_format: str) -> str: + """Generates url path from a ``Resource`` and url format string.""" + return url_format.format( + project_id=resource.project_id, + location_id=resource.location_id, + resource_id=resource.resource_id) + + def _validate_task_options( + self, + data: Any, + resource: Resource, + opts: Optional[TaskOptions] = None + ) -> Task: + """Validate and create a Task from optional ``TaskOptions``.""" + task_http_request = { + 'url': '', + 'oidcToken': { + 'serviceAccountEmail': '' + }, + 'body': b64encode(json.dumps(data).encode()).decode(), + 'headers': { + 'Content-Type': 'application/json', + } + } + task = Task(http_request=task_http_request) + + if opts is not None: + if opts.headers is not None: + task.http_request['headers'] = 
{**task.http_request['headers'], **opts.headers} + if opts.schedule_time is not None and opts.schedule_delay_seconds is not None: + raise ValueError( + 'Both schedule_delay_seconds and schedule_time cannot be set at the same time.') + if opts.schedule_time is not None and opts.schedule_delay_seconds is None: + if not isinstance(opts.schedule_time, datetime): + raise ValueError('schedule_time should be UTC datetime.') + task.schedule_time = opts.schedule_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + if opts.schedule_delay_seconds is not None and opts.schedule_time is None: + if not isinstance(opts.schedule_delay_seconds, int) \ + or opts.schedule_delay_seconds < 0: + raise ValueError('schedule_delay_seconds should be positive int.') + schedule_time = ( + datetime.now(timezone.utc) + timedelta(seconds=opts.schedule_delay_seconds)) + task.schedule_time = schedule_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + if opts.dispatch_deadline_seconds is not None: + if not isinstance(opts.dispatch_deadline_seconds, int) \ + or opts.dispatch_deadline_seconds < 15 \ + or opts.dispatch_deadline_seconds > 1800: + raise ValueError( + 'dispatch_deadline_seconds should be int in the range of 15s to ' + '1800s (30 mins).') + task.dispatch_deadline = f'{opts.dispatch_deadline_seconds}s' + if opts.task_id is not None: + if not _Validators.is_task_id(opts.task_id): + raise ValueError( + 'task_id can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-)' + ', or underscores (_). 
The maximum length is 500 characters.')
+                task.name = self._get_url(
+                    resource, _CLOUD_TASKS_API_RESOURCE_PATH + f'/{opts.task_id}')
+            if opts.uri is not None:
+                if not _Validators.is_url(opts.uri):
+                    raise ValueError(
+                        'uri must be a valid RFC3986 URI string using the https or http schema.')
+                task.http_request['url'] = opts.uri
+        return task
+
+    def _update_task_payload(self, task: Task, resource: Resource, extension_id: str) -> Task:
+        """Prepares task to be sent with credentials."""
+        # Get function url from task or generate from resources
+        if not _Validators.is_non_empty_string(task.http_request['url']):
+            if self._is_emulated():
+                task.http_request['url'] = ''
+            else:
+                task.http_request['url'] = self._get_url(resource, _FIREBASE_FUNCTION_URL_FORMAT)
+
+        # Refresh the credential to ensure all attributes (e.g. service_account_email, id_token)
+        # are populated, preventing cold start errors.
+        if self._credential.token_state != TokenState.FRESH:
+            try:
+                self._credential.refresh(google_auth_requests.Request())
+            except RefreshError as err:
+                raise ValueError(f'Initial task payload credential refresh failed: {err}') from err
+
+        # If extension id is provided, it implies that it is being run from a deployed extension.
+        # Meaning that its credential should be a Compute Engine Credential.
+        if _Validators.is_non_empty_string(extension_id) and \
+            isinstance(self._credential, ComputeEngineCredentials):
+            id_token = self._credential.token
+            task.http_request['headers'] = \
+                {**task.http_request['headers'], 'Authorization': f'Bearer {id_token}'}
+            # Delete oidc token
+            del task.http_request['oidcToken']
+        else:
+            try:
+                task.http_request['oidcToken'] = \
+                    {'serviceAccountEmail': self._credential.service_account_email}
+            except AttributeError as error:
+                if self._is_emulated():
+                    task.http_request['oidcToken'] = \
+                        {'serviceAccountEmail': _EMULATED_SERVICE_ACCOUNT_DEFAULT}
+                else:
+                    raise ValueError(
+                        'Failed to determine service account. 
Initialize the SDK with service ' + 'account credentials or set service account ID as an app option.' + ) from error + return task + + def _get_emulator_url(self, resource: Resource): + if self._emulator_host: + emulator_url_format = f'http://{self._emulator_host}/' + _CLOUD_TASKS_API_RESOURCE_PATH + url = self._get_url(resource, emulator_url_format) + return url + return None + + def _is_emulated(self): + return self._emulator_host is not None + + +class _Validators: + """A collection of data validation utilities.""" + @classmethod + def check_non_empty_string(cls, label: str, value: Any): + """Checks if given value is a non-empty string and throws error if not.""" + if not isinstance(value, str): + raise ValueError(f'{label} "{value}" must be a string.') + if value == '': + raise ValueError(f'{label} "{value}" must be a non-empty string.') + + @classmethod + def is_non_empty_string(cls, value: Any): + """Checks if given value is a non-empty string and returns bool.""" + if not isinstance(value, str) or value == '': + return False + return True + + @classmethod + def is_task_id(cls, task_id: Any): + """Checks if given value is a valid task id.""" + reg = '^[A-Za-z0-9_-]+$' + if re.match(reg, task_id) is not None and len(task_id) <= 500: + return True + return False + + @classmethod + def is_url(cls, url: Any): + """Checks if given value is a valid url.""" + if not isinstance(url, str): + return False + try: + parsed = parse.urlparse(url) + if not parsed.netloc or parsed.scheme not in ['http', 'https']: + return False + return True + except Exception: # pylint: disable=broad-except + return False + + +@dataclass +class TaskOptions: + """Task Options that can be applied to a Task. + + Args: + schedule_delay_seconds: The number of seconds after the current time at which to attempt or + retry the task. Should only be set if ``schedule_time`` is not set. + + schedule_time: The time when the task is scheduled to be attempted or retried. 
Should only + be set if ``schedule_delay_seconds`` is not set. + + dispatch_deadline_seconds: The deadline for requests sent to the worker. If the worker does + not respond by this deadline then the request is cancelled and the attempt is marked as + a ``DEADLINE_EXCEEDED`` failure. Cloud Tasks will retry the task according to the + ``RetryConfig``. The default is 10 minutes. The deadline must be in the range of 15 + seconds and 30 minutes (1800 seconds). + + task_id: The ID to use for the enqueued task. If not provided, one will be automatically + generated. + + If provided, an explicitly specified task ID enables task de-duplication. + Task IDs should be strings that contain only letters ([A-Za-z]), numbers ([0-9]), + hyphens (-), and underscores (_) with a maximum length of 500 characters. If a task's + ID is identical to that of an existing task or a task that was deleted or executed + recently then the call will throw an error with code `functions/task-already-exists`. + Another task with the same ID can't be created for ~1hour after the original task was + deleted or executed. + + Because there is an extra lookup cost to identify duplicate task IDs, setting ID + significantly increases latency. + + Also, note that the infrastructure relies on an approximately uniform distribution + of task IDs to store and serve tasks efficiently. For this reason, using hashed strings + for the task ID or for the prefix of the task ID is recommended. Choosing task IDs that + are sequential or have sequential prefixes, for example using a timestamp, causes an + increase in latency and error rates in all task commands. + + Push IDs from the Firebase Realtime Database make poor IDs because they are based on + timestamps and will cause contention (slowdowns) in your task queue. Reversed push IDs + however form a perfect distribution and are an ideal key. 
To reverse a string in Python + use ``reversedString = someString[::-1]`` + + headers: HTTP request headers to include in the request to the task queue function. These + headers represent a subset of the headers that will accompany the task's HTTP request. + Some HTTP request headers will be ignored or replaced: `Authorization`, `Host`, + `Content-Length`, `User-Agent` and others cannot be overridden. + + A complete list of these ignored or replaced headers can be found in the following + definition of the HttpRequest.headers property: + https://cloud.google.com/tasks/docs/reference/rest/v2/projects.locations.queues.tasks#httprequest + + By default, Content-Type is set to 'application/json'. + + The size of the headers must be less than 80KB. + + uri: The full URL that the request will be sent to. Must be a valid RFC3986 https or + http URL. + """ + schedule_delay_seconds: Optional[int] = None + schedule_time: Optional[datetime] = None + dispatch_deadline_seconds: Optional[int] = None + task_id: Optional[str] = None + headers: Optional[Dict[str, str]] = None + uri: Optional[str] = None + +@dataclass +class Task: + """Contains the relevant fields for enqueueing tasks that trigger Cloud Functions. + + This is a limited subset of the Cloud Functions `Task` resource. See the following + page for definitions of this class's properties: + https://cloud.google.com/tasks/docs/reference/rest/v2/projects.locations.queues.tasks#resource:-task + + Args: + httpRequest: The request to be made by the task worker. + name: The name of the function. See the Cloud docs for the format of this property. + schedule_time: The time when the task is scheduled to be attempted or retried. + dispatch_deadline: The deadline for requests sent to the worker. 
+ """ + http_request: Dict[str, Optional[str | dict]] + name: Optional[str] = None + schedule_time: Optional[str] = None + dispatch_deadline: Optional[str] = None + + def to_api_dict(self) -> dict: + """Converts the Task object to a dictionary suitable for the Cloud Tasks API.""" + return { + 'httpRequest': self.http_request, + 'name': self.name, + 'scheduleTime': self.schedule_time, + 'dispatchDeadline': self.dispatch_deadline, + } + +@dataclass +class Resource: + """Contains the parsed address of a resource. + + Args: + resource_id: The ID of the resource. + project_id: The project ID of the resource. + location_id: The location ID of the resource. + """ + resource_id: str + project_id: Optional[str] = None + location_id: Optional[str] = None diff --git a/firebase_admin/instance_id.py b/firebase_admin/instance_id.py new file mode 100644 index 000000000..812daf40b --- /dev/null +++ b/firebase_admin/instance_id.py @@ -0,0 +1,99 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase Instance ID module. + +This module enables deleting instance IDs associated with Firebase projects. 
+""" + +import requests + +from firebase_admin import _http_client +from firebase_admin import _utils + + +_IID_SERVICE_URL = 'https://console.firebase.google.com/v1/' +_IID_ATTRIBUTE = '_iid' + + +def _get_iid_service(app): + return _utils.get_app_service(app, _IID_ATTRIBUTE, _InstanceIdService) + + +def delete_instance_id(instance_id, app=None): + """Deletes the specified instance ID and the associated data from Firebase. + + Note that Google Analytics for Firebase uses its own form of Instance ID to + keep track of analytics data. Therefore deleting a regular Instance ID does + not delete Analytics data. See `Delete an Instance ID`_ for more information. + + Args: + instance_id: A non-empty instance ID string. + app: An App instance (optional). + + Raises: + InstanceIdError: If an error occurs while invoking the backend instance ID service. + ValueError: If the specified instance ID or app is invalid. + + .. _Delete an Instance ID: https://firebase.google.com/support/privacy\ + /manage-iids#delete_an_instance_id + """ + _get_iid_service(app).delete_instance_id(instance_id) + + +class _InstanceIdService: + """Provides methods for interacting with the remote instance ID service.""" + + error_codes = { + 400: 'Malformed instance ID argument.', + 401: 'Request not authorized.', + 403: 'Project does not match instance ID or the client does not have ' + 'sufficient privileges.', + 404: 'Failed to find the instance ID.', + 409: 'Already deleted.', + 429: 'Request throttled out by the backend server.', + 500: 'Internal server error.', + 503: 'Backend servers are over capacity. Try again later.' + } + + def __init__(self, app): + project_id = app.project_id + if not project_id: + raise ValueError( + 'Project ID is required to access Instance ID service. Either set the projectId ' + 'option, or use service account credentials. 
Alternatively, set the ' + 'GOOGLE_CLOUD_PROJECT environment variable.') + self._project_id = project_id + self._client = _http_client.JsonHttpClient( + credential=app.credential.get_credential(), base_url=_IID_SERVICE_URL) + + def delete_instance_id(self, instance_id): + if not isinstance(instance_id, str) or not instance_id: + raise ValueError('Instance ID must be a non-empty string.') + path = f'project/{self._project_id}/instanceId/{instance_id}' + try: + self._client.request('delete', path) + except requests.exceptions.RequestException as error: + msg = self._extract_message(instance_id, error) + raise _utils.handle_requests_error(error, msg) + + def _extract_message(self, instance_id, error): + if error.response is None: + return None + status = error.response.status_code + msg = self.error_codes.get(status) + if msg: + return f'Instance ID "{instance_id}": {msg}' + + return f'Instance ID "{instance_id}": {error}' diff --git a/firebase_admin/messaging.py b/firebase_admin/messaging.py new file mode 100644 index 000000000..749044436 --- /dev/null +++ b/firebase_admin/messaging.py @@ -0,0 +1,607 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Firebase Cloud Messaging module.""" + +from __future__ import annotations +from typing import Any, Callable, Dict, List, Optional, cast +import concurrent.futures +import json +import asyncio +import logging +import requests +import httpx + +import firebase_admin +from firebase_admin import ( + _http_client, + _messaging_encoder, + _messaging_utils, + _utils, + exceptions, + App +) + +logger = logging.getLogger(__name__) + +_MESSAGING_ATTRIBUTE = '_messaging' + + +__all__ = [ + 'AndroidConfig', + 'AndroidFCMOptions', + 'AndroidNotification', + 'APNSConfig', + 'APNSFCMOptions', + 'APNSPayload', + 'Aps', + 'ApsAlert', + 'BatchResponse', + 'CriticalSound', + 'ErrorInfo', + 'FCMOptions', + 'LightSettings', + 'Message', + 'MulticastMessage', + 'Notification', + 'QuotaExceededError', + 'SenderIdMismatchError', + 'SendResponse', + 'ThirdPartyAuthError', + 'TopicManagementResponse', + 'UnregisteredError', + 'WebpushConfig', + 'WebpushFCMOptions', + 'WebpushNotification', + 'WebpushNotificationAction', + + 'send', + 'send_each', + 'send_each_async', + 'send_each_for_multicast', + 'send_each_for_multicast_async', + 'subscribe_to_topic', + 'unsubscribe_from_topic', +] + + +AndroidConfig = _messaging_utils.AndroidConfig +AndroidFCMOptions = _messaging_utils.AndroidFCMOptions +AndroidNotification = _messaging_utils.AndroidNotification +APNSConfig = _messaging_utils.APNSConfig +APNSFCMOptions = _messaging_utils.APNSFCMOptions +APNSPayload = _messaging_utils.APNSPayload +Aps = _messaging_utils.Aps +ApsAlert = _messaging_utils.ApsAlert +CriticalSound = _messaging_utils.CriticalSound +FCMOptions = _messaging_utils.FCMOptions +LightSettings = _messaging_utils.LightSettings +Message = _messaging_encoder.Message +MulticastMessage = _messaging_encoder.MulticastMessage +Notification = _messaging_utils.Notification +WebpushConfig = _messaging_utils.WebpushConfig +WebpushFCMOptions = _messaging_utils.WebpushFCMOptions +WebpushNotification = _messaging_utils.WebpushNotification 
+WebpushNotificationAction = _messaging_utils.WebpushNotificationAction + +QuotaExceededError = _messaging_utils.QuotaExceededError +SenderIdMismatchError = _messaging_utils.SenderIdMismatchError +ThirdPartyAuthError = _messaging_utils.ThirdPartyAuthError +UnregisteredError = _messaging_utils.UnregisteredError + + +def _get_messaging_service(app: Optional[App]) -> _MessagingService: + return _utils.get_app_service(app, _MESSAGING_ATTRIBUTE, _MessagingService) + +def send(message: Message, dry_run: bool = False, app: Optional[App] = None) -> str: + """Sends the given message via Firebase Cloud Messaging (FCM). + + If the ``dry_run`` mode is enabled, the message will not be actually delivered to the + recipients. Instead, FCM performs all the usual validations and emulates the send operation. + + Args: + message: An instance of ``messaging.Message``. + dry_run: A boolean indicating whether to run the operation in dry run mode (optional). + app: An App instance (optional). + + Returns: + string: A message ID string that uniquely identifies the sent message. + + Raises: + FirebaseError: If an error occurs while sending the message to the FCM service. + ValueError: If the input arguments are invalid. + """ + return _get_messaging_service(app).send(message, dry_run) + +def send_each( + messages: List[Message], + dry_run: bool = False, + app: Optional[App] = None + ) -> BatchResponse: + """Sends each message in the given list via Firebase Cloud Messaging. + + If the ``dry_run`` mode is enabled, the message will not be actually delivered to the + recipients. Instead, FCM performs all the usual validations and emulates the send operation. + + Args: + messages: A list of ``messaging.Message`` instances. + dry_run: A boolean indicating whether to run the operation in dry run mode (optional). + app: An App instance (optional). + + Returns: + BatchResponse: A ``messaging.BatchResponse`` instance. 
+
+    Raises:
+        FirebaseError: If an error occurs while sending the message to the FCM service.
+        ValueError: If the input arguments are invalid.
+    """
+    return _get_messaging_service(app).send_each(messages, dry_run)
+
+async def send_each_async(
+        messages: List[Message],
+        dry_run: bool = False,
+        app: Optional[App] = None
+    ) -> BatchResponse:
+    """Sends each message in the given list asynchronously via Firebase Cloud Messaging.
+
+    If the ``dry_run`` mode is enabled, the message will not be actually delivered to the
+    recipients. Instead, FCM performs all the usual validations and emulates the send operation.
+
+    Args:
+        messages: A list of ``messaging.Message`` instances.
+        dry_run: A boolean indicating whether to run the operation in dry run mode (optional).
+        app: An App instance (optional).
+
+    Returns:
+        BatchResponse: A ``messaging.BatchResponse`` instance.
+
+    Raises:
+        FirebaseError: If an error occurs while sending the message to the FCM service.
+        ValueError: If the input arguments are invalid.
+    """
+    return await _get_messaging_service(app).send_each_async(messages, dry_run)
+
+async def send_each_for_multicast_async(
+        multicast_message: MulticastMessage,
+        dry_run: bool = False,
+        app: Optional[App] = None
+    ) -> BatchResponse:
+    """Sends the given multicast message to each token asynchronously via Firebase Cloud Messaging
+    (FCM).
+
+    If the ``dry_run`` mode is enabled, the message will not be actually delivered to the
+    recipients. Instead, FCM performs all the usual validations and emulates the send operation.
+
+    Args:
+        multicast_message: An instance of ``messaging.MulticastMessage``.
+        dry_run: A boolean indicating whether to run the operation in dry run mode (optional).
+        app: An App instance (optional).
+
+    Returns:
+        BatchResponse: A ``messaging.BatchResponse`` instance.
+
+    Raises:
+        FirebaseError: If an error occurs while sending the message to the FCM service.
+        ValueError: If the input arguments are invalid.
+ """ + if not isinstance(multicast_message, MulticastMessage): + raise ValueError('Message must be an instance of messaging.MulticastMessage class.') + messages = [Message( + data=multicast_message.data, + notification=multicast_message.notification, + android=multicast_message.android, + webpush=multicast_message.webpush, + apns=multicast_message.apns, + fcm_options=multicast_message.fcm_options, + token=token + ) for token in multicast_message.tokens] + return await _get_messaging_service(app).send_each_async(messages, dry_run) + +def send_each_for_multicast(multicast_message, dry_run=False, app=None): + """Sends the given mutlicast message to each token via Firebase Cloud Messaging (FCM). + + If the ``dry_run`` mode is enabled, the message will not be actually delivered to the + recipients. Instead, FCM performs all the usual validations and emulates the send operation. + + Args: + multicast_message: An instance of ``messaging.MulticastMessage``. + dry_run: A boolean indicating whether to run the operation in dry run mode (optional). + app: An App instance (optional). + + Returns: + BatchResponse: A ``messaging.BatchResponse`` instance. + + Raises: + FirebaseError: If an error occurs while sending the message to the FCM service. + ValueError: If the input arguments are invalid. + """ + if not isinstance(multicast_message, MulticastMessage): + raise ValueError('Message must be an instance of messaging.MulticastMessage class.') + messages = [Message( + data=multicast_message.data, + notification=multicast_message.notification, + android=multicast_message.android, + webpush=multicast_message.webpush, + apns=multicast_message.apns, + fcm_options=multicast_message.fcm_options, + token=token + ) for token in multicast_message.tokens] + return _get_messaging_service(app).send_each(messages, dry_run) + +def subscribe_to_topic(tokens, topic, app=None): + """Subscribes a list of registration tokens to an FCM topic. 
+ + Args: + tokens: A non-empty list of device registration tokens. List may not have more than 1000 + elements. + topic: Name of the topic to subscribe to. May contain the ``/topics/`` prefix. + app: An App instance (optional). + + Returns: + TopicManagementResponse: A ``TopicManagementResponse`` instance. + + Raises: + FirebaseError: If an error occurs while communicating with instance ID service. + ValueError: If the input arguments are invalid. + """ + return _get_messaging_service(app).make_topic_management_request( + tokens, topic, 'iid/v1:batchAdd') + +def unsubscribe_from_topic(tokens, topic, app=None): + """Unsubscribes a list of registration tokens from an FCM topic. + + Args: + tokens: A non-empty list of device registration tokens. List may not have more than 1000 + elements. + topic: Name of the topic to unsubscribe from. May contain the ``/topics/`` prefix. + app: An App instance (optional). + + Returns: + TopicManagementResponse: A ``TopicManagementResponse`` instance. + + Raises: + FirebaseError: If an error occurs while communicating with instance ID service. + ValueError: If the input arguments are invalid. 
+ """ + return _get_messaging_service(app).make_topic_management_request( + tokens, topic, 'iid/v1:batchRemove') + + +class ErrorInfo: + """An error encountered when performing a topic management operation.""" + + def __init__(self, index, reason): + self._index = index + self._reason = reason + + @property + def index(self): + """Index of the registration token to which this error is related to.""" + return self._index + + @property + def reason(self): + """String describing the nature of the error.""" + return self._reason + + +class TopicManagementResponse: + """The response received from a topic management operation.""" + + def __init__(self, resp): + if not isinstance(resp, dict) or 'results' not in resp: + raise ValueError(f'Unexpected topic management response: {resp}.') + self._success_count = 0 + self._failure_count = 0 + self._errors = [] + for index, result in enumerate(resp['results']): + if 'error' in result: + self._failure_count += 1 + self._errors.append(ErrorInfo(index, result['error'])) + else: + self._success_count += 1 + + @property + def success_count(self): + """Number of tokens that were successfully subscribed or unsubscribed.""" + return self._success_count + + @property + def failure_count(self): + """Number of tokens that could not be subscribed or unsubscribed due to errors.""" + return self._failure_count + + @property + def errors(self): + """A list of ``messaging.ErrorInfo`` objects (possibly empty).""" + return self._errors + + +class BatchResponse: + """The response received from a batch request to the FCM API.""" + + def __init__(self, responses: List[SendResponse]) -> None: + self._responses = responses + self._success_count = sum(1 for resp in responses if resp.success) + + @property + def responses(self) -> List[SendResponse]: + """A list of ``messaging.SendResponse`` objects (possibly empty).""" + return self._responses + + @property + def success_count(self) -> int: + return self._success_count + + @property + def 
failure_count(self) -> int: + return len(self.responses) - self.success_count + + +class SendResponse: + """The response received from an individual batched request to the FCM API.""" + + def __init__(self, resp, exception): + self._exception = exception + self._message_id = None + if resp: + self._message_id = resp.get('name', None) + + @property + def message_id(self): + """A message ID string that uniquely identifies the message.""" + return self._message_id + + @property + def success(self): + """A boolean indicating if the request was successful.""" + return self._message_id is not None and not self._exception + + @property + def exception(self): + """A ``FirebaseError`` if an error occurs while sending the message to the FCM service.""" + return self._exception + +class _MessagingService: + """Service class that implements Firebase Cloud Messaging (FCM) functionality.""" + + FCM_URL = 'https://fcm.googleapis.com/v1/projects/{0}/messages:send' + FCM_BATCH_URL = 'https://fcm.googleapis.com/batch' + IID_URL = 'https://iid.googleapis.com' + IID_HEADERS = {'access_token_auth': 'true'} + JSON_ENCODER = _messaging_encoder.MessageEncoder() + + FCM_ERROR_TYPES = { + 'APNS_AUTH_ERROR': ThirdPartyAuthError, + 'QUOTA_EXCEEDED': QuotaExceededError, + 'SENDER_ID_MISMATCH': SenderIdMismatchError, + 'THIRD_PARTY_AUTH_ERROR': ThirdPartyAuthError, + 'UNREGISTERED': UnregisteredError, + } + + def __init__(self, app: App) -> None: + project_id = app.project_id + if not project_id: + raise ValueError( + 'Project ID is required to access Cloud Messaging service. Either set the ' + 'projectId option, or use service account credentials. 
Alternatively, set the ' + 'GOOGLE_CLOUD_PROJECT environment variable.') + self._fcm_url = _MessagingService.FCM_URL.format(project_id) + self._fcm_headers = { + 'X-GOOG-API-FORMAT-VERSION': '2', + 'X-FIREBASE-CLIENT': f'fire-admin-python/{firebase_admin.__version__}', + } + timeout = app.options.get('httpTimeout', _http_client.DEFAULT_TIMEOUT_SECONDS) + self._credential = app.credential.get_credential() + self._client = _http_client.JsonHttpClient(credential=self._credential, timeout=timeout) + self._async_client = _http_client.HttpxAsyncClient( + credential=self._credential, timeout=timeout) + + @classmethod + def encode_message(cls, message): + if not isinstance(message, Message): + raise ValueError('Message must be an instance of messaging.Message class.') + return cls.JSON_ENCODER.default(message) + + def send(self, message: Message, dry_run: bool = False) -> str: + """Sends the given message to FCM via the FCM v1 API.""" + data = self._message_data(message, dry_run) + try: + resp = self._client.body( + 'post', + url=self._fcm_url, + headers=self._fcm_headers, + json=data + ) + except requests.exceptions.RequestException as error: + raise self._handle_fcm_error(error) + return cast(str, resp['name']) + + def send_each(self, messages: List[Message], dry_run: bool = False) -> BatchResponse: + """Sends the given messages to FCM via the FCM v1 API.""" + if not isinstance(messages, list): + raise ValueError('messages must be a list of messaging.Message instances.') + if len(messages) > 500: + raise ValueError('messages must not contain more than 500 elements.') + + def send_data(data): + try: + resp = self._client.body( + 'post', + url=self._fcm_url, + headers=self._fcm_headers, + json=data) + except requests.exceptions.RequestException as exception: + return SendResponse(resp=None, exception=self._handle_fcm_error(exception)) + return SendResponse(resp, exception=None) + + message_data = [self._message_data(message, dry_run) for message in messages] + try: + with 
concurrent.futures.ThreadPoolExecutor(max_workers=len(message_data)) as executor:
+                responses = list(executor.map(send_data, message_data))
+                return BatchResponse(responses)
+        except Exception as error:
+            raise exceptions.UnknownError(
+                message=f'Unknown error while making remote service calls: {error}',
+                cause=error)
+
+    async def send_each_async(self, messages: List[Message], dry_run: bool = False) -> BatchResponse:
+        """Sends the given messages to FCM via the FCM v1 API."""
+        if not isinstance(messages, list):
+            raise ValueError('messages must be a list of messaging.Message instances.')
+        if len(messages) > 500:
+            raise ValueError('messages must not contain more than 500 elements.')
+
+        async def send_data(data):
+            try:
+                resp = await self._async_client.request(
+                    'post',
+                    url=self._fcm_url,
+                    headers=self._fcm_headers,
+                    json=data)
+            except httpx.HTTPError as exception:
+                return SendResponse(resp=None, exception=self._handle_fcm_httpx_error(exception))
+            # Catch errors caused by the requests library during authorization
+            except requests.exceptions.RequestException as exception:
+                return SendResponse(resp=None, exception=self._handle_fcm_error(exception))
+            return SendResponse(resp.json(), exception=None)
+
+        message_data = [self._message_data(message, dry_run) for message in messages]
+        try:
+            responses = await asyncio.gather(*[send_data(message) for message in message_data])
+            return BatchResponse(responses)
+        except Exception as error:
+            raise exceptions.UnknownError(
+                message=f'Unknown error while making remote service calls: {error}',
+                cause=error)
+
+    def make_topic_management_request(self, tokens, topic, operation):
+        """Invokes the IID service for topic management functionality."""
+        if isinstance(tokens, str):
+            tokens = [tokens]
+        if not isinstance(tokens, list) or not tokens:
+            raise ValueError('Tokens must be a string or a non-empty list of strings.')
+        invalid_str = [t for t in tokens if not isinstance(t, str) or not t]
+        if invalid_str:
+            
raise ValueError('Tokens must be non-empty strings.') + + if not isinstance(topic, str) or not topic: + raise ValueError('Topic must be a non-empty string.') + if not topic.startswith('/topics/'): + topic = f'/topics/{topic}' + data = { + 'to': topic, + 'registration_tokens': tokens, + } + url = f'{_MessagingService.IID_URL}/{operation}' + try: + resp = self._client.body( + 'post', + url=url, + json=data, + headers=_MessagingService.IID_HEADERS + ) + except requests.exceptions.RequestException as error: + raise self._handle_iid_error(error) + return TopicManagementResponse(resp) + + def _message_data(self, message, dry_run): + data = {'message': _MessagingService.encode_message(message)} + if dry_run: + data['validate_only'] = True + return data + + def _postproc(self, _, body): + """Handle response from batch API request.""" + # This only gets called for 2xx responses. + return json.loads(body.decode()) + + def _handle_fcm_error(self, error): + """Handles errors received from the FCM API.""" + return _utils.handle_platform_error_from_requests( + error, _MessagingService._build_fcm_error_requests) + + def _handle_fcm_httpx_error(self, error: httpx.HTTPError) -> exceptions.FirebaseError: + """Handles errors received from the FCM API.""" + return _utils.handle_platform_error_from_httpx( + error, _MessagingService._build_fcm_error_httpx) + + def _handle_iid_error(self, error): + """Handles errors received from the Instance ID API.""" + if error.response is None: + raise _utils.handle_requests_error(error) + + data = {} + try: + parsed_body = error.response.json() + if isinstance(parsed_body, dict): + data = parsed_body + except ValueError: + pass + + # IID error response format: {"error": "ErrorCode"} + code = data.get('error') + msg = None + if code: + msg = f'Error while calling the IID service: {code}' + else: + msg = ( + f'Unexpected HTTP response with status: {error.response.status_code}; body: ' + f'{error.response.content.decode()}' + ) + + return 
_utils.handle_requests_error(error, msg) + + def close(self) -> None: + asyncio.run(self._async_client.aclose()) + + @classmethod + def _build_fcm_error_requests(cls, error, message, error_dict): + """Parses an error response from the FCM API and creates a FCM-specific exception if + appropriate.""" + exc_type = cls._build_fcm_error(error_dict) + # pylint: disable=not-callable + return exc_type(message, cause=error, http_response=error.response) if exc_type else None + + @classmethod + def _build_fcm_error_httpx( + cls, + error: httpx.HTTPError, + message: str, + error_dict: Optional[Dict[str, Any]] + ) -> Optional[exceptions.FirebaseError]: + """Parses a httpx error response from the FCM API and creates a FCM-specific exception if + appropriate.""" + exc_type = cls._build_fcm_error(error_dict) + if isinstance(error, httpx.HTTPStatusError): + # pylint: disable=not-callable + return exc_type( + message, cause=error, http_response=error.response) if exc_type else None + # pylint: disable=not-callable + return exc_type(message, cause=error) if exc_type else None + + @classmethod + def _build_fcm_error( + cls, + error_dict: Optional[Dict[str, Any]] + ) -> Optional[Callable[..., exceptions.FirebaseError]]: + """Parses an error response to determine the appropriate FCM-specific error type.""" + if not error_dict: + return None + fcm_code = None + for detail in error_dict.get('details', []): + if detail.get('@type') == 'type.googleapis.com/google.firebase.fcm.v1.FcmError': + fcm_code = detail.get('errorCode') + break + return _MessagingService.FCM_ERROR_TYPES.get(fcm_code) if fcm_code else None diff --git a/firebase_admin/ml.py b/firebase_admin/ml.py new file mode 100644 index 000000000..3a77dd05f --- /dev/null +++ b/firebase_admin/ml.py @@ -0,0 +1,939 @@ +# Copyright 2019 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Firebase ML module.
+
+This module contains functions for creating, updating, getting, listing,
+deleting, publishing and unpublishing Firebase ML models.
+"""
+
+
+import datetime
+import re
+import time
+import os
+from urllib import parse
+
+import requests
+
+import firebase_admin
+from firebase_admin import _http_client
+from firebase_admin import _utils
+from firebase_admin import exceptions
+
+# pylint: disable=import-error,no-member
+try:
+    from firebase_admin import storage
+    _GCS_ENABLED = True
+except ImportError:
+    _GCS_ENABLED = False
+
+# pylint: disable=import-error,no-member
+try:
+    import tensorflow as tf
+    _TF_ENABLED = True
+except ImportError:
+    _TF_ENABLED = False
+
+_ML_ATTRIBUTE = '_ml'
+_MAX_PAGE_SIZE = 100
+_MODEL_ID_PATTERN = re.compile(r'^[A-Za-z0-9_-]{1,60}$')
+_DISPLAY_NAME_PATTERN = re.compile(r'^[A-Za-z0-9_-]{1,32}$')
+_TAG_PATTERN = re.compile(r'^[A-Za-z0-9_-]{1,32}$')
+_GCS_TFLITE_URI_PATTERN = re.compile(
+    r'^gs://(?P<bucket_name>[a-z0-9_.-]{3,63})/(?P<blob_name>.+)$')
+_RESOURCE_NAME_PATTERN = re.compile(
+    r'^projects/(?P<project_id>[a-z0-9-]{6,30})/models/(?P<model_id>[A-Za-z0-9_-]{1,60})$')
+_OPERATION_NAME_PATTERN = re.compile(
+    r'^projects/(?P<project_id>[a-z0-9-]{6,30})/operations/[^/]+$')
+
+
+def _get_ml_service(app):
+    """ Returns an _MLService instance for an App.
+
+    Args:
+        app: A Firebase App instance (or None to use the default App).
+
+    Returns:
+        _MLService: An _MLService for the specified App instance.
+
+    Raises:
+        ValueError: If the app argument is invalid.
+ """ + return _utils.get_app_service(app, _ML_ATTRIBUTE, _MLService) + + +def create_model(model, app=None): + """Creates a model in the current Firebase project. + + Args: + model: An ml.Model to create. + app: A Firebase app instance (or None to use the default app). + + Returns: + Model: The model that was created in Firebase ML. + """ + ml_service = _get_ml_service(app) + return Model.from_dict(ml_service.create_model(model), app=app) + + +def update_model(model, app=None): + """Updates a model's metadata or model file. + + Args: + model: The ml.Model to update. + app: A Firebase app instance (or None to use the default app). + + Returns: + Model: The updated model. + """ + ml_service = _get_ml_service(app) + return Model.from_dict(ml_service.update_model(model), app=app) + + +def publish_model(model_id, app=None): + """Publishes a Firebase ML model. + + A published model can be downloaded to client apps. + + Args: + model_id: The id of the model to publish. + app: A Firebase app instance (or None to use the default app). + + Returns: + Model: The published model. + """ + ml_service = _get_ml_service(app) + return Model.from_dict(ml_service.set_published(model_id, publish=True), app=app) + + +def unpublish_model(model_id, app=None): + """Unpublishes a Firebase ML model. + + Args: + model_id: The id of the model to unpublish. + app: A Firebase app instance (or None to use the default app). + + Returns: + Model: The unpublished model. + """ + ml_service = _get_ml_service(app) + return Model.from_dict(ml_service.set_published(model_id, publish=False), app=app) + + +def get_model(model_id, app=None): + """Gets the model specified by the given ID. + + Args: + model_id: The id of the model to get. + app: A Firebase app instance (or None to use the default app). + + Returns: + Model: The requested model. 
+ """ + ml_service = _get_ml_service(app) + return Model.from_dict(ml_service.get_model(model_id), app=app) + + +def list_models(list_filter=None, page_size=None, page_token=None, app=None): + """Lists the current project's models. + + Args: + list_filter: a list filter string such as ``tags:'tag_1'``. None will return all models. + page_size: A number between 1 and 100 inclusive that specifies the maximum + number of models to return per page. None for default. + page_token: A next page token returned from a previous page of results. None + for first page of results. + app: A Firebase app instance (or None to use the default app). + + Returns: + ListModelsPage: A (filtered) list of models. + """ + ml_service = _get_ml_service(app) + return ListModelsPage( + ml_service.list_models, list_filter, page_size, page_token, app=app) + + +def delete_model(model_id, app=None): + """Deletes a model from the current project. + + Args: + model_id: The id of the model you wish to delete. + app: A Firebase app instance (or None to use the default app). + """ + ml_service = _get_ml_service(app) + ml_service.delete_model(model_id) + + +class Model: + """A Firebase ML Model object. + + Args: + display_name: The display name of your model - used to identify your model in code. + tags: Optional list of strings associated with your model. Can be used in list queries. + model_format: A subclass of ModelFormat. (e.g. TFLiteFormat) Specifies the model details. 
+ """ + def __init__(self, display_name=None, tags=None, model_format=None): + self._app = None # Only needed for wait_for_unlo + self._data = {} + self._model_format = None + + if display_name is not None: + self.display_name = display_name + if tags is not None: + self.tags = tags + if model_format is not None: + self.model_format = model_format + + @classmethod + def from_dict(cls, data, app=None): + """Create an instance of the object from a dict.""" + data_copy = dict(data) + tflite_format = None + tflite_format_data = data_copy.pop('tfliteModel', None) + data_copy.pop('@type', None) # Returned by Operations. (Not needed) + if tflite_format_data: + tflite_format = TFLiteFormat.from_dict(tflite_format_data) + model = Model(model_format=tflite_format) + model._data = data_copy # pylint: disable=protected-access + model._app = app # pylint: disable=protected-access + return model + + def _update_from_dict(self, data): + copy = Model.from_dict(data) + self.model_format = copy.model_format + self._data = copy._data # pylint: disable=protected-access + + def __eq__(self, other): + if isinstance(other, self.__class__): + # pylint: disable=protected-access + return self._data == other._data and self._model_format == other._model_format + return False + + def __ne__(self, other): + return not self.__eq__(other) + + @property + def model_id(self): + """The model's ID, unique to the project.""" + if not self._data.get('name'): + return None + _, model_id = _validate_and_parse_name(self._data.get('name')) + return model_id + + @property + def display_name(self): + """The model's display name, used to refer to the model in code and in + the Firebase console.""" + return self._data.get('displayName') + + @display_name.setter + def display_name(self, display_name): + self._data['displayName'] = _validate_display_name(display_name) + return self + + @staticmethod + def _convert_to_millis(date_string): + if not date_string: + return None + format_str = '%Y-%m-%dT%H:%M:%S.%fZ' 
+ epoch = datetime.datetime.utcfromtimestamp(0) + datetime_object = datetime.datetime.strptime(date_string, format_str) + millis = int((datetime_object - epoch).total_seconds() * 1000) + return millis + + @property + def create_time(self): + """The time the model was created.""" + return Model._convert_to_millis(self._data.get('createTime', None)) + + @property + def update_time(self): + """The time the model was last updated.""" + return Model._convert_to_millis(self._data.get('updateTime', None)) + + @property + def validation_error(self): + """Validation error message.""" + return self._data.get('state', {}).get('validationError', {}).get('message') + + @property + def published(self): + """True if the model is published and available for clients to + download.""" + return bool(self._data.get('state', {}).get('published')) + + @property + def etag(self): + """The entity tag (ETag) of the model resource.""" + return self._data.get('etag') + + @property + def model_hash(self): + """SHA256 hash of the model binary.""" + return self._data.get('modelHash') + + @property + def tags(self): + """Tag strings, used for filtering query results.""" + return self._data.get('tags') + + @tags.setter + def tags(self, tags): + self._data['tags'] = _validate_tags(tags) + return self + + @property + def locked(self): + """True if the Model object is locked by an active operation.""" + return bool(self._data.get('activeOperations') and + len(self._data.get('activeOperations')) > 0) + + def wait_for_unlocked(self, max_time_seconds=None): + """Waits for the model to be unlocked. (All active operations complete) + + Args: + max_time_seconds: The maximum number of seconds to wait for the model to unlock. + (None for no limit) + + Raises: + exceptions.DeadlineExceeded: If max_time_seconds passed and the model is still locked. 
+ """ + if not self.locked: + return + ml_service = _get_ml_service(self._app) + op_name = self._data.get('activeOperations')[0].get('name') + model_dict = ml_service.handle_operation( + ml_service.get_operation(op_name), + wait_for_operation=True, + max_time_seconds=max_time_seconds) + self._update_from_dict(model_dict) + + @property + def model_format(self): + """The model's ``ModelFormat`` object, which represents the model's + format and storage location.""" + return self._model_format + + @model_format.setter + def model_format(self, model_format): + if model_format is not None: + _validate_model_format(model_format) + self._model_format = model_format #Can be None + return self + + def as_dict(self, for_upload=False): + """Returns a serializable representation of the object.""" + copy = dict(self._data) + if self._model_format: + copy.update(self._model_format.as_dict(for_upload=for_upload)) + return copy + + +class ModelFormat: + """Abstract base class representing a Model Format such as TFLite.""" + def as_dict(self, for_upload=False): + """Returns a serializable representation of the object.""" + raise NotImplementedError + + +class TFLiteFormat(ModelFormat): + """Model format representing a TFLite model. + + Args: + model_source: A TFLiteModelSource sub class. Specifies the details of the model source. 
+ """ + def __init__(self, model_source=None): + self._data = {} + self._model_source = None + + if model_source is not None: + self.model_source = model_source + + @classmethod + def from_dict(cls, data): + """Create an instance of the object from a dict.""" + data_copy = dict(data) + tflite_format = TFLiteFormat(model_source=cls._init_model_source(data_copy)) + tflite_format._data = data_copy # pylint: disable=protected-access + return tflite_format + + def __eq__(self, other): + if isinstance(other, self.__class__): + # pylint: disable=protected-access + return self._data == other._data and self._model_source == other._model_source + return False + + def __ne__(self, other): + return not self.__eq__(other) + + @staticmethod + def _init_model_source(data): + """Initialize the ML model source.""" + gcs_tflite_uri = data.pop('gcsTfliteUri', None) + if gcs_tflite_uri: + return TFLiteGCSModelSource(gcs_tflite_uri=gcs_tflite_uri) + return None + + @property + def model_source(self): + """The TF Lite model's location.""" + return self._model_source + + @model_source.setter + def model_source(self, model_source): + if model_source is not None: + if not isinstance(model_source, TFLiteModelSource): + raise TypeError('Model source must be a TFLiteModelSource object.') + self._model_source = model_source # Can be None + + @property + def size_bytes(self): + """The size in bytes of the TF Lite model.""" + return self._data.get('sizeBytes') + + def as_dict(self, for_upload=False): + """Returns a serializable representation of the object.""" + copy = dict(self._data) + if self._model_source: + copy.update(self._model_source.as_dict(for_upload=for_upload)) + return {'tfliteModel': copy} + + +class TFLiteModelSource: + """Abstract base class representing a model source for TFLite format models.""" + def as_dict(self, for_upload=False): + """Returns a serializable representation of the object.""" + raise NotImplementedError + + +class _CloudStorageClient: + """Cloud Storage 
helper class""" + + GCS_URI = 'gs://{0}/{1}' + BLOB_NAME = 'Firebase/ML/Models/{0}' + + @staticmethod + def _assert_gcs_enabled(): + if not _GCS_ENABLED: + raise ImportError('Failed to import the Cloud Storage library for Python. Make sure ' + 'to install the "google-cloud-storage" module.') + + @staticmethod + def _parse_gcs_tflite_uri(uri): + # GCS Bucket naming rules are complex. The regex is not comprehensive. + # See https://cloud.google.com/storage/docs/naming for full details. + matcher = _GCS_TFLITE_URI_PATTERN.match(uri) + if not matcher: + raise ValueError('GCS TFLite URI format is invalid.') + return matcher.group('bucket_name'), matcher.group('blob_name') + + @staticmethod + def upload(bucket_name, model_file_name, app): + """Upload a model file to the specified Storage bucket.""" + _CloudStorageClient._assert_gcs_enabled() + + file_name = os.path.basename(model_file_name) + bucket = storage.bucket(bucket_name, app=app) + blob_name = _CloudStorageClient.BLOB_NAME.format(file_name) + blob = bucket.blob(blob_name) + blob.upload_from_filename(model_file_name) + return _CloudStorageClient.GCS_URI.format(bucket.name, blob_name) + + @staticmethod + def sign_uri(gcs_tflite_uri, app): + """Makes the gcs_tflite_uri readable for GET for 10 minutes via signed_uri.""" + _CloudStorageClient._assert_gcs_enabled() + bucket_name, blob_name = _CloudStorageClient._parse_gcs_tflite_uri(gcs_tflite_uri) + bucket = storage.bucket(bucket_name, app=app) + blob = bucket.blob(blob_name) + return blob.generate_signed_url( + version='v4', + expiration=datetime.timedelta(minutes=10), + method='GET' + ) + + +class TFLiteGCSModelSource(TFLiteModelSource): + """TFLite model source representing a tflite model file stored in GCS.""" + + _STORAGE_CLIENT = _CloudStorageClient() + + def __init__(self, gcs_tflite_uri, app=None): + self._app = app + self._gcs_tflite_uri = _validate_gcs_tflite_uri(gcs_tflite_uri) + + def __eq__(self, other): + if isinstance(other, self.__class__): + return 
self._gcs_tflite_uri == other._gcs_tflite_uri # pylint: disable=protected-access + return False + + def __ne__(self, other): + return not self.__eq__(other) + + @classmethod + def from_tflite_model_file(cls, model_file_name, bucket_name=None, app=None): + """Uploads the model file to an existing Google Cloud Storage bucket. + + Args: + model_file_name: The name of the model file. + bucket_name: The name of an existing bucket. None to use the default bucket configured + in the app. + app: A Firebase app instance (or None to use the default app). + + Returns: + TFLiteGCSModelSource: The source created from the model_file + + Raises: + ImportError: If the Cloud Storage Library has not been installed. + """ + gcs_uri = TFLiteGCSModelSource._STORAGE_CLIENT.upload(bucket_name, model_file_name, app) + return TFLiteGCSModelSource(gcs_tflite_uri=gcs_uri, app=app) + + @staticmethod + def _assert_tf_enabled(): + if not _TF_ENABLED: + raise ImportError('Failed to import the tensorflow library for Python. Make sure ' + 'to install the tensorflow module.') + if not tf.version.VERSION.startswith('1.') and not tf.version.VERSION.startswith('2.'): + raise ImportError( + f'Expected tensorflow version 1.x or 2.x, but found {tf.version.VERSION}') + + @staticmethod + def _tf_convert_from_saved_model(saved_model_dir): + # Same for both v1.x and v2.x + converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir) + return converter.convert() + + @staticmethod + def _tf_convert_from_keras_model(keras_model): + """Converts the given Keras model into a TF Lite model.""" + # Version 1.x conversion function takes a model file. Version 2.x takes the model itself. 
+ if tf.version.VERSION.startswith('1.'): + keras_file = 'firebase_keras_model.h5' + tf.keras.models.save_model(keras_model, keras_file) + converter = tf.lite.TFLiteConverter.from_keras_model_file(keras_file) + else: + converter = tf.lite.TFLiteConverter.from_keras_model(keras_model) + + return converter.convert() + + @classmethod + def from_saved_model(cls, saved_model_dir, model_file_name='firebase_ml_model.tflite', + bucket_name=None, app=None): + """Creates a Tensor Flow Lite model from the saved model, and uploads the model to GCS. + + Args: + saved_model_dir: The saved model directory. + model_file_name: The name that the tflite model will be saved as in Cloud Storage. + bucket_name: The name of an existing bucket. None to use the default bucket configured + in the app. + app: Optional. A Firebase app instance (or None to use the default app) + + Returns: + TFLiteGCSModelSource: The source created from the saved_model_dir + + Raises: + ImportError: If the Tensor Flow or Cloud Storage Libraries have not been installed. + """ + TFLiteGCSModelSource._assert_tf_enabled() + tflite_model = TFLiteGCSModelSource._tf_convert_from_saved_model(saved_model_dir) + with open(model_file_name, 'wb') as model_file: + model_file.write(tflite_model) + return TFLiteGCSModelSource.from_tflite_model_file(model_file_name, bucket_name, app) + + @classmethod + def from_keras_model(cls, keras_model, model_file_name='firebase_ml_model.tflite', + bucket_name=None, app=None): + """Creates a Tensor Flow Lite model from the keras model, and uploads the model to GCS. + + Args: + keras_model: A tf.keras model. + model_file_name: The name that the tflite model will be saved as in Cloud Storage. + bucket_name: The name of an existing bucket. None to use the default bucket configured + in the app. + app: Optional. 
A Firebase app instance (or None to use the default app) + + Returns: + TFLiteGCSModelSource: The source created from the keras_model + + Raises: + ImportError: If the Tensor Flow or Cloud Storage Libraries have not been installed. + """ + TFLiteGCSModelSource._assert_tf_enabled() + tflite_model = TFLiteGCSModelSource._tf_convert_from_keras_model(keras_model) + with open(model_file_name, 'wb') as model_file: + model_file.write(tflite_model) + return TFLiteGCSModelSource.from_tflite_model_file(model_file_name, bucket_name, app) + + @property + def gcs_tflite_uri(self): + """URI of the model file in Cloud Storage.""" + return self._gcs_tflite_uri + + @gcs_tflite_uri.setter + def gcs_tflite_uri(self, gcs_tflite_uri): + self._gcs_tflite_uri = _validate_gcs_tflite_uri(gcs_tflite_uri) + + def _get_signed_gcs_tflite_uri(self): + """Signs the GCS uri, so the model file can be uploaded to Firebase ML and verified.""" + return TFLiteGCSModelSource._STORAGE_CLIENT.sign_uri(self._gcs_tflite_uri, self._app) + + def as_dict(self, for_upload=False): + """Returns a serializable representation of the object.""" + if for_upload: + return {'gcsTfliteUri': self._get_signed_gcs_tflite_uri()} + + return {'gcsTfliteUri': self._gcs_tflite_uri} + +class ListModelsPage: + """Represents a page of models in a Firebase project. + + Provides methods for traversing the models included in this page, as well as + retrieving subsequent pages of models. The iterator returned by + ``iterate_all()`` can be used to iterate through all the models in the + Firebase project starting from this page. 
+ """ + def __init__(self, list_models_func, list_filter, page_size, page_token, app): + self._list_models_func = list_models_func + self._list_filter = list_filter + self._page_size = page_size + self._page_token = page_token + self._app = app + self._list_response = list_models_func(list_filter, page_size, page_token) + + @property + def models(self): + """A list of Models from this page.""" + return [ + Model.from_dict(model, app=self._app) for model in self._list_response.get('models', []) + ] + + @property + def list_filter(self): + """The filter string used to filter the models.""" + return self._list_filter + + @property + def next_page_token(self): + """Token identifying the next page of results.""" + return self._list_response.get('nextPageToken', '') + + @property + def has_next_page(self): + """True if more pages are available.""" + return bool(self.next_page_token) + + def get_next_page(self): + """Retrieves the next page of models if available. + + Returns: + ListModelsPage: Next page of models, or None if this is the last page. + """ + if self.has_next_page: + return ListModelsPage( + self._list_models_func, + self._list_filter, + self._page_size, + self.next_page_token, + self._app) + return None + + def iterate_all(self): + """Retrieves an iterator for Models. + + Returned iterator will iterate through all the models in the Firebase + project starting from this page. The iterator will never buffer more than + one page of models in memory at a time. + + Returns: + iterator: An iterator of Model instances. + """ + return _ModelIterator(self) + + +class _ModelIterator: + """An iterator that allows iterating over models, one at a time. + + This implementation loads a page of models into memory, and iterates on them. + When the whole page has been traversed, it loads another page. This class + never keeps more than one page of entries in memory. 
+ """ + def __init__(self, current_page): + if not isinstance(current_page, ListModelsPage): + raise TypeError('Current page must be a ListModelsPage') + self._current_page = current_page + self._index = 0 + + def __next__(self): + if self._index == len(self._current_page.models): + if self._current_page.has_next_page: + self._current_page = self._current_page.get_next_page() + self._index = 0 + if self._index < len(self._current_page.models): + result = self._current_page.models[self._index] + self._index += 1 + return result + raise StopIteration + + def __iter__(self): + return self + + +def _validate_and_parse_name(name): + # The resource name is added automatically from API call responses. + # The only way it could be invalid is if someone tries to + # create a model from a dictionary manually and does it incorrectly. + matcher = _RESOURCE_NAME_PATTERN.match(name) + if not matcher: + raise ValueError('Model resource name format is invalid.') + return matcher.group('project_id'), matcher.group('model_id') + + +def _validate_model(model, update_mask=None): + if not isinstance(model, Model): + raise TypeError('Model must be an ml.Model.') + if update_mask is None and not model.display_name: + raise ValueError('Model must have a display name.') + + +def _validate_model_id(model_id): + if not _MODEL_ID_PATTERN.match(model_id): + raise ValueError('Model ID format is invalid.') + + +def _validate_operation_name(op_name): + if not _OPERATION_NAME_PATTERN.match(op_name): + raise ValueError('Operation name format is invalid.') + return op_name + + +def _validate_display_name(display_name): + if not _DISPLAY_NAME_PATTERN.match(display_name): + raise ValueError('Display name format is invalid.') + return display_name + + +def _validate_tags(tags): + if not isinstance(tags, list) or not \ + all(isinstance(tag, str) for tag in tags): + raise TypeError('Tags must be a list of strings.') + if not all(_TAG_PATTERN.match(tag) for tag in tags): + raise ValueError('Tag format is 
invalid.') + return tags + + +def _validate_gcs_tflite_uri(uri): + # GCS Bucket naming rules are complex. The regex is not comprehensive. + # See https://cloud.google.com/storage/docs/naming for full details. + if not _GCS_TFLITE_URI_PATTERN.match(uri): + raise ValueError('GCS TFLite URI format is invalid.') + return uri + + +def _validate_model_format(model_format): + if not isinstance(model_format, ModelFormat): + raise TypeError('Model format must be a ModelFormat object.') + return model_format + + +def _validate_list_filter(list_filter): + if list_filter is not None: + if not isinstance(list_filter, str): + raise TypeError('List filter must be a string or None.') + + +def _validate_page_size(page_size): + if page_size is not None: + if type(page_size) is not int: # pylint: disable=unidiomatic-typecheck + # Specifically type() to disallow boolean which is a subtype of int + raise TypeError('Page size must be a number or None.') + if page_size < 1 or page_size > _MAX_PAGE_SIZE: + raise ValueError( + f'Page size must be a positive integer between 1 and {_MAX_PAGE_SIZE}') + + +def _validate_page_token(page_token): + if page_token is not None: + if not isinstance(page_token, str): + raise TypeError('Page token must be a string or None.') + + +class _MLService: + """Firebase ML service.""" + + PROJECT_URL = 'https://firebaseml.googleapis.com/v1beta2/projects/{0}/' + OPERATION_URL = 'https://firebaseml.googleapis.com/v1beta2/' + POLL_EXPONENTIAL_BACKOFF_FACTOR = 1.5 + POLL_BASE_WAIT_TIME_SECONDS = 3 + + def __init__(self, app): + self._project_id = app.project_id + if not self._project_id: + raise ValueError( + 'Project ID is required to access ML service. 
Either set the ' + 'projectId option, or use service account credentials.') + self._project_url = _MLService.PROJECT_URL.format(self._project_id) + ml_headers = { + 'X-FIREBASE-CLIENT': f'fire-admin-python/{firebase_admin.__version__}', + } + self._client = _http_client.JsonHttpClient( + credential=app.credential.get_credential(), + headers=ml_headers, + base_url=self._project_url) + self._operation_client = _http_client.JsonHttpClient( + credential=app.credential.get_credential(), + headers=ml_headers, + base_url=_MLService.OPERATION_URL) + + def get_operation(self, op_name): + _validate_operation_name(op_name) + try: + return self._operation_client.body('get', url=op_name) + except requests.exceptions.RequestException as error: + raise _utils.handle_platform_error_from_requests(error) + + def _exponential_backoff(self, current_attempt, stop_time): + """Sleeps for the appropriate amount of time. Or throws deadline exceeded.""" + delay_factor = pow(_MLService.POLL_EXPONENTIAL_BACKOFF_FACTOR, current_attempt) + wait_time_seconds = delay_factor * _MLService.POLL_BASE_WAIT_TIME_SECONDS + + if stop_time is not None: + max_seconds_left = (stop_time - datetime.datetime.now()).total_seconds() + if max_seconds_left < 1: # allow a bit of time for rpc + raise exceptions.DeadlineExceededError('Polling max time exceeded.') + wait_time_seconds = min(wait_time_seconds, max_seconds_left - 1) + time.sleep(wait_time_seconds) + + def handle_operation(self, operation, wait_for_operation=False, max_time_seconds=None): + """Handles long running operations. + + Args: + operation: The operation to handle. + wait_for_operation: Should we allow polling for the operation to complete. + If no polling is requested, a locked model will be returned instead. + max_time_seconds: The maximum seconds to try polling for operation complete. + (None for no limit) + + Returns: + dict: A dictionary of the returned model properties. + + Raises: + TypeError: if the operation is not a dictionary. 
+ ValueError: If the operation is malformed. + UnknownError: If the server responds with an unexpected response. + err: If the operation exceeds polling attempts or stop_time + """ + if not isinstance(operation, dict): + raise TypeError('Operation must be a dictionary.') + + if operation.get('done'): + # Operations which are immediately done don't have an operation name + if operation.get('response'): + return operation.get('response') + if operation.get('error'): + raise _utils.handle_operation_error(operation.get('error')) + raise exceptions.UnknownError(message='Internal Error: Malformed Operation.') + + op_name = _validate_operation_name(operation.get('name')) + metadata = operation.get('metadata', {}) + metadata_type = metadata.get('@type', '') + if not metadata_type.endswith('ModelOperationMetadata'): + raise TypeError('Unknown type of operation metadata.') + _, model_id = _validate_and_parse_name(metadata.get('name')) + current_attempt = 0 + start_time = datetime.datetime.now() + stop_time = (None if max_time_seconds is None else + start_time + datetime.timedelta(seconds=max_time_seconds)) + while wait_for_operation and not operation.get('done'): + # We just got this operation. Wait before getting another + # so we don't exceed the GetOperation maximum request rate. 
+ self._exponential_backoff(current_attempt, stop_time) + operation = self.get_operation(op_name) + current_attempt += 1 + + if operation.get('done'): + if operation.get('response'): + return operation.get('response') + if operation.get('error'): + raise _utils.handle_operation_error(operation.get('error')) + + # If the operation is not complete or timed out, return a (locked) model instead + return get_model(model_id).as_dict() + + + def create_model(self, model): + _validate_model(model) + try: + return self.handle_operation( + self._client.body('post', url='models', json=model.as_dict(for_upload=True))) + except requests.exceptions.RequestException as error: + raise _utils.handle_platform_error_from_requests(error) + + def update_model(self, model, update_mask=None): + _validate_model(model, update_mask) + path = f'models/{model.model_id}' + if update_mask is not None: + path = path + f'?updateMask={update_mask}' + try: + return self.handle_operation( + self._client.body('patch', url=path, json=model.as_dict(for_upload=True))) + except requests.exceptions.RequestException as error: + raise _utils.handle_platform_error_from_requests(error) + + def set_published(self, model_id, publish): + _validate_model_id(model_id) + model_name = f'projects/{self._project_id}/models/{model_id}' + model = Model.from_dict({ + 'name': model_name, + 'state': { + 'published': publish + } + }) + return self.update_model(model, update_mask='state.published') + + def get_model(self, model_id): + _validate_model_id(model_id) + try: + return self._client.body('get', url=f'models/{model_id}') + except requests.exceptions.RequestException as error: + raise _utils.handle_platform_error_from_requests(error) + + def list_models(self, list_filter, page_size, page_token): + """ lists Firebase ML models.""" + _validate_list_filter(list_filter) + _validate_page_size(page_size) + _validate_page_token(page_token) + params = {} + if list_filter: + params['filter'] = list_filter + if page_size: + 
params['page_size'] = page_size + if page_token: + params['page_token'] = page_token + path = 'models' + if params: + param_str = parse.urlencode(sorted(params.items()), True) + path = path + '?' + param_str + try: + return self._client.body('get', url=path) + except requests.exceptions.RequestException as error: + raise _utils.handle_platform_error_from_requests(error) + + def delete_model(self, model_id): + _validate_model_id(model_id) + try: + self._client.body('delete', url=f'models/{model_id}') + except requests.exceptions.RequestException as error: + raise _utils.handle_platform_error_from_requests(error) diff --git a/firebase_admin/project_management.py b/firebase_admin/project_management.py new file mode 100644 index 000000000..73c100d3a --- /dev/null +++ b/firebase_admin/project_management.py @@ -0,0 +1,663 @@ +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase Project Management module. + +This module enables management of resources in Firebase projects, such as Android and iOS apps. 
+""" + +import base64 +import re +import time + +import requests + +import firebase_admin +from firebase_admin import exceptions +from firebase_admin import _http_client +from firebase_admin import _utils + + +_PROJECT_MANAGEMENT_ATTRIBUTE = '_project_management' + + +def _get_project_management_service(app): + return _utils.get_app_service(app, _PROJECT_MANAGEMENT_ATTRIBUTE, _ProjectManagementService) + + +def android_app(app_id, app=None): + """Obtains a reference to an Android app in the associated Firebase project. + + Args: + app_id: The app ID that identifies this Android app. + app: An App instance (optional). + + Returns: + AndroidApp: An ``AndroidApp`` instance. + """ + return AndroidApp(app_id=app_id, service=_get_project_management_service(app)) + + +def ios_app(app_id, app=None): + """Obtains a reference to an iOS app in the associated Firebase project. + + Args: + app_id: The app ID that identifies this iOS app. + app: An App instance (optional). + + Returns: + IOSApp: An ``IOSApp`` instance. + """ + return IOSApp(app_id=app_id, service=_get_project_management_service(app)) + + +def list_android_apps(app=None): + """Lists all Android apps in the associated Firebase project. + + Args: + app: An App instance (optional). + + Returns: + list: a list of ``AndroidApp`` instances referring to each Android app in the Firebase + project. + """ + return _get_project_management_service(app).list_android_apps() + + +def list_ios_apps(app=None): + """Lists all iOS apps in the associated Firebase project. + + Args: + app: An App instance (optional). + + Returns: + list: a list of ``IOSApp`` instances referring to each iOS app in the Firebase project. + """ + return _get_project_management_service(app).list_ios_apps() + + +def create_android_app(package_name, display_name=None, app=None): + """Creates a new Android app in the associated Firebase project. + + Args: + package_name: The package name of the Android app to be created. 
+ display_name: A nickname for this Android app (optional). + app: An App instance (optional). + + Returns: + AndroidApp: An ``AndroidApp`` instance that is a reference to the newly created app. + """ + return _get_project_management_service(app).create_android_app(package_name, display_name) + + +def create_ios_app(bundle_id, display_name=None, app=None): + """Creates a new iOS app in the associated Firebase project. + + Args: + bundle_id: The bundle ID of the iOS app to be created. + display_name: A nickname for this iOS app (optional). + app: An App instance (optional). + + Returns: + IOSApp: An ``IOSApp`` instance that is a reference to the newly created app. + """ + return _get_project_management_service(app).create_ios_app(bundle_id, display_name) + + +def _check_is_string_or_none(obj, field_name): + if obj is None or isinstance(obj, str): + return obj + raise ValueError(f'{field_name} must be a string.') + + +def _check_is_nonempty_string(obj, field_name): + if isinstance(obj, str) and obj: + return obj + raise ValueError(f'{field_name} must be a non-empty string.') + + +def _check_is_nonempty_string_or_none(obj, field_name): + if obj is None: + return None + return _check_is_nonempty_string(obj, field_name) + + +def _check_not_none(obj, field_name): + if obj is None: + raise ValueError(f'{field_name} cannot be None.') + return obj + + +class AndroidApp: + """A reference to an Android app within a Firebase project. + + Note: Unless otherwise specified, all methods defined in this class make an RPC. + + Please use the module-level function ``android_app(app_id)`` to obtain instances of this class + instead of instantiating it directly. + """ + + def __init__(self, app_id, service): + self._app_id = app_id + self._service = service + + @property + def app_id(self): + """Returns the app ID of the Android app to which this instance refers. + + Note: This method does not make an RPC. 
+ + Returns: + string: The app ID of the Android app to which this instance refers. + """ + return self._app_id + + def get_metadata(self): + """Retrieves detailed information about this Android app. + + Returns: + AndroidAppMetadata: An ``AndroidAppMetadata`` instance. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. + """ + return self._service.get_android_app_metadata(self._app_id) + + def set_display_name(self, new_display_name): + """Updates the display name attribute of this Android app to the one given. + + Args: + new_display_name: The new display name for this Android app. + + Returns: + NoneType: None. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. + """ + return self._service.set_android_app_display_name(self._app_id, new_display_name) + + def get_config(self): + """Retrieves the configuration artifact associated with this Android app.""" + return self._service.get_android_app_config(self._app_id) + + def get_sha_certificates(self): + """Retrieves the entire list of SHA certificates associated with this Android app. + + Returns: + list: A list of ``SHACertificate`` instances. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. + """ + return self._service.get_sha_certificates(self._app_id) + + def add_sha_certificate(self, certificate_to_add): + """Adds a SHA certificate to this Android app. + + Args: + certificate_to_add: The SHA certificate to add. + + Returns: + NoneType: None. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. (For example, if the certificate_to_add already exists.) + """ + return self._service.add_sha_certificate(self._app_id, certificate_to_add) + + def delete_sha_certificate(self, certificate_to_delete): + """Removes a SHA certificate from this Android app. 
+ + Args: + certificate_to_delete: The SHA certificate to delete. + + Returns: + NoneType: None. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. (For example, if the certificate_to_delete is not found.) + """ + return self._service.delete_sha_certificate(certificate_to_delete) + + +class IOSApp: + """A reference to an iOS app within a Firebase project. + + Note: Unless otherwise specified, all methods defined in this class make an RPC. + + Please use the module-level function ``ios_app(app_id)`` to obtain instances of this class + instead of instantiating it directly. + """ + + def __init__(self, app_id, service): + self._app_id = app_id + self._service = service + + @property + def app_id(self): + """Returns the app ID of the iOS app to which this instance refers. + + Note: This method does not make an RPC. + + Returns: + string: The app ID of the iOS app to which this instance refers. + """ + return self._app_id + + def get_metadata(self): + """Retrieves detailed information about this iOS app. + + Returns: + IOSAppMetadata: An ``IOSAppMetadata`` instance. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. + """ + return self._service.get_ios_app_metadata(self._app_id) + + def set_display_name(self, new_display_name): + """Updates the display name attribute of this iOS app to the one given. + + Args: + new_display_name: The new display name for this iOS app. + + Returns: + NoneType: None. + + Raises: + FirebaseError: If an error occurs while communicating with the Firebase Project + Management Service. 
+ """ + return self._service.set_ios_app_display_name(self._app_id, new_display_name) + + def get_config(self): + """Retrieves the configuration artifact associated with this iOS app.""" + return self._service.get_ios_app_config(self._app_id) + + +class _AppMetadata: + """Detailed information about a Firebase Android or iOS app.""" + + def __init__(self, name, app_id, display_name, project_id): + # _name is the fully qualified resource name of this Android or iOS app; currently it is not + # exposed to client code. + self._name = _check_is_nonempty_string(name, 'name') + self._app_id = _check_is_nonempty_string(app_id, 'app_id') + self._display_name = _check_is_string_or_none(display_name, 'display_name') + self._project_id = _check_is_nonempty_string(project_id, 'project_id') + + @property + def app_id(self): + """The globally unique, Firebase-assigned identifier of this Android or iOS app. + + This ID is unique even across apps of different platforms. + """ + return self._app_id + + @property + def display_name(self): + """The user-assigned display name of this Android or iOS app. 
+ + Note that the display name can be None if it has never been set by the user.""" + return self._display_name + + @property + def project_id(self): + """The permanent, globally unique, user-assigned ID of the parent Firebase project.""" + return self._project_id + + def __eq__(self, other): + if not isinstance(other, type(self)): + return False + # pylint: disable=protected-access + return (self._name == other._name and self.app_id == other.app_id and + self.display_name == other.display_name and self.project_id == other.project_id) + # pylint: enable=protected-access + + +class AndroidAppMetadata(_AppMetadata): + """Android-specific information about an Android Firebase app.""" + + def __init__(self, package_name, name, app_id, display_name, project_id): + """Clients should not instantiate this class directly.""" + super().__init__(name, app_id, display_name, project_id) + self._package_name = _check_is_nonempty_string(package_name, 'package_name') + + @property + def package_name(self): + """The canonical package name of this Android app as it would appear in the Play Store.""" + return self._package_name + + def __eq__(self, other): + return (super().__eq__(other) and + self.package_name == other.package_name) + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash( + (self._name, self.app_id, self.display_name, self.project_id, self.package_name)) + + +class IOSAppMetadata(_AppMetadata): + """iOS-specific information about an iOS Firebase app.""" + + def __init__(self, bundle_id, name, app_id, display_name, project_id): + """Clients should not instantiate this class directly.""" + super().__init__(name, app_id, display_name, project_id) + self._bundle_id = _check_is_nonempty_string(bundle_id, 'bundle_id') + + @property + def bundle_id(self): + """The canonical bundle ID of this iOS app as it would appear in the iOS AppStore.""" + return self._bundle_id + + def __eq__(self, other): + return super().__eq__(other) and 
self.bundle_id == other.bundle_id + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash((self._name, self.app_id, self.display_name, self.project_id, self.bundle_id)) + + +class SHACertificate: + """Represents a SHA-1 or SHA-256 certificate associated with an Android app.""" + + SHA_1 = 'SHA_1' + SHA_256 = 'SHA_256' + + _SHA_1_RE = re.compile('^[0-9A-Fa-f]{40}$') + _SHA_256_RE = re.compile('^[0-9A-Fa-f]{64}$') + + def __init__(self, sha_hash, name=None): + """Creates a new SHACertificate instance. + + Args: + sha_hash: A string; the certificate hash for the Android app. + name: The fully qualified resource name of this certificate; note that this field should + be omitted if the instance is being constructed for the purpose of calling the + add_sha_certificate() method on an ``AndroidApp``. + + Raises: + ValueError: If the sha_hash is not a valid SHA-1 or SHA-256 certificate hash. + """ + _check_is_nonempty_string(sha_hash, 'sha_hash') + _check_is_nonempty_string_or_none(name, 'name') + self._name = name + self._sha_hash = sha_hash.lower() + if SHACertificate._SHA_1_RE.match(sha_hash): + self._cert_type = SHACertificate.SHA_1 + elif SHACertificate._SHA_256_RE.match(sha_hash): + self._cert_type = SHACertificate.SHA_256 + else: + raise ValueError( + 'The supplied certificate hash is neither a valid SHA-1 nor SHA_256 hash.') + + @property + def name(self): + """Returns the fully qualified resource name of this certificate, if known. + + Returns: + string: The fully qualified resource name of this certificate, if known; otherwise, the + empty string. + """ + return self._name + + @property + def sha_hash(self): + """Returns the certificate hash. + + Returns: + string: The certificate hash. + """ + return self._sha_hash + + @property + def cert_type(self): + """Returns the type of the SHA certificate encoded in the hash. + + Returns: + string: One of 'SHA_1' or 'SHA_256'. 
+ """ + return self._cert_type + + def __eq__(self, other): + if not isinstance(other, SHACertificate): + return False + return (self.name == other.name and self.sha_hash == other.sha_hash and + self.cert_type == other.cert_type) + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash((self.name, self.sha_hash, self.cert_type)) + + +class _ProjectManagementService: + """Provides methods for interacting with the Firebase Project Management Service.""" + + BASE_URL = 'https://firebase.googleapis.com' + MAXIMUM_LIST_APPS_PAGE_SIZE = 100 + MAXIMUM_POLLING_ATTEMPTS = 8 + POLL_BASE_WAIT_TIME_SECONDS = 0.5 + POLL_EXPONENTIAL_BACKOFF_FACTOR = 1.5 + + ANDROID_APPS_RESOURCE_NAME = 'androidApps' + ANDROID_APP_IDENTIFIER_NAME = 'packageName' + IOS_APPS_RESOURCE_NAME = 'iosApps' + IOS_APP_IDENTIFIER_NAME = 'bundleId' + + def __init__(self, app): + project_id = app.project_id + if not project_id: + raise ValueError( + 'Project ID is required to access the Firebase Project Management Service. Either ' + 'set the projectId option, or use service account credentials. 
Alternatively, set ' + 'the GOOGLE_CLOUD_PROJECT environment variable.') + self._project_id = project_id + version_header = f'Python/Admin/{firebase_admin.__version__}' + timeout = app.options.get('httpTimeout', _http_client.DEFAULT_TIMEOUT_SECONDS) + self._client = _http_client.JsonHttpClient( + credential=app.credential.get_credential(), + base_url=_ProjectManagementService.BASE_URL, + headers={'X-Client-Version': version_header}, + timeout=timeout) + + def get_android_app_metadata(self, app_id): + return self._get_app_metadata( + platform_resource_name=_ProjectManagementService.ANDROID_APPS_RESOURCE_NAME, + identifier_name=_ProjectManagementService.ANDROID_APP_IDENTIFIER_NAME, + metadata_class=AndroidAppMetadata, + app_id=app_id) + + def get_ios_app_metadata(self, app_id): + return self._get_app_metadata( + platform_resource_name=_ProjectManagementService.IOS_APPS_RESOURCE_NAME, + identifier_name=_ProjectManagementService.IOS_APP_IDENTIFIER_NAME, + metadata_class=IOSAppMetadata, + app_id=app_id) + + def _get_app_metadata(self, platform_resource_name, identifier_name, metadata_class, app_id): + """Retrieves detailed information about an Android or iOS app.""" + _check_is_nonempty_string(app_id, 'app_id') + path = f'/v1beta1/projects/-/{platform_resource_name}/{app_id}' + response = self._make_request('get', path) + return metadata_class( + response[identifier_name], + name=response['name'], + app_id=response['appId'], + display_name=response.get('displayName') or None, + project_id=response['projectId']) + + def set_android_app_display_name(self, app_id, new_display_name): + self._set_display_name( + app_id=app_id, + new_display_name=new_display_name, + platform_resource_name=_ProjectManagementService.ANDROID_APPS_RESOURCE_NAME) + + def set_ios_app_display_name(self, app_id, new_display_name): + self._set_display_name( + app_id=app_id, + new_display_name=new_display_name, + platform_resource_name=_ProjectManagementService.IOS_APPS_RESOURCE_NAME) + + def 
_set_display_name(self, app_id, new_display_name, platform_resource_name): + """Sets the display name of an Android or iOS app.""" + path = f'/v1beta1/projects/-/{platform_resource_name}/{app_id}?updateMask=displayName' + request_body = {'displayName': new_display_name} + self._make_request('patch', path, json=request_body) + + def list_android_apps(self): + return self._list_apps( + platform_resource_name=_ProjectManagementService.ANDROID_APPS_RESOURCE_NAME, + app_class=AndroidApp) + + def list_ios_apps(self): + return self._list_apps( + platform_resource_name=_ProjectManagementService.IOS_APPS_RESOURCE_NAME, + app_class=IOSApp) + + def _list_apps(self, platform_resource_name, app_class): + """Lists all the Android or iOS apps within the Firebase project.""" + path = ( + f'/v1beta1/projects/{self._project_id}/{platform_resource_name}?pageSize=' + f'{_ProjectManagementService.MAXIMUM_LIST_APPS_PAGE_SIZE}' + ) + response = self._make_request('get', path) + apps_list = [] + while True: + apps = response.get('apps') + if not apps: + break + apps_list.extend(app_class(app_id=app['appId'], service=self) for app in apps) + next_page_token = response.get('nextPageToken') + if not next_page_token: + break + # Retrieve the next page of apps. 
+ path = ( + f'/v1beta1/projects/{self._project_id}/{platform_resource_name}' + f'?pageToken={next_page_token}' + f'&pageSize={_ProjectManagementService.MAXIMUM_LIST_APPS_PAGE_SIZE}' + ) + response = self._make_request('get', path) + return apps_list + + def create_android_app(self, package_name, display_name=None): + return self._create_app( + platform_resource_name=_ProjectManagementService.ANDROID_APPS_RESOURCE_NAME, + identifier_name=_ProjectManagementService.ANDROID_APP_IDENTIFIER_NAME, + identifier=package_name, + display_name=display_name, + app_class=AndroidApp) + + def create_ios_app(self, bundle_id, display_name=None): + return self._create_app( + platform_resource_name=_ProjectManagementService.IOS_APPS_RESOURCE_NAME, + identifier_name=_ProjectManagementService.IOS_APP_IDENTIFIER_NAME, + identifier=bundle_id, + display_name=display_name, + app_class=IOSApp) + + def _create_app( + self, + platform_resource_name, + identifier_name, + identifier, + display_name, + app_class): + """Creates an Android or iOS app.""" + _check_is_string_or_none(display_name, 'display_name') + path = f'/v1beta1/projects/{self._project_id}/{platform_resource_name}' + request_body = {identifier_name: identifier} + if display_name: + request_body['displayName'] = display_name + response = self._make_request('post', path, json=request_body) + operation_name = response['name'] + poll_response = self._poll_app_creation(operation_name) + return app_class(app_id=poll_response['appId'], service=self) + + def _poll_app_creation(self, operation_name): + """Polls the Long-Running Operation repeatedly until it is done with exponential backoff.""" + for current_attempt in range(_ProjectManagementService.MAXIMUM_POLLING_ATTEMPTS): + delay_factor = pow( + _ProjectManagementService.POLL_EXPONENTIAL_BACKOFF_FACTOR, current_attempt) + wait_time_seconds = delay_factor * _ProjectManagementService.POLL_BASE_WAIT_TIME_SECONDS + time.sleep(wait_time_seconds) + path = f'/v1/{operation_name}' + 
poll_response, http_response = self._body_and_response('get', path) + done = poll_response.get('done') + if done: + response = poll_response.get('response') + if response: + return response + + raise exceptions.UnknownError( + 'Polling finished, but the operation terminated in an error.', + http_response=http_response) + raise exceptions.DeadlineExceededError('Polling deadline exceeded.') + + def get_android_app_config(self, app_id): + return self._get_app_config( + platform_resource_name=_ProjectManagementService.ANDROID_APPS_RESOURCE_NAME, + app_id=app_id) + + def get_ios_app_config(self, app_id): + return self._get_app_config( + platform_resource_name=_ProjectManagementService.IOS_APPS_RESOURCE_NAME, app_id=app_id) + + def _get_app_config(self, platform_resource_name, app_id): + path = f'/v1beta1/projects/-/{platform_resource_name}/{app_id}/config' + response = self._make_request('get', path) + # In Python 2.7, the base64 module works with strings, while in Python 3, it works with + # bytes objects. This line works in both versions. 
+ return base64.standard_b64decode(response['configFileContents']).decode(encoding='utf-8') + + def get_sha_certificates(self, app_id): + path = f'/v1beta1/projects/-/androidApps/{app_id}/sha' + response = self._make_request('get', path) + cert_list = response.get('certificates') or [] + return [SHACertificate(sha_hash=cert['shaHash'], name=cert['name']) for cert in cert_list] + + def add_sha_certificate(self, app_id, certificate_to_add): + path = f'/v1beta1/projects/-/androidApps/{app_id}/sha' + sha_hash = _check_not_none(certificate_to_add, 'certificate_to_add').sha_hash + cert_type = certificate_to_add.cert_type + request_body = {'shaHash': sha_hash, 'certType': cert_type} + self._make_request('post', path, json=request_body) + + def delete_sha_certificate(self, certificate_to_delete): + name = _check_not_none(certificate_to_delete, 'certificate_to_delete').name + path = f'/v1beta1/{name}' + self._make_request('delete', path) + + def _make_request(self, method, url, json=None): + body, _ = self._body_and_response(method, url, json) + return body + + def _body_and_response(self, method, url, json=None): + try: + return self._client.body_and_response(method=method, url=url, json=json) + except requests.exceptions.RequestException as error: + raise _utils.handle_platform_error_from_requests(error) diff --git a/firebase_admin/remote_config.py b/firebase_admin/remote_config.py new file mode 100644 index 000000000..880804d3d --- /dev/null +++ b/firebase_admin/remote_config.py @@ -0,0 +1,762 @@ +# Copyright 2024 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase Remote Config Module. +This module has required APIs for the clients to use Firebase Remote Config with python. +""" + +import asyncio +import json +import logging +import threading +from typing import Dict, Optional, Literal, Union, Any +from enum import Enum +import re +import hashlib +import requests +from firebase_admin import App, _http_client, _utils +import firebase_admin + +# Set up logging (you can customize the level and output) +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +_REMOTE_CONFIG_ATTRIBUTE = '_remoteconfig' +MAX_CONDITION_RECURSION_DEPTH = 10 +ValueSource = Literal['default', 'remote', 'static'] # Define the ValueSource type + +class PercentConditionOperator(Enum): + """Enum representing the available operators for percent conditions. + """ + LESS_OR_EQUAL = "LESS_OR_EQUAL" + GREATER_THAN = "GREATER_THAN" + BETWEEN = "BETWEEN" + UNKNOWN = "UNKNOWN" + +class CustomSignalOperator(Enum): + """Enum representing the available operators for custom signal conditions. 
+ """ + STRING_CONTAINS = "STRING_CONTAINS" + STRING_DOES_NOT_CONTAIN = "STRING_DOES_NOT_CONTAIN" + STRING_EXACTLY_MATCHES = "STRING_EXACTLY_MATCHES" + STRING_CONTAINS_REGEX = "STRING_CONTAINS_REGEX" + NUMERIC_LESS_THAN = "NUMERIC_LESS_THAN" + NUMERIC_LESS_EQUAL = "NUMERIC_LESS_EQUAL" + NUMERIC_EQUAL = "NUMERIC_EQUAL" + NUMERIC_NOT_EQUAL = "NUMERIC_NOT_EQUAL" + NUMERIC_GREATER_THAN = "NUMERIC_GREATER_THAN" + NUMERIC_GREATER_EQUAL = "NUMERIC_GREATER_EQUAL" + SEMANTIC_VERSION_LESS_THAN = "SEMANTIC_VERSION_LESS_THAN" + SEMANTIC_VERSION_LESS_EQUAL = "SEMANTIC_VERSION_LESS_EQUAL" + SEMANTIC_VERSION_EQUAL = "SEMANTIC_VERSION_EQUAL" + SEMANTIC_VERSION_NOT_EQUAL = "SEMANTIC_VERSION_NOT_EQUAL" + SEMANTIC_VERSION_GREATER_THAN = "SEMANTIC_VERSION_GREATER_THAN" + SEMANTIC_VERSION_GREATER_EQUAL = "SEMANTIC_VERSION_GREATER_EQUAL" + UNKNOWN = "UNKNOWN" + +class _ServerTemplateData: + """Parses, validates and encapsulates template data and metadata.""" + def __init__(self, template_data): + """Initializes a new ServerTemplateData instance. + + Args: + template_data: The data to be parsed for getting the parameters and conditions. + + Raises: + ValueError: If the template data is not valid. 
+ """ + if 'parameters' in template_data: + if template_data['parameters'] is not None: + self._parameters = template_data['parameters'] + else: + raise ValueError('Remote Config parameters must be a non-null object') + else: + self._parameters = {} + + if 'conditions' in template_data: + if template_data['conditions'] is not None: + self._conditions = template_data['conditions'] + else: + raise ValueError('Remote Config conditions must be a non-null object') + else: + self._conditions = [] + + self._version = '' + if 'version' in template_data: + self._version = template_data['version'] + + self._etag = '' + if 'etag' in template_data and isinstance(template_data['etag'], str): + self._etag = template_data['etag'] + + self._template_data_json = json.dumps(template_data) + + @property + def parameters(self): + return self._parameters + + @property + def etag(self): + return self._etag + + @property + def version(self): + return self._version + + @property + def conditions(self): + return self._conditions + + @property + def template_data_json(self): + return self._template_data_json + + +class ServerTemplate: + """Represents a Server Template with implementations for loading and evaluating the template.""" + def __init__(self, app: App = None, default_config: Optional[Dict[str, str]] = None): + """Initializes a ServerTemplate instance. + + Args: + app: App instance to be used. This is optional and the default app instance will + be used if not present. + default_config: The default config to be used in the evaluated config. + """ + self._rc_service = _utils.get_app_service(app, + _REMOTE_CONFIG_ATTRIBUTE, _RemoteConfigService) + # This gets set when the template is + # fetched from RC servers via the load API, or via the set API. 
+ self._cache = None + self._stringified_default_config: Dict[str, str] = {} + self._lock = threading.RLock() + + # RC stores all remote values as string, but it's more intuitive + # to declare default values with specific types, so this converts + # the external declaration to an internal string representation. + if default_config is not None: + for key in default_config: + self._stringified_default_config[key] = str(default_config[key]) + + async def load(self): + """Fetches the server template and caches the data.""" + rc_server_template = await self._rc_service.get_server_template() + with self._lock: + self._cache = rc_server_template + + def evaluate(self, context: Optional[Dict[str, Union[str, int]]] = None) -> 'ServerConfig': + """Evaluates the cached server template to produce a ServerConfig. + + Args: + context: A dictionary of values to use for evaluating conditions. + + Returns: + A ServerConfig object. + Raises: + ValueError: If the input arguments are invalid. + """ + # Logic to process the cached template into a ServerConfig here. + if not self._cache: + raise ValueError("""No Remote Config Server template in cache. + Call load() before calling evaluate().""") + context = context or {} + config_values = {} + + with self._lock: + template_conditions = self._cache.conditions + template_parameters = self._cache.parameters + + # Initializes config Value objects with default values. + if self._stringified_default_config is not None: + for key, value in self._stringified_default_config.items(): + config_values[key] = _Value('default', value) + self._evaluator = _ConditionEvaluator(template_conditions, + template_parameters, context, + config_values) + return ServerConfig(config_values=self._evaluator.evaluate()) + + def set(self, template_data_json: str): + """Updates the cache to store the given template is of type ServerTemplateData. + + Args: + template_data_json: A json string representing ServerTemplateData to be cached. 
+ """ + template_data_map = json.loads(template_data_json) + template_data = _ServerTemplateData(template_data_map) + + with self._lock: + self._cache = template_data + + def to_json(self): + """Provides the server template in a JSON format to be used for initialization later.""" + if not self._cache: + raise ValueError("""No Remote Config Server template in cache. + Call load() before calling toJSON().""") + with self._lock: + template_json = self._cache.template_data_json + return template_json + + +class ServerConfig: + """Represents a Remote Config Server Side Config.""" + def __init__(self, config_values): + self._config_values = config_values # dictionary of param key to values + + def get_boolean(self, key): + """Returns the value as a boolean.""" + return self._get_value(key).as_boolean() + + def get_string(self, key): + """Returns the value as a string.""" + return self._get_value(key).as_string() + + def get_int(self, key): + """Returns the value as an integer.""" + return self._get_value(key).as_int() + + def get_float(self, key): + """Returns the value as a float.""" + return self._get_value(key).as_float() + + def get_value_source(self, key): + """Returns the source of the value.""" + return self._get_value(key).get_source() + + def _get_value(self, key): + return self._config_values.get(key, _Value('static')) + + +class _RemoteConfigService: + """Internal class that facilitates sending requests to the Firebase Remote + Config backend API. + """ + def __init__(self, app): + """Initialize a JsonHttpClient with necessary inputs. + + Args: + app: App instance to be used for fetching app specific details required + for initializing the http client. 
+ """ + remote_config_base_url = 'https://firebaseremoteconfig.googleapis.com' + self._project_id = app.project_id + app_credential = app.credential.get_credential() + rc_headers = { + 'X-FIREBASE-CLIENT': f'fire-admin-python/{firebase_admin.__version__}', } + timeout = app.options.get('httpTimeout', _http_client.DEFAULT_TIMEOUT_SECONDS) + + self._client = _http_client.JsonHttpClient(credential=app_credential, + base_url=remote_config_base_url, + headers=rc_headers, timeout=timeout) + + async def get_server_template(self): + """Requests for a server template and converts the response to an instance of + ServerTemplateData for storing the template parameters and conditions.""" + try: + loop = asyncio.get_event_loop() + headers, template_data = await loop.run_in_executor(None, + self._client.headers_and_body, + 'get', self._get_url()) + except requests.exceptions.RequestException as error: + raise self._handle_remote_config_error(error) + template_data['etag'] = headers.get('etag') + return _ServerTemplateData(template_data) + + def _get_url(self): + """Returns project prefix for url, in the format of /v1/projects/${projectId}""" + return f"/v1/projects/{self._project_id}/namespaces/firebase-server/serverRemoteConfig" + + @classmethod + def _handle_remote_config_error(cls, error: Any): + """Handles errors received from the Cloud Functions API.""" + return _utils.handle_platform_error_from_requests(error) + + +class _ConditionEvaluator: + """Internal class that facilitates sending requests to the Firebase Remote + Config backend API.""" + def __init__(self, conditions, parameters, context, config_values): + self._context = context + self._conditions = conditions + self._parameters = parameters + self._config_values = config_values + + def evaluate(self): + """Internal function that evaluates the cached server template to produce + a ServerConfig""" + evaluated_conditions = self.evaluate_conditions(self._conditions, self._context) + + # Overlays config Value objects 
derived by evaluating the template. + if self._parameters: + for key, parameter in self._parameters.items(): + conditional_values = parameter.get('conditionalValues', {}) + default_value = parameter.get('defaultValue', {}) + parameter_value_wrapper = None + # Iterates in order over condition list. If there is a value associated + # with a condition, this checks if the condition is true. + if evaluated_conditions: + for condition_name, condition_evaluation in evaluated_conditions.items(): + if condition_name in conditional_values and condition_evaluation: + parameter_value_wrapper = conditional_values[condition_name] + break + + if parameter_value_wrapper and parameter_value_wrapper.get('useInAppDefault'): + logger.info("Using in-app default value for key '%s'", key) + continue + + if parameter_value_wrapper: + parameter_value = parameter_value_wrapper.get('value') + self._config_values[key] = _Value('remote', parameter_value) + continue + + if not default_value: + logger.warning("No default value found for key '%s'", key) + continue + + if default_value.get('useInAppDefault'): + logger.info("Using in-app default value for key '%s'", key) + continue + self._config_values[key] = _Value('remote', default_value.get('value')) + return self._config_values + + def evaluate_conditions(self, conditions, context)-> Dict[str, bool]: + """Evaluates a list of conditions and returns a dictionary of results. + + Args: + conditions: A list of NamedCondition objects. + context: An EvaluationContext object. + + Returns: + A dictionary that maps condition names to boolean evaluation results. + """ + evaluated_conditions = {} + for condition in conditions: + evaluated_conditions[condition.get('name')] = self.evaluate_condition( + condition.get('condition'), context + ) + return evaluated_conditions + + def evaluate_condition(self, condition, context, + nesting_level: int = 0) -> bool: + """Recursively evaluates a condition. + + Args: + condition: The condition to evaluate. 
+ context: An EvaluationContext object. + nesting_level: The current recursion depth. + + Returns: + The boolean result of the condition evaluation. + """ + if nesting_level >= MAX_CONDITION_RECURSION_DEPTH: + logger.warning("Maximum condition recursion depth exceeded.") + return False + if condition.get('orCondition') is not None: + return self.evaluate_or_condition(condition.get('orCondition'), + context, nesting_level + 1) + if condition.get('andCondition') is not None: + return self.evaluate_and_condition(condition.get('andCondition'), + context, nesting_level + 1) + if condition.get('true') is not None: + return True + if condition.get('false') is not None: + return False + if condition.get('percent') is not None: + return self.evaluate_percent_condition(condition.get('percent'), context) + if condition.get('customSignal') is not None: + return self.evaluate_custom_signal_condition(condition.get('customSignal'), context) + logger.warning("Unknown condition type encountered.") + return False + + def evaluate_or_condition(self, or_condition, + context, + nesting_level: int = 0) -> bool: + """Evaluates an OR condition. + + Args: + or_condition: The OR condition to evaluate. + context: An EvaluationContext object. + nesting_level: The current recursion depth. + + Returns: + True if any of the subconditions are true, False otherwise. + """ + sub_conditions = or_condition.get('conditions') or [] + for sub_condition in sub_conditions: + result = self.evaluate_condition(sub_condition, context, nesting_level + 1) + if result: + return True + return False + + def evaluate_and_condition(self, and_condition, + context, + nesting_level: int = 0) -> bool: + """Evaluates an AND condition. + + Args: + and_condition: The AND condition to evaluate. + context: An EvaluationContext object. + nesting_level: The current recursion depth. + + Returns: + True if all of the subconditions are met; False otherwise. 
+ """ + sub_conditions = and_condition.get('conditions') or [] + for sub_condition in sub_conditions: + result = self.evaluate_condition(sub_condition, context, nesting_level + 1) + if not result: + return False + return True + + def evaluate_percent_condition(self, percent_condition, + context) -> bool: + """Evaluates a percent condition. + + Args: + percent_condition: The percent condition to evaluate. + context: An EvaluationContext object. + + Returns: + True if the condition is met, False otherwise. + """ + if not context.get('randomization_id'): + logger.warning("Missing randomization_id in context for evaluating percent condition.") + return False + + seed = percent_condition.get('seed') + percent_operator = percent_condition.get('percentOperator') + micro_percent = percent_condition.get('microPercent') + micro_percent_range = percent_condition.get('microPercentRange') + if not percent_operator: + logger.warning("Missing percent operator for percent condition.") + return False + if micro_percent_range: + norm_percent_upper_bound = micro_percent_range.get('microPercentUpperBound') or 0 + norm_percent_lower_bound = micro_percent_range.get('microPercentLowerBound') or 0 + else: + norm_percent_upper_bound = 0 + norm_percent_lower_bound = 0 + if micro_percent: + norm_micro_percent = micro_percent + else: + norm_micro_percent = 0 + seed_prefix = f"{seed}." 
if seed else "" + string_to_hash = f"{seed_prefix}{context.get('randomization_id')}" + + hash64 = self.hash_seeded_randomization_id(string_to_hash) + instance_micro_percentile = hash64 % (100 * 1000000) + if percent_operator == PercentConditionOperator.LESS_OR_EQUAL.value: + return instance_micro_percentile <= norm_micro_percent + if percent_operator == PercentConditionOperator.GREATER_THAN.value: + return instance_micro_percentile > norm_micro_percent + if percent_operator == PercentConditionOperator.BETWEEN.value: + return norm_percent_lower_bound < instance_micro_percentile <= norm_percent_upper_bound + logger.warning("Unknown percent operator: %s", percent_operator) + return False + def hash_seeded_randomization_id(self, seeded_randomization_id: str) -> int: + """Hashes a seeded randomization ID. + + Args: + seeded_randomization_id: The seeded randomization ID to hash. + + Returns: + The hashed value. + """ + hash_object = hashlib.sha256() + hash_object.update(seeded_randomization_id.encode('utf-8')) + hash64 = hash_object.hexdigest() + return abs(int(hash64, 16)) + + def evaluate_custom_signal_condition(self, custom_signal_condition, + context) -> bool: + """Evaluates a custom signal condition. + + Args: + custom_signal_condition: The custom signal condition to evaluate. + context: An EvaluationContext object. + + Returns: + True if the condition is met, False otherwise. 
+ """ + custom_signal_operator = custom_signal_condition.get('customSignalOperator') or {} + custom_signal_key = custom_signal_condition.get('customSignalKey') or {} + target_custom_signal_values = ( + custom_signal_condition.get('targetCustomSignalValues') or {}) + + if not all([custom_signal_operator, custom_signal_key, target_custom_signal_values]): + logger.warning("Missing operator, key, or target values for custom signal condition.") + return False + + if not target_custom_signal_values: + return False + actual_custom_signal_value = context.get(custom_signal_key) or {} + + if not actual_custom_signal_value: + logger.debug("Custom signal value not found in context: %s", custom_signal_key) + return False + + if custom_signal_operator == CustomSignalOperator.STRING_CONTAINS.value: + return self._compare_strings(target_custom_signal_values, + actual_custom_signal_value, + lambda target, actual: target in actual) + if custom_signal_operator == CustomSignalOperator.STRING_DOES_NOT_CONTAIN.value: + return not self._compare_strings(target_custom_signal_values, + actual_custom_signal_value, + lambda target, actual: target in actual) + if custom_signal_operator == CustomSignalOperator.STRING_EXACTLY_MATCHES.value: + return self._compare_strings(target_custom_signal_values, + actual_custom_signal_value, + lambda target, actual: target.strip() == actual.strip()) + if custom_signal_operator == CustomSignalOperator.STRING_CONTAINS_REGEX.value: + return self._compare_strings(target_custom_signal_values, + actual_custom_signal_value, + re.search) + + # For numeric operators only one target value is allowed. 
+ if custom_signal_operator == CustomSignalOperator.NUMERIC_LESS_THAN.value: + return self._compare_numbers(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r < 0) + if custom_signal_operator == CustomSignalOperator.NUMERIC_LESS_EQUAL.value: + return self._compare_numbers(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r <= 0) + if custom_signal_operator == CustomSignalOperator.NUMERIC_EQUAL.value: + return self._compare_numbers(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r == 0) + if custom_signal_operator == CustomSignalOperator.NUMERIC_NOT_EQUAL.value: + return self._compare_numbers(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r != 0) + if custom_signal_operator == CustomSignalOperator.NUMERIC_GREATER_THAN.value: + return self._compare_numbers(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r > 0) + if custom_signal_operator == CustomSignalOperator.NUMERIC_GREATER_EQUAL.value: + return self._compare_numbers(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r >= 0) + + # For semantic operators only one target value is allowed. 
+ if custom_signal_operator == CustomSignalOperator.SEMANTIC_VERSION_LESS_THAN.value: + return self._compare_semantic_versions(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r < 0) + if custom_signal_operator == CustomSignalOperator.SEMANTIC_VERSION_LESS_EQUAL.value: + return self._compare_semantic_versions(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r <= 0) + if custom_signal_operator == CustomSignalOperator.SEMANTIC_VERSION_EQUAL.value: + return self._compare_semantic_versions(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r == 0) + if custom_signal_operator == CustomSignalOperator.SEMANTIC_VERSION_NOT_EQUAL.value: + return self._compare_semantic_versions(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r != 0) + if custom_signal_operator == CustomSignalOperator.SEMANTIC_VERSION_GREATER_THAN.value: + return self._compare_semantic_versions(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r > 0) + if custom_signal_operator == CustomSignalOperator.SEMANTIC_VERSION_GREATER_EQUAL.value: + return self._compare_semantic_versions(custom_signal_key, + target_custom_signal_values[0], + actual_custom_signal_value, + lambda r: r >= 0) + logger.warning("Unknown custom signal operator: %s", custom_signal_operator) + return False + + def _compare_strings(self, target_values, actual_value, predicate_fn) -> bool: + """Compares the actual string value of a signal against a list of target values. + + Args: + target_values: A list of target string values. + actual_value: The actual value to compare, which can be a string or number. + predicate_fn: A function that takes two string arguments (target and actual) + and returns a boolean indicating whether + the target matches the actual value. 
+ + Returns: + bool: True if the predicate function returns True for any target value in the list, + False otherwise. + """ + + for target in target_values: + if predicate_fn(target, str(actual_value)): + return True + return False + + def _compare_numbers(self, custom_signal_key, target_value, actual_value, predicate_fn) -> bool: + try: + target = float(target_value) + actual = float(actual_value) + result = -1 if actual < target else 1 if actual > target else 0 + return predicate_fn(result) + except ValueError: + logger.warning("Invalid numeric value for comparison for custom signal key %s.", + custom_signal_key) + return False + + def _compare_semantic_versions(self, custom_signal_key, + target_value, actual_value, predicate_fn) -> bool: + """Compares the actual semantic version value of a signal against a target value. + Calls the predicate function with -1, 0, 1 if actual is less than, equal to, + or greater than target. + + Args: + custom_signal_key: The custom signal for which the evaluation is being performed. + target_values: A list of target string values. + actual_value: The actual value to compare, which can be a string or number. + predicate_fn: A function that takes an integer (-1, 0, or 1) and returns a boolean. + + Returns: + bool: True if the predicate function returns True for the result of the comparison, + False otherwise. + """ + return self._compare_versions(custom_signal_key, str(actual_value), + str(target_value), predicate_fn) + + def _compare_versions(self, custom_signal_key, + sem_version_1, sem_version_2, predicate_fn) -> bool: + """Compares two semantic version strings. + + Args: + custom_signal_key: The custom singal for which the evaluation is being performed. + sem_version_1: The first semantic version string. + sem_version_2: The second semantic version string. + predicate_fn: A function that takes an integer and returns a boolean. + + Returns: + bool: The result of the predicate function. 
+ """ + try: + v1_parts = [int(part) for part in sem_version_1.split('.')] + v2_parts = [int(part) for part in sem_version_2.split('.')] + max_length = max(len(v1_parts), len(v2_parts)) + v1_parts.extend([0] * (max_length - len(v1_parts))) + v2_parts.extend([0] * (max_length - len(v2_parts))) + + for part1, part2 in zip(v1_parts, v2_parts): + if any((part1 < 0, part2 < 0)): + raise ValueError + if part1 < part2: + return predicate_fn(-1) + if part1 > part2: + return predicate_fn(1) + return predicate_fn(0) + except ValueError: + logger.warning( + "Invalid semantic version format for comparison for custom signal key %s.", + custom_signal_key) + return False + +async def get_server_template(app: App = None, default_config: Optional[Dict[str, str]] = None): + """Initializes a new ServerTemplate instance and fetches the server template. + + Args: + app: App instance to be used. This is optional and the default app instance will + be used if not present. + default_config: The default config to be used in the evaluated config. + + Returns: + ServerTemplate: An object having the cached server template to be used for evaluation. + """ + template = init_server_template(app=app, default_config=default_config) + await template.load() + return template + +def init_server_template(app: App = None, default_config: Optional[Dict[str, str]] = None, + template_data_json: Optional[str] = None): + """Initializes a new ServerTemplate instance. + + Args: + app: App instance to be used. This is optional and the default app instance will + be used if not present. + default_config: The default config to be used in the evaluated config. + template_data_json: An optional template data JSON to be set on initialization. + + Returns: + ServerTemplate: A new ServerTemplate instance initialized with an optional + template and config. 
+ """ + template = ServerTemplate(app=app, default_config=default_config) + if template_data_json is not None: + template.set(template_data_json) + return template + +class _Value: + """Represents a value fetched from Remote Config. + """ + DEFAULT_VALUE_FOR_BOOLEAN = False + DEFAULT_VALUE_FOR_STRING = '' + DEFAULT_VALUE_FOR_INTEGER = 0 + DEFAULT_VALUE_FOR_FLOAT_NUMBER = 0.0 + BOOLEAN_TRUTHY_VALUES = ['1', 'true', 't', 'yes', 'y', 'on'] + + def __init__(self, source: ValueSource, value: str = DEFAULT_VALUE_FOR_STRING): + """Initializes a Value instance. + + Args: + source: The source of the value (e.g., 'default', 'remote', 'static'). + "static" indicates the value was defined by a static constant. + "default" indicates the value was defined by default config. + "remote" indicates the value was defined by config produced by evaluating a template. + value: The string value. + """ + self.source = source + self.value = value + + def as_string(self) -> str: + """Returns the value as a string.""" + if self.source == 'static': + return self.DEFAULT_VALUE_FOR_STRING + return str(self.value) + + def as_boolean(self) -> bool: + """Returns the value as a boolean.""" + if self.source == 'static': + return self.DEFAULT_VALUE_FOR_BOOLEAN + return str(self.value).lower() in self.BOOLEAN_TRUTHY_VALUES + + def as_int(self) -> float: + """Returns the value as a number.""" + if self.source == 'static': + return self.DEFAULT_VALUE_FOR_INTEGER + try: + return int(self.value) + except ValueError: + return self.DEFAULT_VALUE_FOR_INTEGER + + def as_float(self) -> float: + """Returns the value as a number.""" + if self.source == 'static': + return self.DEFAULT_VALUE_FOR_FLOAT_NUMBER + try: + return float(self.value) + except ValueError: + return self.DEFAULT_VALUE_FOR_FLOAT_NUMBER + + def get_source(self) -> ValueSource: + """Returns the source of the value.""" + return self.source diff --git a/firebase_admin/storage.py b/firebase_admin/storage.py new file mode 100644 index 
000000000..d2f004be6 --- /dev/null +++ b/firebase_admin/storage.py @@ -0,0 +1,87 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase Cloud Storage module. + +This module contains utilities for accessing Google Cloud Storage buckets associated with +Firebase apps. This requires the ``google-cloud-storage`` Python module. +""" + +# pylint: disable=import-error,no-name-in-module +try: + from google.cloud import storage +except ImportError as exception: + raise ImportError('Failed to import the Cloud Storage library for Python. Make sure ' + 'to install the "google-cloud-storage" module.') from exception + +from firebase_admin import _utils + + +_STORAGE_ATTRIBUTE = '_storage' + +def bucket(name=None, app=None) -> storage.Bucket: + """Returns a handle to a Google Cloud Storage bucket. + + If the name argument is not provided, uses the 'storageBucket' option specified when + initializing the App. If that is also not available raises an error. This function + does not make any RPC calls. + + Args: + name: Name of a cloud storage bucket (optional). + app: An App instance (optional). + + Returns: + google.cloud.storage.Bucket: A handle to the specified bucket. + + Raises: + ValueError: If a bucket name is not specified either via options or method arguments, + or if the specified bucket name is not a valid string. 
+ """ + client = _utils.get_app_service(app, _STORAGE_ATTRIBUTE, _StorageClient.from_app) + return client.bucket(name) + + +class _StorageClient: + """Holds a Google Cloud Storage client instance.""" + + STORAGE_HEADERS = { + 'x-goog-api-client': _utils.get_metrics_header(), + } + + def __init__(self, credentials, project, default_bucket): + self._client = storage.Client( + credentials=credentials, project=project, extra_headers=self.STORAGE_HEADERS) + self._default_bucket = default_bucket + + @classmethod + def from_app(cls, app): + credentials = app.credential.get_credential() + default_bucket = app.options.get('storageBucket') + # Specifying project ID is not required, but providing it when available + # significantly speeds up the initialization of the storage client. + return _StorageClient(credentials, app.project_id, default_bucket) + + def bucket(self, name=None): + """Returns a handle to the specified Cloud Storage Bucket.""" + bucket_name = name if name is not None else self._default_bucket + if bucket_name is None: + raise ValueError( + 'Storage bucket name not specified. Specify the bucket name via the ' + '"storageBucket" option when initializing the App, or specify the bucket ' + 'name explicitly when calling the storage.bucket() function.') + if not bucket_name or not isinstance(bucket_name, str): + raise ValueError( + f'Invalid storage bucket name: "{bucket_name}". Bucket name must be a non-empty ' + 'string.') + return self._client.bucket(bucket_name) diff --git a/firebase_admin/tenant_mgt.py b/firebase_admin/tenant_mgt.py new file mode 100644 index 000000000..9e713d988 --- /dev/null +++ b/firebase_admin/tenant_mgt.py @@ -0,0 +1,439 @@ +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Firebase tenant management module. + +This module contains functions for creating and configuring authentication tenants within a +Google Cloud Identity Platform (GCIP) instance. +""" + +import re +import threading + +import requests + +import firebase_admin +from firebase_admin import auth +from firebase_admin import _auth_utils +from firebase_admin import _http_client +from firebase_admin import _utils + + +_TENANT_MGT_ATTRIBUTE = '_tenant_mgt' +_MAX_LIST_TENANTS_RESULTS = 100 +_DISPLAY_NAME_PATTERN = re.compile('^[a-zA-Z][a-zA-Z0-9-]{3,19}$') + + +__all__ = [ + 'ListTenantsPage', + 'Tenant', + 'TenantIdMismatchError', + 'TenantNotFoundError', + + 'auth_for_tenant', + 'create_tenant', + 'delete_tenant', + 'get_tenant', + 'list_tenants', + 'update_tenant', +] + + +TenantIdMismatchError = _auth_utils.TenantIdMismatchError +TenantNotFoundError = _auth_utils.TenantNotFoundError + + +def auth_for_tenant(tenant_id, app=None): + """Gets an Auth Client instance scoped to the given tenant ID. + + Args: + tenant_id: A tenant ID string. + app: An App instance (optional). + + Returns: + auth.Client: An ``auth.Client`` object. + + Raises: + ValueError: If the tenant ID is None, empty or not a string. + """ + tenant_mgt_service = _get_tenant_mgt_service(app) + return tenant_mgt_service.auth_for_tenant(tenant_id) + + +def get_tenant(tenant_id, app=None): + """Gets the tenant corresponding to the given ``tenant_id``. + + Args: + tenant_id: A tenant ID string. + app: An App instance (optional). + + Returns: + Tenant: A tenant object. 
+ + Raises: + ValueError: If the tenant ID is None, empty or not a string. + TenantNotFoundError: If no tenant exists by the given ID. + FirebaseError: If an error occurs while retrieving the tenant. + """ + tenant_mgt_service = _get_tenant_mgt_service(app) + return tenant_mgt_service.get_tenant(tenant_id) + + +def create_tenant( + display_name, allow_password_sign_up=None, enable_email_link_sign_in=None, app=None): + """Creates a new tenant from the given options. + + Args: + display_name: Display name string for the new tenant. Must begin with a letter and contain + only letters, digits and hyphens. Length must be between 4 and 20. + allow_password_sign_up: A boolean indicating whether to enable or disable the email sign-in + provider (optional). + enable_email_link_sign_in: A boolean indicating whether to enable or disable email link + sign-in (optional). Disabling this makes the password required for email sign-in. + app: An App instance (optional). + + Returns: + Tenant: A tenant object. + + Raises: + ValueError: If any of the given arguments are invalid. + FirebaseError: If an error occurs while creating the tenant. + """ + tenant_mgt_service = _get_tenant_mgt_service(app) + return tenant_mgt_service.create_tenant( + display_name=display_name, allow_password_sign_up=allow_password_sign_up, + enable_email_link_sign_in=enable_email_link_sign_in) + + +def update_tenant( + tenant_id, display_name=None, allow_password_sign_up=None, enable_email_link_sign_in=None, + app=None): + """Updates an existing tenant with the given options. + + Args: + tenant_id: ID of the tenant to update. + display_name: Updated display name string for the tenant (optional). + allow_password_sign_up: A boolean indicating whether to enable or disable the email sign-in + provider. + enable_email_link_sign_in: A boolean indicating whether to enable or disable email link + sign-in. Disabling this makes the password required for email sign-in. + app: An App instance (optional). 
+ + Returns: + Tenant: The updated tenant object. + + Raises: + ValueError: If any of the given arguments are invalid. + TenantNotFoundError: If no tenant exists by the given ID. + FirebaseError: If an error occurs while updating the tenant. + """ + tenant_mgt_service = _get_tenant_mgt_service(app) + return tenant_mgt_service.update_tenant( + tenant_id, display_name=display_name, allow_password_sign_up=allow_password_sign_up, + enable_email_link_sign_in=enable_email_link_sign_in) + + +def delete_tenant(tenant_id, app=None): + """Deletes the tenant corresponding to the given ``tenant_id``. + + Args: + tenant_id: A tenant ID string. + app: An App instance (optional). + + Raises: + ValueError: If the tenant ID is None, empty or not a string. + TenantNotFoundError: If no tenant exists by the given ID. + FirebaseError: If an error occurs while deleting the tenant. + """ + tenant_mgt_service = _get_tenant_mgt_service(app) + tenant_mgt_service.delete_tenant(tenant_id) + + +def list_tenants(page_token=None, max_results=_MAX_LIST_TENANTS_RESULTS, app=None): + """Retrieves a page of tenants from a Firebase project. + + The ``page_token`` argument governs the starting point of the page. The ``max_results`` + argument governs the maximum number of tenants that may be included in the returned page. + This function never returns None. If there are no tenants in the Firebase project, this + returns an empty page. + + Args: + page_token: A non-empty page token string, which indicates the starting point of the page + (optional). Defaults to ``None``, which will retrieve the first page of tenants. + max_results: A positive integer indicating the maximum number of tenants to include in the + returned page (optional). Defaults to 100, which is also the maximum number allowed. + app: An App instance (optional). + + Returns: + ListTenantsPage: A page of tenants. + + Raises: + ValueError: If ``max_results`` or ``page_token`` are invalid. 
+ FirebaseError: If an error occurs while retrieving the user accounts. + """ + tenant_mgt_service = _get_tenant_mgt_service(app) + def download(page_token, max_results): + return tenant_mgt_service.list_tenants(page_token, max_results) + return ListTenantsPage(download, page_token, max_results) + + +def _get_tenant_mgt_service(app): + return _utils.get_app_service(app, _TENANT_MGT_ATTRIBUTE, _TenantManagementService) + + +class Tenant: + """Represents a tenant in a multi-tenant application. + + Multi-tenancy support requires Google Cloud Identity Platform (GCIP). To learn more about + GCIP including pricing and features, see https://cloud.google.com/identity-platform. + + Before multi-tenancy can be used in a Google Cloud Identity Platform project, tenants must be + enabled in that project via the Cloud Console UI. A Tenant instance provides information + such as the display name, tenant identifier and email authentication configuration. + """ + + def __init__(self, data): + if not isinstance(data, dict): + raise ValueError(f'Invalid data argument in Tenant constructor: {data}') + if not 'name' in data: + raise ValueError('Tenant response missing required keys.') + + self._data = data + + @property + def tenant_id(self): + name = self._data['name'] + return name.split('/')[-1] + + @property + def display_name(self): + return self._data.get('displayName') + + @property + def allow_password_sign_up(self): + return self._data.get('allowPasswordSignup', False) + + @property + def enable_email_link_sign_in(self): + return self._data.get('enableEmailLinkSignin', False) + + +class _TenantManagementService: + """Firebase tenant management service.""" + + TENANT_MGT_URL = 'https://identitytoolkit.googleapis.com/v2' + + def __init__(self, app): + credential = app.credential.get_credential() + version_header = f'Python/Admin/{firebase_admin.__version__}' + base_url = f'{self.TENANT_MGT_URL}/projects/{app.project_id}' + self.app = app + self.client = 
_http_client.JsonHttpClient( + credential=credential, base_url=base_url, headers={'X-Client-Version': version_header}) + self.tenant_clients = {} + self.lock = threading.RLock() + + def auth_for_tenant(self, tenant_id): + """Gets an Auth Client instance scoped to the given tenant ID.""" + if not isinstance(tenant_id, str) or not tenant_id: + raise ValueError( + f'Invalid tenant ID: {tenant_id}. Tenant ID must be a non-empty string.') + + with self.lock: + if tenant_id in self.tenant_clients: + return self.tenant_clients[tenant_id] + + client = auth.Client(self.app, tenant_id=tenant_id) + self.tenant_clients[tenant_id] = client + return client + + def get_tenant(self, tenant_id): + """Gets the tenant corresponding to the given ``tenant_id``.""" + if not isinstance(tenant_id, str) or not tenant_id: + raise ValueError( + f'Invalid tenant ID: {tenant_id}. Tenant ID must be a non-empty string.') + + try: + body = self.client.body('get', f'/tenants/{tenant_id}') + except requests.exceptions.RequestException as error: + raise _auth_utils.handle_auth_backend_error(error) + return Tenant(body) + + def create_tenant( + self, display_name, allow_password_sign_up=None, enable_email_link_sign_in=None): + """Creates a new tenant from the given parameters.""" + + payload = {'displayName': _validate_display_name(display_name)} + if allow_password_sign_up is not None: + payload['allowPasswordSignup'] = _auth_utils.validate_boolean( + allow_password_sign_up, 'allowPasswordSignup') + if enable_email_link_sign_in is not None: + payload['enableEmailLinkSignin'] = _auth_utils.validate_boolean( + enable_email_link_sign_in, 'enableEmailLinkSignin') + + try: + body = self.client.body('post', '/tenants', json=payload) + except requests.exceptions.RequestException as error: + raise _auth_utils.handle_auth_backend_error(error) + return Tenant(body) + + def update_tenant( + self, tenant_id, display_name=None, allow_password_sign_up=None, + enable_email_link_sign_in=None): + """Updates the 
specified tenant with the given parameters.""" + if not isinstance(tenant_id, str) or not tenant_id: + raise ValueError('Tenant ID must be a non-empty string.') + + payload = {} + if display_name is not None: + payload['displayName'] = _validate_display_name(display_name) + if allow_password_sign_up is not None: + payload['allowPasswordSignup'] = _auth_utils.validate_boolean( + allow_password_sign_up, 'allowPasswordSignup') + if enable_email_link_sign_in is not None: + payload['enableEmailLinkSignin'] = _auth_utils.validate_boolean( + enable_email_link_sign_in, 'enableEmailLinkSignin') + + if not payload: + raise ValueError('At least one parameter must be specified for update.') + + url = f'/tenants/{tenant_id}' + update_mask = ','.join(_auth_utils.build_update_mask(payload)) + params = f'updateMask={update_mask}' + try: + body = self.client.body('patch', url, json=payload, params=params) + except requests.exceptions.RequestException as error: + raise _auth_utils.handle_auth_backend_error(error) + return Tenant(body) + + def delete_tenant(self, tenant_id): + """Deletes the tenant corresponding to the given ``tenant_id``.""" + if not isinstance(tenant_id, str) or not tenant_id: + raise ValueError( + f'Invalid tenant ID: {tenant_id}. 
Tenant ID must be a non-empty string.') + + try: + self.client.request('delete', f'/tenants/{tenant_id}') + except requests.exceptions.RequestException as error: + raise _auth_utils.handle_auth_backend_error(error) + + def list_tenants(self, page_token=None, max_results=_MAX_LIST_TENANTS_RESULTS): + """Retrieves a batch of tenants.""" + if page_token is not None: + if not isinstance(page_token, str) or not page_token: + raise ValueError('Page token must be a non-empty string.') + if not isinstance(max_results, int): + raise ValueError('Max results must be an integer.') + if max_results < 1 or max_results > _MAX_LIST_TENANTS_RESULTS: + raise ValueError( + 'Max results must be a positive integer less than or equal to ' + f'{_MAX_LIST_TENANTS_RESULTS}.') + + payload = {'pageSize': max_results} + if page_token: + payload['pageToken'] = page_token + try: + return self.client.body('get', '/tenants', params=payload) + except requests.exceptions.RequestException as error: + raise _auth_utils.handle_auth_backend_error(error) + + +class ListTenantsPage: + """Represents a page of tenants fetched from a Firebase project. + + Provides methods for traversing tenants included in this page, as well as retrieving + subsequent pages of tenants. The iterator returned by ``iterate_all()`` can be used to iterate + through all tenants in the Firebase project starting from this page. 
+ """ + + def __init__(self, download, page_token, max_results): + self._download = download + self._max_results = max_results + self._current = download(page_token, max_results) + + @property + def tenants(self): + """A list of ``ExportedUserRecord`` instances available in this page.""" + return [Tenant(data) for data in self._current.get('tenants', [])] + + @property + def next_page_token(self): + """Page token string for the next page (empty string indicates no more pages).""" + return self._current.get('nextPageToken', '') + + @property + def has_next_page(self): + """A boolean indicating whether more pages are available.""" + return bool(self.next_page_token) + + def get_next_page(self): + """Retrieves the next page of tenants, if available. + + Returns: + ListTenantsPage: Next page of tenants, or None if this is the last page. + """ + if self.has_next_page: + return ListTenantsPage(self._download, self.next_page_token, self._max_results) + return None + + def iterate_all(self): + """Retrieves an iterator for tenants. + + Returned iterator will iterate through all the tenants in the Firebase project + starting from this page. The iterator will never buffer more than one page of tenants + in memory at a time. + + Returns: + iterator: An iterator of Tenant instances. + """ + return _TenantIterator(self) + + +class _TenantIterator: + """An iterator that allows iterating over tenants. + + This implementation loads a page of tenants into memory, and iterates on them. When the whole + page has been traversed, it loads another page. This class never keeps more than one page + of entries in memory. 
+ """ + + def __init__(self, current_page): + if not current_page: + raise ValueError('Current page must not be None.') + self._current_page = current_page + self._index = 0 + + def __next__(self): + if self._index == len(self._current_page.tenants): + if self._current_page.has_next_page: + self._current_page = self._current_page.get_next_page() + self._index = 0 + if self._index < len(self._current_page.tenants): + result = self._current_page.tenants[self._index] + self._index += 1 + return result + raise StopIteration + + def __iter__(self): + return self + + +def _validate_display_name(display_name): + if not isinstance(display_name, str): + raise ValueError('Invalid type for displayName') + if not _DISPLAY_NAME_PATTERN.search(display_name): + raise ValueError( + 'displayName must start with a letter and only consist of letters, digits and ' + 'hyphens with 4-20 characters.') + return display_name diff --git a/integration/__init__.py b/integration/__init__.py new file mode 100644 index 000000000..81707da07 --- /dev/null +++ b/integration/__init__.py @@ -0,0 +1,16 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Enables exclusion of the tests module from the distribution. diff --git a/integration/conftest.py b/integration/conftest.py new file mode 100644 index 000000000..ebaf9297a --- /dev/null +++ b/integration/conftest.py @@ -0,0 +1,72 @@ +# Copyright 2017 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""pytest configuration and global fixtures for integration tests.""" +import json + +import pytest + +import firebase_admin +from firebase_admin import credentials + + +def pytest_addoption(parser): + parser.addoption( + '--cert', action='store', help='Service account certificate file for integration tests.') + parser.addoption( + '--apikey', action='store', help='API key file for integration tests.') + +def _get_cert_path(request): + cert = request.config.getoption('--cert') + if cert: + return cert + raise ValueError('Service account certificate not specified. Make sure to specify the ' + '"--cert" command-line option.') + +def integration_conf(request): + cert_path = _get_cert_path(request) + with open(cert_path, encoding='utf-8') as cert: + project_id = json.load(cert).get('project_id') + if not project_id: + raise ValueError('Failed to determine project ID from service account certificate.') + return credentials.Certificate(cert_path), project_id + +@pytest.fixture(scope='session') +def project_id(request): + _, project_id = integration_conf(request) + return project_id + +@pytest.fixture(autouse=True, scope='session') +def default_app(request): + """Initializes the default Firebase App instance used for all integration tests. + + This fixture is attached to the session scope, which ensures that it runs only once during + a test session. 
It is also marked as autouse, and therefore runs automatically without + test cases having to call it explicitly. + """ + cred, project_id = integration_conf(request) + ops = { + 'databaseURL' : f'https://{project_id}.firebaseio.com', + 'storageBucket' : f'{project_id}.appspot.com' + } + return firebase_admin.initialize_app(cred, ops) + +@pytest.fixture(scope='session') +def api_key(request): + path = request.config.getoption('--apikey') + if not path: + raise ValueError('API key file not specified. Make sure to specify the "--apikey" ' + 'command-line option.') + with open(path, encoding='utf-8') as keyfile: + return keyfile.read().strip() diff --git a/integration/emulators/.gitignore b/integration/emulators/.gitignore new file mode 100644 index 000000000..b17f63107 --- /dev/null +++ b/integration/emulators/.gitignore @@ -0,0 +1,69 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +firebase-debug.log* +firebase-debug.*.log* + +# Firebase cache +.firebase/ + +# Firebase config + +# Uncomment this if you'd like others to create their own Firebase project. +# For a team working on the same Firebase project(s), it is recommended to leave +# it commented so all members can deploy to the same project(s) in .firebaserc. 
+# .firebaserc + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env + +# dataconnect generated files +.dataconnect diff --git a/integration/emulators/firebase.json b/integration/emulators/firebase.json new file mode 100644 index 000000000..a7b727c4d --- /dev/null +++ b/integration/emulators/firebase.json @@ -0,0 +1,29 @@ +{ + "emulators": { + "tasks": { + "port": 9499 + }, + "ui": { + "enabled": false + }, + "singleProjectMode": true, + "functions": { + "port": 5001 + } + }, + "functions": [ + { + "source": "functions", + "codebase": "default", + "disallowLegacyRuntimeConfig": true, + "ignore": [ + "venv", + ".git", + "firebase-debug.log", + "firebase-debug.*.log", + "*.local" + ], + "runtime": "python313" + } + ] +} diff --git a/integration/emulators/functions/.gitignore b/integration/emulators/functions/.gitignore new file mode 100644 index 000000000..1609bab70 --- /dev/null +++ b/integration/emulators/functions/.gitignore @@ -0,0 +1,6 @@ +# Python bytecode +__pycache__/ + +# Python virtual environment +venv/ +*.local diff --git a/integration/emulators/functions/main.py b/integration/emulators/functions/main.py new file mode 100644 index 000000000..6cd2c5766 --- /dev/null +++ b/integration/emulators/functions/main.py @@ 
-0,0 +1,7 @@ +from firebase_functions import tasks_fn + +@tasks_fn.on_task_dispatched() +def testTaskQueue(req: tasks_fn.CallableRequest) -> None: + """Handles tasks from the task queue.""" + print(f"Received task with data: {req.data}") + return diff --git a/integration/emulators/functions/requirements.txt b/integration/emulators/functions/requirements.txt new file mode 100644 index 000000000..6bbab42f8 --- /dev/null +++ b/integration/emulators/functions/requirements.txt @@ -0,0 +1 @@ +firebase_functions~=0.4.1 diff --git a/integration/test_auth.py b/integration/test_auth.py new file mode 100644 index 000000000..b36063d19 --- /dev/null +++ b/integration/test_auth.py @@ -0,0 +1,925 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Integration tests for firebase_admin.auth module.""" +import base64 +import datetime +import random +import string +import time +from typing import List +from urllib import parse +import uuid + +import google.oauth2.credentials +from google.auth import transport +import pytest +import requests + +import firebase_admin +from firebase_admin import auth +from firebase_admin import credentials +from firebase_admin._http_client import DEFAULT_TIMEOUT_SECONDS as timeout + + +_verify_token_url = 'https://www.googleapis.com/identitytoolkit/v3/relyingparty/verifyCustomToken' +_verify_password_url = 'https://www.googleapis.com/identitytoolkit/v3/relyingparty/verifyPassword' +_password_reset_url = 'https://www.googleapis.com/identitytoolkit/v3/relyingparty/resetPassword' +_verify_email_url = 'https://www.googleapis.com/identitytoolkit/v3/relyingparty/setAccountInfo' +_email_sign_in_url = 'https://www.googleapis.com/identitytoolkit/v3/relyingparty/emailLinkSignin' + +ACTION_LINK_CONTINUE_URL = 'http://localhost?a=1&b=5#f=1' + +X509_CERTIFICATES = [ + ('-----BEGIN CERTIFICATE-----\nMIICZjCCAc+gAwIBAgIBADANBgkqhkiG9w0BAQ0FADBQMQswCQYDVQQGEwJ1czE' + 'L\nMAkGA1UECAwCQ0ExDTALBgNVBAoMBEFjbWUxETAPBgNVBAMMCGFjbWUuY29tMRIw\nEAYDVQQHDAlTdW5ueXZhbGU' + 'wHhcNMTgxMjA2MDc1MTUxWhcNMjgxMjAzMDc1MTUx\nWjBQMQswCQYDVQQGEwJ1czELMAkGA1UECAwCQ0ExDTALBgNVB' + 'AoMBEFjbWUxETAP\nBgNVBAMMCGFjbWUuY29tMRIwEAYDVQQHDAlTdW5ueXZhbGUwgZ8wDQYJKoZIhvcN\nAQEBBQADg' + 'Y0AMIGJAoGBAKphmggjiVgqMLXyzvI7cKphscIIQ+wcv7Dld6MD4aKv\n7Jqr8ltujMxBUeY4LFEKw8Terb01snYpDot' + 'filaG6NxpF/GfVVmMalzwWp0mT8+H\nyzyPj89mRcozu17RwuooR6n1ofXjGcBE86lqC21UhA3WVgjPOLqB42rlE9gPn' + 'ZLB\nAgMBAAGjUDBOMB0GA1UdDgQWBBS0iM7WnbCNOnieOP1HIA+Oz/ML+zAfBgNVHSME\nGDAWgBS0iM7WnbCNOnieO' + 'P1HIA+Oz/ML+zAMBgNVHRMEBTADAQH/MA0GCSqGSIb3\nDQEBDQUAA4GBAF3jBgS+wP+K/jTupEQur6iaqS4UvXd//d4' + 'vo1MV06oTLQMTz+rP\nOSMDNwxzfaOn6vgYLKP/Dcy9dSTnSzgxLAxfKvDQZA0vE3udsw0Bd245MmX4+GOp\nlbrN99X' + 'P1u+lFxCSdMUzvQ/jW4ysw/Nq4JdJ0gPAyPvL6Qi/3mQdIQwx\n-----END 
CERTIFICATE-----\n'), + ('-----BEGIN CERTIFICATE-----\nMIICZjCCAc+gAwIBAgIBADANBgkqhkiG9w0BAQ0FADBQMQswCQYDVQQGEwJ1czE' + 'L\nMAkGA1UECAwCQ0ExDTALBgNVBAoMBEFjbWUxETAPBgNVBAMMCGFjbWUuY29tMRIw\nEAYDVQQHDAlTdW5ueXZhbGU' + 'wHhcNMTgxMjA2MDc1ODE4WhcNMjgxMjAzMDc1ODE4\nWjBQMQswCQYDVQQGEwJ1czELMAkGA1UECAwCQ0ExDTALBgNVB' + 'AoMBEFjbWUxETAP\nBgNVBAMMCGFjbWUuY29tMRIwEAYDVQQHDAlTdW5ueXZhbGUwgZ8wDQYJKoZIhvcN\nAQEBBQADg' + 'Y0AMIGJAoGBAKuzYKfDZGA6DJgQru3wNUqv+S0hMZfP/jbp8ou/8UKu\nrNeX7cfCgt3yxoGCJYKmF6t5mvo76JY0MWw' + 'A53BxeP/oyXmJ93uHG5mFRAsVAUKs\ncVVb0Xi6ujxZGVdDWFV696L0BNOoHTfXmac6IBoZQzNNK4n1AATqwo+z7a0pf' + 'RrJ\nAgMBAAGjUDBOMB0GA1UdDgQWBBSKmi/ZKMuLN0ES7/jPa7q7jAjPiDAfBgNVHSME\nGDAWgBSKmi/ZKMuLN0ES7' + '/jPa7q7jAjPiDAMBgNVHRMEBTADAQH/MA0GCSqGSIb3\nDQEBDQUAA4GBAAg2a2kSn05NiUOuWOHwPUjW3wQRsGxPXtb' + 'hWMhmNdCfKKteM2+/\nLd/jz5F3qkOgGQ3UDgr3SHEoWhnLaJMF4a2tm6vL2rEIfPEK81KhTTRxSsAgMVbU\nJXBz1md' + '6Ur0HlgQC7d1CHC8/xi2DDwHopLyxhogaZUxy9IaRxUEa2vJW\n-----END CERTIFICATE-----\n'), +] + + +def _sign_in(custom_token, api_key): + body = {'token' : custom_token.decode(), 'returnSecureToken' : True} + params = {'key' : api_key} + resp = requests.request('post', _verify_token_url, params=params, json=body, timeout=timeout) + resp.raise_for_status() + return resp.json().get('idToken') + +def _sign_in_with_password(email, password, api_key): + body = {'email': email, 'password': password, 'returnSecureToken': True} + params = {'key' : api_key} + resp = requests.request('post', _verify_password_url, params=params, json=body, timeout=timeout) + resp.raise_for_status() + return resp.json().get('idToken') + +def _random_string(length=10): + letters = string.ascii_lowercase + return ''.join(random.choice(letters) for i in range(length)) + +def _random_id(): + random_id = str(uuid.uuid4()).lower().replace('-', '') + email = f'test{random_id[:12]}@example.{random_id[12:]}.com' + return random_id, email + +def _random_phone(): + return '+1' + ''.join([str(random.randint(0, 9)) for _ in range(0, 
10)]) + +def _reset_password(oob_code, new_password, api_key): + body = {'oobCode': oob_code, 'newPassword': new_password} + params = {'key' : api_key} + resp = requests.request('post', _password_reset_url, params=params, json=body, timeout=timeout) + resp.raise_for_status() + return resp.json().get('email') + +def _verify_email(oob_code, api_key): + body = {'oobCode': oob_code} + params = {'key' : api_key} + resp = requests.request('post', _verify_email_url, params=params, json=body, timeout=timeout) + resp.raise_for_status() + return resp.json().get('email') + +def _sign_in_with_email_link(email, oob_code, api_key): + body = {'oobCode': oob_code, 'email': email} + params = {'key' : api_key} + resp = requests.request('post', _email_sign_in_url, params=params, json=body, timeout=timeout) + resp.raise_for_status() + return resp.json().get('idToken') + +def _extract_link_params(link): + query = parse.urlparse(link).query + query_dict = dict(parse.parse_qsl(query)) + return query_dict + +def test_custom_token(api_key): + custom_token = auth.create_custom_token('user1') + id_token = _sign_in(custom_token, api_key) + claims = auth.verify_id_token(id_token) + assert claims['uid'] == 'user1' + +def test_custom_token_without_service_account(api_key): + google_cred = firebase_admin.get_app().credential.get_credential() + cred = CredentialWrapper.from_existing_credential(google_cred) + custom_app = firebase_admin.initialize_app(cred, { + 'serviceAccountId': google_cred.service_account_email, + 'projectId': firebase_admin.get_app().project_id + }, 'temp-app') + try: + custom_token = auth.create_custom_token('user1', app=custom_app) + id_token = _sign_in(custom_token, api_key) + claims = auth.verify_id_token(id_token) + assert claims['uid'] == 'user1' + finally: + firebase_admin.delete_app(custom_app) + +def test_custom_token_with_claims(api_key): + dev_claims = {'premium' : True, 'subscription' : 'silver'} + custom_token = auth.create_custom_token('user2', dev_claims) + 
id_token = _sign_in(custom_token, api_key) + claims = auth.verify_id_token(id_token) + assert claims['uid'] == 'user2' + assert claims['premium'] is True + assert claims['subscription'] == 'silver' + +def test_session_cookies(api_key): + dev_claims = {'premium' : True, 'subscription' : 'silver'} + custom_token = auth.create_custom_token('user3', dev_claims) + id_token = _sign_in(custom_token, api_key) + expires_in = datetime.timedelta(days=1) + session_cookie = auth.create_session_cookie(id_token, expires_in=expires_in) + claims = auth.verify_session_cookie(session_cookie) + assert claims['uid'] == 'user3' + assert claims['premium'] is True + assert claims['subscription'] == 'silver' + assert claims['iss'].startswith('https://session.firebase.google.com') + estimated_exp = int(time.time() + expires_in.total_seconds()) + assert abs(claims['exp'] - estimated_exp) < 5 + +def test_session_cookie_error(): + expires_in = datetime.timedelta(days=1) + with pytest.raises(auth.InvalidIdTokenError): + auth.create_session_cookie('not.a.token', expires_in=expires_in) + +def test_get_non_existing_user(): + with pytest.raises(auth.UserNotFoundError) as excinfo: + auth.get_user('non.existing') + assert str(excinfo.value) == 'No user record found for the provided user ID: non.existing.' 
+ +def test_get_non_existing_user_by_email(): + with pytest.raises(auth.UserNotFoundError) as excinfo: + auth.get_user_by_email('non.existing@definitely.non.existing') + error_msg = ('No user record found for the provided email: ' + 'non.existing@definitely.non.existing.') + assert str(excinfo.value) == error_msg + +def test_update_non_existing_user(): + with pytest.raises(auth.UserNotFoundError): + auth.update_user('non.existing') + +def test_delete_non_existing_user(): + with pytest.raises(auth.UserNotFoundError): + auth.delete_user('non.existing') + +@pytest.fixture +def new_user(): + user = auth.create_user() + yield user + auth.delete_user(user.uid) + +@pytest.fixture +def new_user_with_params() -> auth.UserRecord: + random_id, email = _random_id() + phone = _random_phone() + user = auth.create_user( + uid=random_id, + email=email, + phone_number=phone, + display_name='Random User', + photo_url='https://example.com/photo.png', + email_verified=True, + password='secret', + ) + yield user + auth.delete_user(user.uid) + +@pytest.fixture +def new_user_list(): + users = [ + auth.create_user(password='password').uid, + auth.create_user(password='password').uid, + auth.create_user(password='password').uid, + ] + yield users + # TODO(rsgowman): Using auth.delete_users() would make more sense here, but + # that's currently rate limited to 1qps, so using it in this context would + # almost certainly trigger errors. When/if that limit is relaxed, switch to + # batch delete. 
+ for uid in users: + auth.delete_user(uid) + +@pytest.fixture +def new_user_record_list() -> List[auth.UserRecord]: + uid1, email1 = _random_id() + uid2, email2 = _random_id() + uid3, email3 = _random_id() + users = [ + auth.create_user( + uid=uid1, email=email1, password='password', phone_number=_random_phone()), + auth.create_user( + uid=uid2, email=email2, password='password', phone_number=_random_phone()), + auth.create_user( + uid=uid3, email=email3, password='password', phone_number=_random_phone()), + ] + yield users + for user in users: + auth.delete_user(user.uid) + +@pytest.fixture +def new_user_with_provider() -> auth.UserRecord: + uid4, email4 = _random_id() + google_uid, google_email = _random_id() + import_user1 = auth.ImportUserRecord( + uid=uid4, + email=email4, + provider_data=[ + auth.UserProvider( + uid=google_uid, + provider_id='google.com', + email=google_email, + ) + ]) + user_import_result = auth.import_users([import_user1]) + assert user_import_result.success_count == 1 + assert user_import_result.failure_count == 0 + + user = auth.get_user(uid4) + yield user + auth.delete_user(user.uid) + +@pytest.fixture +def new_user_email_unverified(): + random_id, email = _random_id() + user = auth.create_user( + uid=random_id, + email=email, + email_verified=False, + password='password' + ) + yield user + auth.delete_user(user.uid) + +def test_get_user(new_user_with_params): + user = auth.get_user(new_user_with_params.uid) + assert user.uid == new_user_with_params.uid + assert user.display_name == 'Random User' + assert user.email == new_user_with_params.email + assert user.phone_number == new_user_with_params.phone_number + assert user.photo_url == 'https://example.com/photo.png' + assert user.email_verified is True + assert user.disabled is False + + user = auth.get_user_by_email(new_user_with_params.email) + assert user.uid == new_user_with_params.uid + user = auth.get_user_by_phone_number(new_user_with_params.phone_number) + assert user.uid == 
new_user_with_params.uid + + assert len(user.provider_data) == 2 + provider_ids = sorted([provider.provider_id for provider in user.provider_data]) + assert provider_ids == ['password', 'phone'] + +class TestGetUsers: + @staticmethod + def _map_user_record_to_uid_email_phones(user_record): + return { + 'uid': user_record.uid, + 'email': user_record.email, + 'phone_number': user_record.phone_number + } + + def test_multiple_uid_types(self, new_user_record_list, new_user_with_provider): + get_users_results = auth.get_users([ + auth.UidIdentifier(new_user_record_list[0].uid), + auth.EmailIdentifier(new_user_record_list[1].email), + auth.PhoneIdentifier(new_user_record_list[2].phone_number), + auth.ProviderIdentifier( + new_user_with_provider.provider_data[0].provider_id, + new_user_with_provider.provider_data[0].uid, + )]) + actual = sorted([ + self._map_user_record_to_uid_email_phones(user) + for user in get_users_results.users + ], key=lambda user: user['uid']) + expected = sorted([ + self._map_user_record_to_uid_email_phones(user) + for user in new_user_record_list + [new_user_with_provider] + ], key=lambda user: user['uid']) + + assert actual == expected + + def test_existing_and_non_existing_users(self, new_user_record_list): + get_users_results = auth.get_users([ + auth.UidIdentifier(new_user_record_list[0].uid), + auth.UidIdentifier('uid_that_doesnt_exist'), + auth.UidIdentifier(new_user_record_list[2].uid)]) + actual = sorted([ + self._map_user_record_to_uid_email_phones(user) + for user in get_users_results.users + ], key=lambda user: user['uid']) + expected = sorted([ + self._map_user_record_to_uid_email_phones(user) + for user in [new_user_record_list[0], new_user_record_list[2]] + ], key=lambda user: user['uid']) + + assert actual == expected + + def test_non_existing_users(self): + not_found_ids = [auth.UidIdentifier('non-existing user')] + get_users_results = auth.get_users(not_found_ids) + + assert get_users_results.users == [] + assert 
get_users_results.not_found == not_found_ids
+
+    def test_de_dups_duplicate_users(self, new_user):
+        get_users_results = auth.get_users([
+            auth.UidIdentifier(new_user.uid),
+            auth.UidIdentifier(new_user.uid)])
+        actual = [
+            self._map_user_record_to_uid_email_phones(user)
+            for user in get_users_results.users]
+        expected = [self._map_user_record_to_uid_email_phones(new_user)]
+        assert actual == expected
+
+def test_last_refresh_timestamp(new_user_with_params: auth.UserRecord, api_key):
+    # new users should not have a last_refresh_timestamp set
+    assert new_user_with_params.user_metadata.last_refresh_timestamp is None
+
+    # login to cause the last_refresh_timestamp to be set
+    _sign_in_with_password(new_user_with_params.email, 'secret', api_key)
+
+    # Attempt to retrieve the user 3 times (with a small delay between each
+    # attempt). Occasionally, this call retrieves the user data without the
+    # lastLoginTime/lastRefreshTime set; possibly because it's hitting a
+    # different server than the login request uses.
+    user_record = None
+    for iteration in range(0, 3):
+        user_record = auth.get_user(new_user_with_params.uid)
+        if user_record.user_metadata.last_refresh_timestamp is not None:
+            break
+
+        time.sleep(2 ** iteration)
+
+    # Ensure the last refresh time occurred at approximately 'now'. (With a
+    # tolerance of up to 1 minute; we ideally want to ensure that any timezone
+    # considerations are handled properly, so as long as we're within an hour,
+    # we're in good shape.)
+    millis_per_second = 1000
+    millis_per_minute = millis_per_second * 60
+
+    last_refresh_timestamp = user_record.user_metadata.last_refresh_timestamp
+    assert last_refresh_timestamp == pytest.approx(
+        time.time()*millis_per_second, 1*millis_per_minute)
+
+def test_list_users(new_user_list):
+    err_msg_template = (
+        'Missing {field} field. A common cause would be forgetting to add the "Firebase '
+        + 'Authentication Admin" permission. 
See instructions in CONTRIBUTING.md') + + fetched = [] + # Test exporting all user accounts. + page = auth.list_users() + while page: + for user in page.users: + assert isinstance(user, auth.ExportedUserRecord) + if user.uid in new_user_list: + fetched.append(user.uid) + assert user.password_hash is not None, ( + err_msg_template.format(field='password_hash')) + assert user.password_salt is not None, ( + err_msg_template.format(field='password_salt')) + page = page.get_next_page() + assert len(fetched) == len(new_user_list) + + fetched = [] + page = auth.list_users() + for user in page.iterate_all(): + assert isinstance(user, auth.ExportedUserRecord) + if user.uid in new_user_list: + fetched.append(user.uid) + assert user.password_hash is not None, ( + err_msg_template.format(field='password_hash')) + assert user.password_salt is not None, ( + err_msg_template.format(field='password_salt')) + assert len(fetched) == len(new_user_list) + +def test_create_user(new_user): + user = auth.get_user(new_user.uid) + assert user.uid == new_user.uid + assert user.display_name is None + assert user.email is None + assert user.phone_number is None + assert user.photo_url is None + assert user.email_verified is False + assert user.disabled is False + assert user.custom_claims is None + assert user.user_metadata.creation_timestamp > 0 + assert user.user_metadata.last_sign_in_timestamp is None + assert len(user.provider_data) == 0 + with pytest.raises(auth.UidAlreadyExistsError): + auth.create_user(uid=new_user.uid) + +def test_update_user(new_user): + _, email = _random_id() + phone = _random_phone() + user = auth.update_user( + new_user.uid, + email=email, + phone_number=phone, + display_name='Updated Name', + photo_url='https://example.com/photo.png', + email_verified=True, + password='secret') + assert user.uid == new_user.uid + assert user.display_name == 'Updated Name' + assert user.email == email + assert user.phone_number == phone + assert user.photo_url == 
'https://example.com/photo.png' + assert user.email_verified is True + assert user.disabled is False + assert user.custom_claims is None + assert len(user.provider_data) == 2 + +def test_set_custom_user_claims(new_user, api_key): + claims = {'admin' : True, 'package' : 'gold'} + auth.set_custom_user_claims(new_user.uid, claims) + user = auth.get_user(new_user.uid) + assert user.custom_claims == claims + custom_token = auth.create_custom_token(new_user.uid) + id_token = _sign_in(custom_token, api_key) + dev_claims = auth.verify_id_token(id_token) + for key, value in claims.items(): + assert dev_claims[key] == value + +def test_update_custom_user_claims(new_user): + assert new_user.custom_claims is None + claims = {'admin' : True, 'package' : 'gold'} + auth.set_custom_user_claims(new_user.uid, claims) + user = auth.get_user(new_user.uid) + assert user.custom_claims == claims + + claims = {'admin' : False, 'subscription' : 'guest'} + auth.set_custom_user_claims(new_user.uid, claims) + user = auth.get_user(new_user.uid) + assert user.custom_claims == claims + + auth.set_custom_user_claims(new_user.uid, None) + user = auth.get_user(new_user.uid) + assert user.custom_claims is None + +def test_disable_user(new_user_with_params): + user = auth.update_user( + new_user_with_params.uid, + display_name=auth.DELETE_ATTRIBUTE, + photo_url=auth.DELETE_ATTRIBUTE, + phone_number=auth.DELETE_ATTRIBUTE, + disabled=True) + assert user.uid == new_user_with_params.uid + assert user.email == new_user_with_params.email + assert user.display_name is None + assert user.phone_number is None + assert user.photo_url is None + assert user.email_verified is True + assert user.disabled is True + assert len(user.provider_data) == 1 + +def test_remove_provider(new_user_with_provider): + provider_ids = [provider.provider_id for provider in new_user_with_provider.provider_data] + assert 'google.com' in provider_ids + user = auth.update_user(new_user_with_provider.uid, 
providers_to_delete=['google.com']) + assert user.uid == new_user_with_provider.uid + new_provider_ids = [provider.provider_id for provider in user.provider_data] + assert 'google.com' not in new_provider_ids + +def test_delete_user(): + user = auth.create_user() + auth.delete_user(user.uid) + with pytest.raises(auth.UserNotFoundError): + auth.get_user(user.uid) + + +class TestDeleteUsers: + def test_delete_multiple_users(self): + uid1 = auth.create_user(disabled=True).uid + uid2 = auth.create_user(disabled=False).uid + uid3 = auth.create_user(disabled=True).uid + + delete_users_result = self._slow_delete_users(auth, [uid1, uid2, uid3]) + assert delete_users_result.success_count == 3 + assert delete_users_result.failure_count == 0 + assert len(delete_users_result.errors) == 0 + + get_users_results = auth.get_users( + [auth.UidIdentifier(uid1), auth.UidIdentifier(uid2), auth.UidIdentifier(uid3)]) + assert len(get_users_results.users) == 0 + + def test_is_idempotent(self): + uid = auth.create_user().uid + + delete_users_result = self._slow_delete_users(auth, [uid]) + assert delete_users_result.success_count == 1 + assert delete_users_result.failure_count == 0 + + # Delete the user again, ensuring that everything still counts as a + # success. + delete_users_result = self._slow_delete_users(auth, [uid]) + assert delete_users_result.success_count == 1 + assert delete_users_result.failure_count == 0 + + def _slow_delete_users(self, auth, uids): + """The batchDelete endpoint has a rate limit of 1 QPS. 
Use this test + helper to ensure you don't exceed the quota.""" + time.sleep(1) + return auth.delete_users(uids) + + +def test_revoke_refresh_tokens(new_user): + user = auth.get_user(new_user.uid) + old_valid_after = user.tokens_valid_after_timestamp + time.sleep(1) + auth.revoke_refresh_tokens(new_user.uid) + user = auth.get_user(new_user.uid) + new_valid_after = user.tokens_valid_after_timestamp + assert new_valid_after > old_valid_after + +def test_verify_id_token_revoked(new_user, api_key): + custom_token = auth.create_custom_token(new_user.uid) + id_token = _sign_in(custom_token, api_key) + claims = auth.verify_id_token(id_token) + assert claims['iat'] * 1000 >= new_user.tokens_valid_after_timestamp + + time.sleep(1) + auth.revoke_refresh_tokens(new_user.uid) + claims = auth.verify_id_token(id_token, check_revoked=False) + user = auth.get_user(new_user.uid) + # verify_id_token succeeded because it didn't check revoked. + assert claims['iat'] * 1000 < user.tokens_valid_after_timestamp + + with pytest.raises(auth.RevokedIdTokenError) as excinfo: + claims = auth.verify_id_token(id_token, check_revoked=True) + assert str(excinfo.value) == 'The Firebase ID token has been revoked.' + + # Sign in again, verify works. + id_token = _sign_in(custom_token, api_key) + claims = auth.verify_id_token(id_token, check_revoked=True) + assert claims['iat'] * 1000 >= user.tokens_valid_after_timestamp + +def test_verify_id_token_disabled(new_user, api_key): + custom_token = auth.create_custom_token(new_user.uid) + id_token = _sign_in(custom_token, api_key) + claims = auth.verify_id_token(id_token, check_revoked=True) + + # Disable the user record. + auth.update_user(new_user.uid, disabled=True) + # Verify the ID token without checking revocation. This should + # not raise. + claims = auth.verify_id_token(id_token, check_revoked=False) + assert claims['sub'] == new_user.uid + + # Verify the ID token while checking revocation. This should + # raise an exception. 
+ with pytest.raises(auth.UserDisabledError) as excinfo: + auth.verify_id_token(id_token, check_revoked=True) + assert str(excinfo.value) == 'The user record is disabled.' + +def test_verify_session_cookie_revoked(new_user, api_key): + custom_token = auth.create_custom_token(new_user.uid) + id_token = _sign_in(custom_token, api_key) + session_cookie = auth.create_session_cookie(id_token, expires_in=datetime.timedelta(days=1)) + + time.sleep(1) + auth.revoke_refresh_tokens(new_user.uid) + claims = auth.verify_session_cookie(session_cookie, check_revoked=False) + user = auth.get_user(new_user.uid) + # verify_session_cookie succeeded because it didn't check revoked. + assert claims['iat'] * 1000 < user.tokens_valid_after_timestamp + + with pytest.raises(auth.RevokedSessionCookieError) as excinfo: + claims = auth.verify_session_cookie(session_cookie, check_revoked=True) + assert str(excinfo.value) == 'The Firebase session cookie has been revoked.' + + # Sign in again, verify works. + id_token = _sign_in(custom_token, api_key) + session_cookie = auth.create_session_cookie(id_token, expires_in=datetime.timedelta(days=1)) + claims = auth.verify_session_cookie(session_cookie, check_revoked=True) + assert claims['iat'] * 1000 >= user.tokens_valid_after_timestamp + + +def test_verify_session_cookie_disabled(new_user, api_key): + custom_token = auth.create_custom_token(new_user.uid) + id_token = _sign_in(custom_token, api_key) + session_cookie = auth.create_session_cookie(id_token, expires_in=datetime.timedelta(days=1)) + + # Disable the user record. + auth.update_user(new_user.uid, disabled=True) + # Verify the session cookie without checking revocation. This should + # not raise. + claims = auth.verify_session_cookie(session_cookie, check_revoked=False) + assert claims['sub'] == new_user.uid + + # Verify the session cookie while checking revocation. This should + # raise an exception. 
+ with pytest.raises(auth.UserDisabledError) as excinfo: + auth.verify_session_cookie(session_cookie, check_revoked=True) + assert str(excinfo.value) == 'The user record is disabled.' + +def test_import_users(): + uid, email = _random_id() + user = auth.ImportUserRecord(uid=uid, email=email) + result = auth.import_users([user]) + try: + assert result.success_count == 1 + assert result.failure_count == 0 + saved_user = auth.get_user(uid) + assert saved_user.email == email + finally: + auth.delete_user(uid) + +def test_import_users_with_password(api_key): + uid, email = _random_id() + password_hash = base64.b64decode( + 'V358E8LdWJXAO7muq0CufVpEOXaj8aFiC7T/rcaGieN04q/ZPJ08WhJEHGjj9lz/2TT+/86N5VjVoc5DdBhBiw==') + user = auth.ImportUserRecord( + uid=uid, email=email, password_hash=password_hash, password_salt=b'NaCl') + + scrypt_key = base64.b64decode( + 'jxspr8Ki0RYycVU8zykbdLGjFQ3McFUH0uiiTvC8pVMXAn210wjLNmdZJzxUECKbm0QsEmYUSDzZvpjeJ9WmXA==') + salt_separator = base64.b64decode('Bw==') + scrypt = auth.UserImportHash.scrypt( + key=scrypt_key, salt_separator=salt_separator, rounds=8, memory_cost=14) + result = auth.import_users([user], hash_alg=scrypt) + try: + assert result.success_count == 1 + assert result.failure_count == 0 + saved_user = auth.get_user(uid) + assert saved_user.email == email + id_token = _sign_in_with_password(email, 'password', api_key) + assert len(id_token) > 0 + finally: + auth.delete_user(uid) + +def test_password_reset(new_user_email_unverified, api_key): + link = auth.generate_password_reset_link(new_user_email_unverified.email) + assert isinstance(link, str) + query_dict = _extract_link_params(link) + user_email = _reset_password(query_dict['oobCode'], 'newPassword', api_key) + assert new_user_email_unverified.email == user_email + # password reset also set email_verified to True + assert auth.get_user(new_user_email_unverified.uid).email_verified + +def test_email_verification(new_user_email_unverified, api_key): + link = 
auth.generate_email_verification_link(new_user_email_unverified.email) + assert isinstance(link, str) + query_dict = _extract_link_params(link) + user_email = _verify_email(query_dict['oobCode'], api_key) + assert new_user_email_unverified.email == user_email + assert auth.get_user(new_user_email_unverified.uid).email_verified + +def test_password_reset_with_settings(new_user_email_unverified, api_key): + action_code_settings = auth.ActionCodeSettings(ACTION_LINK_CONTINUE_URL) + link = auth.generate_password_reset_link(new_user_email_unverified.email, + action_code_settings=action_code_settings) + assert isinstance(link, str) + query_dict = _extract_link_params(link) + assert query_dict['continueUrl'] == ACTION_LINK_CONTINUE_URL + user_email = _reset_password(query_dict['oobCode'], 'newPassword', api_key) + assert new_user_email_unverified.email == user_email + # password reset also set email_verified to True + assert auth.get_user(new_user_email_unverified.uid).email_verified + +def test_email_verification_with_settings(new_user_email_unverified, api_key): + action_code_settings = auth.ActionCodeSettings(ACTION_LINK_CONTINUE_URL) + link = auth.generate_email_verification_link(new_user_email_unverified.email, + action_code_settings=action_code_settings) + assert isinstance(link, str) + query_dict = _extract_link_params(link) + assert query_dict['continueUrl'] == ACTION_LINK_CONTINUE_URL + user_email = _verify_email(query_dict['oobCode'], api_key) + assert new_user_email_unverified.email == user_email + assert auth.get_user(new_user_email_unverified.uid).email_verified + +def test_email_sign_in_with_settings(new_user_email_unverified, api_key): + action_code_settings = auth.ActionCodeSettings(ACTION_LINK_CONTINUE_URL) + link = auth.generate_sign_in_with_email_link(new_user_email_unverified.email, + action_code_settings=action_code_settings) + assert isinstance(link, str) + query_dict = _extract_link_params(link) + assert query_dict['continueUrl'] == 
ACTION_LINK_CONTINUE_URL + oob_code = query_dict['oobCode'] + id_token = _sign_in_with_email_link(new_user_email_unverified.email, oob_code, api_key) + assert id_token is not None and len(id_token) > 0 + assert auth.get_user(new_user_email_unverified.uid).email_verified + +def test_auth_error_parse(new_user_email_unverified): + action_code_settings = auth.ActionCodeSettings( + ACTION_LINK_CONTINUE_URL, handle_code_in_app=True, link_domain="cool.link") + with pytest.raises(auth.InvalidHostingLinkDomainError) as excinfo: + auth.generate_sign_in_with_email_link(new_user_email_unverified.email, + action_code_settings=action_code_settings) + assert str(excinfo.value) == ('The provided hosting link domain is not configured in Firebase ' + 'Hosting or is not owned by the current project ' + '(INVALID_HOSTING_LINK_DOMAIN). The provided hosting link ' + 'domain is not configured in Firebase Hosting or is not owned ' + 'by the current project. This cannot be a default hosting domain ' + '(web.app or firebaseapp.com).') + + +@pytest.fixture(scope='module') +def oidc_provider(): + provider_config = _create_oidc_provider_config() + yield provider_config + auth.delete_oidc_provider_config(provider_config.provider_id) + + +def test_create_oidc_provider_config(oidc_provider): + assert isinstance(oidc_provider, auth.OIDCProviderConfig) + assert oidc_provider.client_id == 'OIDC_CLIENT_ID' + assert oidc_provider.issuer == 'https://oidc.com/issuer' + assert oidc_provider.display_name == 'OIDC_DISPLAY_NAME' + assert oidc_provider.enabled is True + assert oidc_provider.id_token_response_type is True + assert oidc_provider.code_response_type is False + assert oidc_provider.client_secret is None + + +def test_get_oidc_provider_config(oidc_provider): + provider_config = auth.get_oidc_provider_config(oidc_provider.provider_id) + assert isinstance(provider_config, auth.OIDCProviderConfig) + assert provider_config.provider_id == oidc_provider.provider_id + assert provider_config.client_id == 
'OIDC_CLIENT_ID' + assert provider_config.issuer == 'https://oidc.com/issuer' + assert provider_config.display_name == 'OIDC_DISPLAY_NAME' + assert provider_config.enabled is True + assert provider_config.id_token_response_type is True + assert provider_config.code_response_type is False + assert provider_config.client_secret is None + + +def test_list_oidc_provider_configs(oidc_provider): + page = auth.list_oidc_provider_configs() + result = None + for provider_config in page.iterate_all(): + if provider_config.provider_id == oidc_provider.provider_id: + result = provider_config + break + + assert result is not None + + +def test_update_oidc_provider_config(): + provider_config = _create_oidc_provider_config() + try: + provider_config = auth.update_oidc_provider_config( + provider_config.provider_id, + client_id='UPDATED_OIDC_CLIENT_ID', + issuer='https://oidc.com/updated_issuer', + display_name='UPDATED_OIDC_DISPLAY_NAME', + enabled=False, + client_secret='CLIENT_SECRET', + id_token_response_type=False, + code_response_type=True) + assert provider_config.client_id == 'UPDATED_OIDC_CLIENT_ID' + assert provider_config.issuer == 'https://oidc.com/updated_issuer' + assert provider_config.display_name == 'UPDATED_OIDC_DISPLAY_NAME' + assert provider_config.enabled is False + assert provider_config.id_token_response_type is False + assert provider_config.code_response_type is True + assert provider_config.client_secret == 'CLIENT_SECRET' + finally: + auth.delete_oidc_provider_config(provider_config.provider_id) + + +def test_delete_oidc_provider_config(): + provider_config = _create_oidc_provider_config() + auth.delete_oidc_provider_config(provider_config.provider_id) + with pytest.raises(auth.ConfigurationNotFoundError): + auth.get_oidc_provider_config(provider_config.provider_id) + + +@pytest.fixture(scope='module') +def saml_provider(): + provider_config = _create_saml_provider_config() + yield provider_config + 
auth.delete_saml_provider_config(provider_config.provider_id) + + +def test_create_saml_provider_config(saml_provider): + assert isinstance(saml_provider, auth.SAMLProviderConfig) + assert saml_provider.idp_entity_id == 'IDP_ENTITY_ID' + assert saml_provider.sso_url == 'https://example.com/login' + assert saml_provider.x509_certificates == [X509_CERTIFICATES[0]] + assert saml_provider.rp_entity_id == 'RP_ENTITY_ID' + assert saml_provider.callback_url == 'https://projectId.firebaseapp.com/__/auth/handler' + assert saml_provider.display_name == 'SAML_DISPLAY_NAME' + assert saml_provider.enabled is True + + +def test_get_saml_provider_config(saml_provider): + provider_config = auth.get_saml_provider_config(saml_provider.provider_id) + assert isinstance(provider_config, auth.SAMLProviderConfig) + assert provider_config.provider_id == saml_provider.provider_id + assert provider_config.idp_entity_id == 'IDP_ENTITY_ID' + assert provider_config.sso_url == 'https://example.com/login' + assert provider_config.x509_certificates == [X509_CERTIFICATES[0]] + assert provider_config.rp_entity_id == 'RP_ENTITY_ID' + assert provider_config.callback_url == 'https://projectId.firebaseapp.com/__/auth/handler' + assert provider_config.display_name == 'SAML_DISPLAY_NAME' + assert provider_config.enabled is True + + +def test_list_saml_provider_configs(saml_provider): + page = auth.list_saml_provider_configs() + result = None + for provider_config in page.iterate_all(): + if provider_config.provider_id == saml_provider.provider_id: + result = provider_config + break + + assert result is not None + + +def test_update_saml_provider_config(): + provider_config = _create_saml_provider_config() + try: + provider_config = auth.update_saml_provider_config( + provider_config.provider_id, + idp_entity_id='UPDATED_IDP_ENTITY_ID', + sso_url='https://example.com/updated_login', + x509_certificates=[X509_CERTIFICATES[1]], + rp_entity_id='UPDATED_RP_ENTITY_ID', + 
callback_url='https://updatedProjectId.firebaseapp.com/__/auth/handler', + display_name='UPDATED_SAML_DISPLAY_NAME', + enabled=False) + assert provider_config.idp_entity_id == 'UPDATED_IDP_ENTITY_ID' + assert provider_config.sso_url == 'https://example.com/updated_login' + assert provider_config.x509_certificates == [X509_CERTIFICATES[1]] + assert provider_config.rp_entity_id == 'UPDATED_RP_ENTITY_ID' + assert provider_config.callback_url == ('https://updatedProjectId.firebaseapp.com/' + '__/auth/handler') + assert provider_config.display_name == 'UPDATED_SAML_DISPLAY_NAME' + assert provider_config.enabled is False + finally: + auth.delete_saml_provider_config(provider_config.provider_id) + + +def test_delete_saml_provider_config(): + provider_config = _create_saml_provider_config() + auth.delete_saml_provider_config(provider_config.provider_id) + with pytest.raises(auth.ConfigurationNotFoundError): + auth.get_saml_provider_config(provider_config.provider_id) + + +def _create_oidc_provider_config(): + provider_id = f'oidc.{_random_string()}' + return auth.create_oidc_provider_config( + provider_id=provider_id, + client_id='OIDC_CLIENT_ID', + issuer='https://oidc.com/issuer', + display_name='OIDC_DISPLAY_NAME', + enabled=True, + id_token_response_type=True, + code_response_type=False) + + +def _create_saml_provider_config(): + provider_id = f'saml.{_random_string()}' + return auth.create_saml_provider_config( + provider_id=provider_id, + idp_entity_id='IDP_ENTITY_ID', + sso_url='https://example.com/login', + x509_certificates=[X509_CERTIFICATES[0]], + rp_entity_id='RP_ENTITY_ID', + callback_url='https://projectId.firebaseapp.com/__/auth/handler', + display_name='SAML_DISPLAY_NAME', + enabled=True) + + +class CredentialWrapper(credentials.Base): + """A custom Firebase credential that wraps an OAuth2 token.""" + + def __init__(self, token): + self._delegate = google.oauth2.credentials.Credentials(token) + + def get_credential(self): + return self._delegate + + 
@classmethod + def from_existing_credential(cls, google_cred): + if not google_cred.token: + request = transport.requests.Request() + google_cred.refresh(request) + return CredentialWrapper(google_cred.token) diff --git a/integration/test_db.py b/integration/test_db.py new file mode 100644 index 000000000..1ceb0b992 --- /dev/null +++ b/integration/test_db.py @@ -0,0 +1,446 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Integration tests for firebase_admin.db module.""" +import collections +import json +import os +import time + +import pytest + +import firebase_admin +from firebase_admin import db +from firebase_admin import exceptions +from integration import conftest +from tests import testutils + + +def integration_conf(request): + host_override = os.environ.get('FIREBASE_DATABASE_EMULATOR_HOST') + if host_override: + return None, 'fake-project-id' + + return conftest.integration_conf(request) + + +@pytest.fixture(scope='module') +def app(request): + cred, project_id = integration_conf(request) + ops = { + 'databaseURL' : f'https://{project_id}.firebaseio.com', + } + return firebase_admin.initialize_app(cred, ops, name='integration-db') + + +@pytest.fixture(scope='module', autouse=True) +def default_app(): + # Overwrites the default_app fixture in conftest.py. + # This test suite should not use the default app. Use the app fixture instead. 
+ pass + + +@pytest.fixture(scope='module') +def update_rules(app): + with open(testutils.resource_filename('dinosaurs_index.json'), encoding='utf-8') as rules_file: + new_rules = json.load(rules_file) + client = db.reference('', app)._client + rules = client.body('get', '/.settings/rules.json', params='format=strict') + existing = rules.get('rules') + if existing != new_rules: + rules['rules'] = new_rules + client.request('put', '/.settings/rules.json', json=rules) + +@pytest.fixture(scope='module') +def testdata(): + with open(testutils.resource_filename('dinosaurs.json'), encoding='utf-8') as dino_file: + return json.load(dino_file) + +@pytest.fixture(scope='module') +def testref(update_rules, testdata, app): + """Adds the necessary DB indices, and sets the initial values. + + This fixture is attached to the module scope, and therefore is guaranteed to run only once + during the execution of this test module. + + Returns: + Reference: A reference to the test dinosaur database. + """ + del update_rules + ref = db.reference('_adminsdk/python/dinodb', app) + ref.set(testdata) + return ref + + +class TestReferenceAttributes: + """Test cases for attributes exposed by db.Reference class.""" + + def test_ref_attributes(self, testref): + assert testref.key == 'dinodb' + assert testref.path == '/_adminsdk/python/dinodb' + + def test_child(self, testref): + child = testref.child('dinosaurs') + assert child.key == 'dinosaurs' + assert child.path == '/_adminsdk/python/dinodb/dinosaurs' + + def test_parent(self, testref): + parent = testref.parent + assert parent.key == 'python' + assert parent.path == '/_adminsdk/python' + + +class TestReadOperations: + """Test cases for reading node values.""" + + def test_get_value(self, testref, testdata): + value = testref.get() + assert isinstance(value, dict) + assert testdata == value + + def test_get_value_and_etag(self, testref, testdata): + value, etag = testref.get(etag=True) + assert isinstance(value, dict) + assert testdata == 
value + assert isinstance(etag, str) + + def test_get_shallow(self, testref): + value = testref.get(shallow=True) + assert isinstance(value, dict) + assert value == {'dinosaurs': True, 'scores': True} + + def test_get_if_changed(self, testref, testdata): + success, data, etag = testref.get_if_changed('wrong_etag') + assert success is True + assert data == testdata + assert isinstance(etag, str) + assert testref.get_if_changed(etag) == (False, None, None) + + def test_get_child_value(self, testref, testdata): + child = testref.child('dinosaurs') + assert child is not None + value = child.get() + assert isinstance(value, dict) + assert testdata['dinosaurs'] == value + + def test_get_grandchild_value(self, testref, testdata): + value = testref.child('dinosaurs').child('lambeosaurus').get() + assert isinstance(value, dict) + assert testdata['dinosaurs']['lambeosaurus'] == value + + def test_get_nonexisting_child_value(self, testref): + assert testref.child('none_existing').get() is None + + +class TestWriteOperations: + """Test cases for creating and updating node values.""" + + def test_push(self, testref): + python = testref.parent + ref = python.child('users').push() + assert ref.path == '/_adminsdk/python/users/' + ref.key + assert ref.get() == '' + + def test_push_with_value(self, testref): + python = testref.parent + value = {'name' : 'Luis Alvarez', 'since' : 1911} + ref = python.child('users').push(value) + assert ref.path == '/_adminsdk/python/users/' + ref.key + assert ref.get() == value + + def test_set_primitive_value(self, testref): + python = testref.parent + ref = python.child('users').push() + ref.set('value') + assert ref.get() == 'value' + + def test_set_complex_value(self, testref): + python = testref.parent + value = {'name' : 'Mary Anning', 'since' : 1799} + ref = python.child('users').push() + ref.set(value) + assert ref.get() == value + + def test_update_children(self, testref): + python = testref.parent + value = {'name' : 'Robert Bakker', 
'since' : 1945} + ref = python.child('users').push() + ref.update(value) + assert ref.get() == value + + def test_update_children_with_existing_values(self, testref): + python = testref.parent + value = {'name' : 'Edwin Colbert', 'since' : 1900, 'temp': True} + ref = python.child('users').push(value) + ref.update({'since' : 1905}) + value['since'] = 1905 + assert ref.get() == value + ref.update({'temp': None}) + del value['temp'] + assert ref.get() == value + + def test_update_nested_children(self, testref): + python = testref.parent + edward = python.child('users').push({'name' : 'Edward Cope', 'since' : 1800}) + jack = python.child('users').push({'name' : 'Jack Horner', 'since' : 1940}) + delta = { + f'{edward.key}/since' : 1840, + f'{jack.key}/since' : 1946 + } + python.child('users').update(delta) + assert edward.get() == {'name' : 'Edward Cope', 'since' : 1840} + assert jack.get() == {'name' : 'Jack Horner', 'since' : 1946} + + def test_set_if_unchanged(self, testref): + python = testref.parent + push_data = {'name' : 'Edward Cope', 'since' : 1800} + edward = python.child('users').push(push_data) + + update_data = {'name' : 'Jack Horner', 'since' : 1940} + success, data, etag = edward.set_if_unchanged('invalid-etag', update_data) + assert success is False + assert data == push_data + assert isinstance(etag, str) + + success, data, new_etag = edward.set_if_unchanged(etag, update_data) + assert success is True + assert data == update_data + assert new_etag != etag + + def test_transaction(self, testref): + python = testref.parent + def transaction_update(snapshot): + snapshot['name'] += ' Owen' + snapshot['since'] = 1804 + return snapshot + ref = python.child('users').push({'name' : 'Richard'}) + new_value = ref.transaction(transaction_update) + expected = {'name': 'Richard Owen', 'since': 1804} + assert new_value == expected + assert ref.get() == expected + + def test_transaction_scalar(self, testref): + python = testref.parent + ref = 
python.child('users/count') + ref.set(42) + new_value = ref.transaction(lambda x: x + 1 if x else 1) + expected = 43 + assert new_value == expected + assert ref.get() == expected + + def test_delete(self, testref): + python = testref.parent + ref = python.child('users').push('foo') + assert ref.get() == 'foo' + ref.delete() + assert ref.get() is None + +class TestListenOperations: + """Test cases for listening to changes to node values.""" + + def test_listen(self, testref): + self.events = [] + def callback(event): + self.events.append(event) + + python = testref.parent + registration = python.listen(callback) + try: + ref = python.child('users').push() + assert ref.path == '/_adminsdk/python/users/' + ref.key + assert ref.get() == '' + + self.wait_for(self.events, count=2) + assert len(self.events) == 2 + + assert self.events[1].event_type == 'put' + assert self.events[1].path == '/users/' + ref.key + assert self.events[1].data == '' + finally: + registration.close() + + @classmethod + def wait_for(cls, events, count=1, timeout_seconds=5): + must_end = time.time() + timeout_seconds + while time.time() < must_end: + if len(events) >= count: + return + raise pytest.fail('Timed out while waiting for events') + +class TestAdvancedQueries: + """Test cases for advanced interactions via the db.Query interface.""" + + height_sorted = [ + 'linhenykus', 'pterodactyl', 'lambeosaurus', + 'triceratops', 'stegosaurus', 'bruhathkayosaurus', + ] + + def test_order_by_key(self, testref): + value = testref.child('dinosaurs').order_by_key().get() + assert isinstance(value, collections.OrderedDict) + assert list(value.keys()) == [ + 'bruhathkayosaurus', 'lambeosaurus', 'linhenykus', + 'pterodactyl', 'stegosaurus', 'triceratops' + ] + + def test_order_by_value(self, testref): + value = testref.child('scores').order_by_value().get() + assert list(value.keys()) == [ + 'stegosaurus', 'lambeosaurus', 'triceratops', + 'bruhathkayosaurus', 'linhenykus', 'pterodactyl', + ] + + def 
test_order_by_child(self, testref): + value = testref.child('dinosaurs').order_by_child('height').get() + assert list(value.keys()) == self.height_sorted + + def test_limit_first(self, testref): + value = testref.child('dinosaurs').order_by_child('height').limit_to_first(2).get() + assert list(value.keys()) == self.height_sorted[:2] + + def test_limit_first_all(self, testref): + value = testref.child('dinosaurs').order_by_child('height').limit_to_first(10).get() + assert list(value.keys()) == self.height_sorted + + def test_limit_last(self, testref): + value = testref.child('dinosaurs').order_by_child('height').limit_to_last(2).get() + assert list(value.keys()) == self.height_sorted[-2:] + + def test_limit_last_all(self, testref): + value = testref.child('dinosaurs').order_by_child('height').limit_to_last(10).get() + assert list(value.keys()) == self.height_sorted + + def test_start_at(self, testref): + value = testref.child('dinosaurs').order_by_child('height').start_at(3.5).get() + assert list(value.keys()) == self.height_sorted[-2:] + + def test_end_at(self, testref): + value = testref.child('dinosaurs').order_by_child('height').end_at(3.5).get() + assert list(value.keys()) == self.height_sorted[:4] + + def test_start_and_end_at(self, testref): + value = testref.child('dinosaurs').order_by_child('height') \ + .start_at(2.5).end_at(5).get() + assert list(value.keys()) == self.height_sorted[-3:-1] + + def test_equal_to(self, testref): + value = testref.child('dinosaurs').order_by_child('height').equal_to(0.6).get() + assert list(value.keys()) == self.height_sorted[:2] + + def test_order_by_nested_child(self, testref): + value = testref.child('dinosaurs').order_by_child('ratings/pos').start_at(4).get() + assert len(value) == 3 + assert 'pterodactyl' in value + assert 'stegosaurus' in value + assert 'triceratops' in value + + def test_filter_by_key(self, testref): + value = testref.child('dinosaurs').order_by_key().limit_to_first(2).get() + assert len(value) == 2 + 
assert 'bruhathkayosaurus' in value + assert 'lambeosaurus' in value + + def test_filter_by_value(self, testref): + value = testref.child('scores').order_by_value().limit_to_last(2).get() + assert len(value) == 2 + assert 'pterodactyl' in value + assert 'linhenykus' in value + + +@pytest.fixture(scope='module') +def override_app(request, update_rules): + del update_rules + cred, project_id = integration_conf(request) + ops = { + 'databaseURL' : f'https://{project_id}.firebaseio.com', + 'databaseAuthVariableOverride' : {'uid' : 'user1'} + } + app = firebase_admin.initialize_app(cred, ops, 'db-override') + yield app + firebase_admin.delete_app(app) + +@pytest.fixture(scope='module') +def none_override_app(request, update_rules): + del update_rules + cred, project_id = integration_conf(request) + ops = { + 'databaseURL' : f'https://{project_id}.firebaseio.com', + 'databaseAuthVariableOverride' : None + } + app = firebase_admin.initialize_app(cred, ops, 'db-none-override') + yield app + firebase_admin.delete_app(app) + + +class TestAuthVariableOverride: + """Test cases for database auth variable overrides.""" + + def init_ref(self, path, app): + admin_ref = db.reference(path, app) + admin_ref.set('test') + assert admin_ref.get() == 'test' + + def test_no_access(self, app, override_app): + path = '_adminsdk/python/admin' + self.init_ref(path, app) + user_ref = db.reference(path, override_app) + with pytest.raises(exceptions.UnauthenticatedError) as excinfo: + assert user_ref.get() + assert str(excinfo.value) == 'Permission denied' + + with pytest.raises(exceptions.UnauthenticatedError) as excinfo: + user_ref.set('test2') + assert str(excinfo.value) == 'Permission denied' + + def test_read(self, app, override_app): + path = '_adminsdk/python/protected/user2' + self.init_ref(path, app) + user_ref = db.reference(path, override_app) + assert user_ref.get() == 'test' + with pytest.raises(exceptions.UnauthenticatedError) as excinfo: + user_ref.set('test2') + assert 
str(excinfo.value) == 'Permission denied' + + def test_read_write(self, app, override_app): + path = '_adminsdk/python/protected/user1' + self.init_ref(path, app) + user_ref = db.reference(path, override_app) + assert user_ref.get() == 'test' + user_ref.set('test2') + assert user_ref.get() == 'test2' + + def test_query(self, override_app): + user_ref = db.reference('_adminsdk/python/protected', override_app) + with pytest.raises(exceptions.UnauthenticatedError) as excinfo: + user_ref.order_by_key().limit_to_first(2).get() + assert str(excinfo.value) == 'Permission denied' + + def test_none_auth_override(self, app, none_override_app): + path = '_adminsdk/python/public' + self.init_ref(path, app) + public_ref = db.reference(path, none_override_app) + assert public_ref.get() == 'test' + + ref = db.reference('_adminsdk/python', none_override_app) + with pytest.raises(exceptions.UnauthenticatedError) as excinfo: + assert ref.child('protected/user1').get() + assert str(excinfo.value) == 'Permission denied' + + with pytest.raises(exceptions.UnauthenticatedError) as excinfo: + assert ref.child('protected/user2').get() + assert str(excinfo.value) == 'Permission denied' + + with pytest.raises(exceptions.UnauthenticatedError) as excinfo: + assert ref.child('admin').get() + assert str(excinfo.value) == 'Permission denied' diff --git a/integration/test_firestore.py b/integration/test_firestore.py new file mode 100644 index 000000000..96cdd3fb1 --- /dev/null +++ b/integration/test_firestore.py @@ -0,0 +1,104 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Integration tests for firebase_admin.firestore module.""" +import datetime + +from firebase_admin import firestore + +_CITY = { + 'name': 'Mountain View', + 'country': 'USA', + 'population': 77846, + 'capital': False + } + +_MOVIE = { + 'Name': 'Interstellar', + 'Year': 2014, + 'Runtime': '2h 49m', + 'Academy Award Winner': True + } + + +def test_firestore(): + client = firestore.client() + expected = { + 'name': 'Mountain View', + 'country': 'USA', + 'population': 77846, + 'capital': False + } + doc = client.collection('cities').document() + doc.set(expected) + + data = doc.get().to_dict() + assert data == expected + + doc.delete() + assert doc.get().exists is False + +def test_firestore_explicit_database_id(): + client = firestore.client(database_id='testing-database') + expected = _CITY + doc = client.collection('cities').document() + doc.set(expected) + + data = doc.get() + assert data.to_dict() == expected + + doc.delete() + data = doc.get() + assert data.exists is False + +def test_firestore_multi_db(): + city_client = firestore.client() + movie_client = firestore.client(database_id='testing-database') + + expected_city = _CITY + expected_movie = _MOVIE + + city_doc = city_client.collection('cities').document() + movie_doc = movie_client.collection('movies').document() + + city_doc.set(expected_city) + movie_doc.set(expected_movie) + + city_data = city_doc.get() + movie_data = movie_doc.get() + + assert city_data.to_dict() == expected_city + assert movie_data.to_dict() == expected_movie + + city_doc.delete() + movie_doc.delete() + + city_data = 
city_doc.get() + movie_data = movie_doc.get() + + assert city_data.exists is False + assert movie_data.exists is False + +def test_server_timestamp(): + client = firestore.client() + expected = { + 'name': 'Mountain View', + 'timestamp': firestore.SERVER_TIMESTAMP # pylint: disable=no-member + } + doc = client.collection('cities').document() + doc.set(expected) + + data = doc.get().to_dict() + assert isinstance(data['timestamp'], datetime.datetime) + doc.delete() diff --git a/integration/test_firestore_async.py b/integration/test_firestore_async.py new file mode 100644 index 000000000..e899f25b2 --- /dev/null +++ b/integration/test_firestore_async.py @@ -0,0 +1,114 @@ +# Copyright 2022 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Integration tests for firebase_admin.firestore_async module.""" +import asyncio +import datetime +import pytest + +from firebase_admin import firestore_async + +_CITY = { + 'name': 'Mountain View', + 'country': 'USA', + 'population': 77846, + 'capital': False + } + +_MOVIE = { + 'Name': 'Interstellar', + 'Year': 2014, + 'Runtime': '2h 49m', + 'Academy Award Winner': True + } + + +@pytest.mark.asyncio(loop_scope="session") +async def test_firestore_async(): + client = firestore_async.client() + expected = _CITY + doc = client.collection('cities').document() + await doc.set(expected) + + data = await doc.get() + assert data.to_dict() == expected + + await doc.delete() + data = await doc.get() + assert data.exists is False + +@pytest.mark.asyncio(loop_scope="session") +async def test_firestore_async_explicit_database_id(): + client = firestore_async.client(database_id='testing-database') + expected = _CITY + doc = client.collection('cities').document() + await doc.set(expected) + + data = await doc.get() + assert data.to_dict() == expected + + await doc.delete() + data = await doc.get() + assert data.exists is False + +@pytest.mark.asyncio(loop_scope="session") +async def test_firestore_async_multi_db(): + city_client = firestore_async.client() + movie_client = firestore_async.client(database_id='testing-database') + + expected_city = _CITY + expected_movie = _MOVIE + + city_doc = city_client.collection('cities').document() + movie_doc = movie_client.collection('movies').document() + + await asyncio.gather( + city_doc.set(expected_city), + movie_doc.set(expected_movie) + ) + + data = await asyncio.gather( + city_doc.get(), + movie_doc.get() + ) + + assert data[0].to_dict() == expected_city + assert data[1].to_dict() == expected_movie + + await asyncio.gather( + city_doc.delete(), + movie_doc.delete() + ) + + data = await asyncio.gather( + city_doc.get(), + movie_doc.get() + ) + assert data[0].exists is False + assert data[1].exists is False + 
+@pytest.mark.asyncio(loop_scope="session") +async def test_server_timestamp(): + client = firestore_async.client() + expected = { + 'name': 'Mountain View', + 'timestamp': firestore_async.SERVER_TIMESTAMP # pylint: disable=no-member + } + doc = client.collection('cities').document() + await doc.set(expected) + + data = await doc.get() + data = data.to_dict() + assert isinstance(data['timestamp'], datetime.datetime) + await doc.delete() diff --git a/integration/test_functions.py b/integration/test_functions.py new file mode 100644 index 000000000..fc972f9e5 --- /dev/null +++ b/integration/test_functions.py @@ -0,0 +1,88 @@ +# Copyright 2024 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Integration tests for firebase_admin.functions module.""" + +import os +import pytest + +import firebase_admin +from firebase_admin import functions +from firebase_admin import _utils +from integration import conftest + + +_DEFAULT_DATA = {'data': {'city': 'Seattle'}} +def integration_conf(request): + host_override = os.environ.get('CLOUD_TASKS_EMULATOR_HOST') + if host_override: + return _utils.EmulatorAdminCredentials(), 'fake-project-id' + + return conftest.integration_conf(request) + +@pytest.fixture(scope='module') +def app(request): + cred, project_id = integration_conf(request) + return firebase_admin.initialize_app( + cred, options={'projectId': project_id}, name='integration-functions') + +@pytest.fixture(scope='module', autouse=True) +def default_app(): + # Overwrites the default_app fixture in conftest.py. + # This test suite should not use the default app. Use the app fixture instead. + pass + + +class TestFunctions: + + _TEST_FUNCTIONS_PARAMS = [ + {'function_name': 'function-name'}, + {'function_name': 'projects/test-project/locations/test-location/functions/function-name'}, + {'function_name': 'function-name', 'extension_id': 'extension-id'}, + { + 'function_name': \ + 'projects/test-project/locations/test-location/functions/function-name', + 'extension_id': 'extension-id' + } + ] + + @pytest.mark.parametrize('task_queue_params', _TEST_FUNCTIONS_PARAMS) + def test_task_queue(self, task_queue_params, app): + assert app.name == 'integration-functions' + queue = functions.task_queue(**task_queue_params, app=app) + assert queue is not None + assert callable(queue.enqueue) + assert callable(queue.delete) + + def test_task_enqueue(self, app): + queue = functions.task_queue('testTaskQueue', app=app) + task_id = queue.enqueue(_DEFAULT_DATA) + assert task_id is not None + + @pytest.mark.skipif( + os.environ.get('CLOUD_TASKS_EMULATOR_HOST') is not None, + reason="Skipping test_task_delete against emulator due to bug in firebase-tools" + ) + def 
test_task_delete(self, app): + # Skip this test against the emulator since tasks can't be delayed there to verify deletion + # See: https://github.com/firebase/firebase-tools/issues/8254 + task_options = functions.TaskOptions(schedule_delay_seconds=60) + queue = functions.task_queue('testTaskQueue', app=app) + task_id = queue.enqueue(_DEFAULT_DATA, task_options) + assert task_id is not None + queue.delete(task_id) + # We don't have a way to check the contents of the queue so we check that the deleted + # task is not found using the delete method again. + with pytest.raises(firebase_admin.exceptions.NotFoundError): + queue.delete(task_id) diff --git a/integration/test_instance_id.py b/integration/test_instance_id.py new file mode 100644 index 000000000..99b6787d3 --- /dev/null +++ b/integration/test_instance_id.py @@ -0,0 +1,26 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Integration tests for firebase_admin.instance_id module.""" + +import pytest + +from firebase_admin import exceptions +from firebase_admin import instance_id + +def test_delete_non_existing(): + with pytest.raises(exceptions.NotFoundError) as excinfo: + # legal instance IDs are /[cdef][A-Za-z0-9_-]{9}[AEIMQUYcgkosw048]/ + instance_id.delete_instance_id('fictive-ID0') + assert str(excinfo.value) == 'Instance ID "fictive-ID0": Failed to find the instance ID.' 
diff --git a/integration/test_messaging.py b/integration/test_messaging.py new file mode 100644 index 000000000..e72086741 --- /dev/null +++ b/integration/test_messaging.py @@ -0,0 +1,223 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Integration tests for firebase_admin.messaging module.""" + +import re +from datetime import datetime + +import pytest + +from firebase_admin import exceptions +from firebase_admin import messaging + + +_REGISTRATION_TOKEN = ('fGw0qy4TGgk:APA91bGtWGjuhp4WRhHXgbabIYp1jxEKI08ofj_v1bKhWAGJQ4e3arRCWzeTf' + 'HaLz83mBnDh0aPWB1AykXAVUUGl2h1wT4XI6XazWpvY7RBUSYfoxtqSWGIm2nvWh2BOP1YG50' + '1SsRoE') + + +def test_send(): + msg = messaging.Message( + topic='foo-bar', + notification=messaging.Notification('test-title', 'test-body', + 'https://images.unsplash.com/photo-1494438639946' + '-1ebd1d20bf85?fit=crop&w=900&q=60'), + android=messaging.AndroidConfig( + restricted_package_name='com.google.firebase.demos', + notification=messaging.AndroidNotification( + title='android-title', + body='android-body', + image='https://images.unsplash.com/' + 'photo-1494438639946-1ebd1d20bf85?fit=crop&w=900&q=60', + event_timestamp=datetime.now(), + priority='high', + vibrate_timings_millis=[100, 200, 300, 400], + visibility='public', + sticky=True, + local_only=False, + default_vibrate_timings=False, + default_sound=True, + default_light_settings=False, + light_settings=messaging.LightSettings( + color='#aabbcc', + 
light_off_duration_millis=200, + light_on_duration_millis=300 + ), + notification_count=1, + proxy='if_priority_lowered', + ) + ), + apns=messaging.APNSConfig(payload=messaging.APNSPayload( + aps=messaging.Aps( + alert=messaging.ApsAlert( + title='apns-title', + body='apns-body' + ) + ) + )) + ) + msg_id = messaging.send(msg, dry_run=True) + assert re.match('^projects/.*/messages/.*$', msg_id) + +def test_send_invalid_token(): + msg = messaging.Message( + token=_REGISTRATION_TOKEN, + notification=messaging.Notification('test-title', 'test-body') + ) + with pytest.raises(messaging.UnregisteredError): + messaging.send(msg, dry_run=True) + +def test_send_malformed_token(): + msg = messaging.Message( + token='not-a-token', + notification=messaging.Notification('test-title', 'test-body') + ) + with pytest.raises(exceptions.InvalidArgumentError): + messaging.send(msg, dry_run=True) + +def test_send_each(): + messages = [ + messaging.Message( + topic='foo-bar', notification=messaging.Notification('Title', 'Body')), + messaging.Message( + topic='foo-bar', notification=messaging.Notification('Title', 'Body')), + messaging.Message( + token='not-a-token', notification=messaging.Notification('Title', 'Body')), + ] + + batch_response = messaging.send_each(messages, dry_run=True) + + assert batch_response.success_count == 2 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 3 + + response = batch_response.responses[0] + assert response.success is True + assert response.exception is None + assert re.match('^projects/.*/messages/.*$', response.message_id) + + response = batch_response.responses[1] + assert response.success is True + assert response.exception is None + assert re.match('^projects/.*/messages/.*$', response.message_id) + + response = batch_response.responses[2] + assert response.success is False + assert isinstance(response.exception, exceptions.InvalidArgumentError) + assert response.message_id is None + +def test_send_each_500(): + 
messages = [] + for msg_number in range(500): + topic = f'foo-bar-{msg_number % 10}' + messages.append(messaging.Message(topic=topic)) + + batch_response = messaging.send_each(messages, dry_run=True) + + assert batch_response.success_count == 500 + assert batch_response.failure_count == 0 + assert len(batch_response.responses) == 500 + for response in batch_response.responses: + assert response.success is True + assert response.exception is None + assert re.match('^projects/.*/messages/.*$', response.message_id) + +def test_send_each_for_multicast(): + multicast = messaging.MulticastMessage( + notification=messaging.Notification('Title', 'Body'), + tokens=['not-a-token', 'also-not-a-token']) + + batch_response = messaging.send_each_for_multicast(multicast) + + assert batch_response.success_count == 0 + assert batch_response.failure_count == 2 + assert len(batch_response.responses) == 2 + for response in batch_response.responses: + assert response.success is False + assert response.exception is not None + assert response.message_id is None + +def test_subscribe(): + resp = messaging.subscribe_to_topic(_REGISTRATION_TOKEN, 'mock-topic') + assert resp.success_count + resp.failure_count == 1 + +def test_unsubscribe(): + resp = messaging.unsubscribe_from_topic(_REGISTRATION_TOKEN, 'mock-topic') + assert resp.success_count + resp.failure_count == 1 + +@pytest.mark.asyncio(loop_scope="session") +async def test_send_each_async(): + messages = [ + messaging.Message( + topic='foo-bar', notification=messaging.Notification('Title', 'Body')), + messaging.Message( + topic='foo-bar', notification=messaging.Notification('Title', 'Body')), + messaging.Message( + token='not-a-token', notification=messaging.Notification('Title', 'Body')), + ] + + batch_response = await messaging.send_each_async(messages, dry_run=True) + + assert batch_response.success_count == 2 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 3 + + response = 
batch_response.responses[0] + assert response.success is True + assert response.exception is None + assert re.match('^projects/.*/messages/.*$', response.message_id) + + response = batch_response.responses[1] + assert response.success is True + assert response.exception is None + assert re.match('^projects/.*/messages/.*$', response.message_id) + + response = batch_response.responses[2] + assert response.success is False + assert isinstance(response.exception, exceptions.InvalidArgumentError) + assert response.message_id is None + +@pytest.mark.asyncio(loop_scope="session") +async def test_send_each_async_500(): + messages = [] + for msg_number in range(500): + topic = f'foo-bar-{msg_number % 10}' + messages.append(messaging.Message(topic=topic)) + + batch_response = await messaging.send_each_async(messages, dry_run=True) + + assert batch_response.success_count == 500 + assert batch_response.failure_count == 0 + assert len(batch_response.responses) == 500 + for response in batch_response.responses: + assert response.success is True + assert response.exception is None + assert re.match('^projects/.*/messages/.*$', response.message_id) + +@pytest.mark.asyncio(loop_scope="session") +async def test_send_each_for_multicast_async(): + multicast = messaging.MulticastMessage( + notification=messaging.Notification('Title', 'Body'), + tokens=['not-a-token', 'also-not-a-token']) + + batch_response = await messaging.send_each_for_multicast_async(multicast) + + assert batch_response.success_count == 0 + assert batch_response.failure_count == 2 + assert len(batch_response.responses) == 2 + for response in batch_response.responses: + assert response.success is False + assert response.exception is not None + assert response.message_id is None diff --git a/integration/test_ml.py b/integration/test_ml.py new file mode 100644 index 000000000..ea5b10be9 --- /dev/null +++ b/integration/test_ml.py @@ -0,0 +1,378 @@ +# Copyright 2020 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Integration tests for firebase_admin.ml module.""" +import os +import random +import re +import shutil +import string +import tempfile + +import pytest + +from firebase_admin import exceptions +from firebase_admin import ml +from tests import testutils + + +# pylint: disable=import-error, no-member +try: + import tensorflow as tf + _TF_ENABLED = True +except ImportError: + _TF_ENABLED = False + +def _random_identifier(prefix): + #pylint: disable=unused-variable + suffix = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(8)]) + return f'{prefix}_{suffix}' + + +NAME_ONLY_ARGS = { + 'display_name': _random_identifier('TestModel_') +} +NAME_ONLY_ARGS_UPDATED = { + 'display_name': _random_identifier('TestModel_updated_') +} +NAME_AND_TAGS_ARGS = { + 'display_name': _random_identifier('TestModel_tags_'), + 'tags': ['test_tag123'] +} +FULL_MODEL_ARGS = { + 'display_name': _random_identifier('TestModel_full_'), + 'tags': ['test_tag567'], + 'file_name': 'model1.tflite' +} +INVALID_FULL_MODEL_ARGS = { + 'display_name': _random_identifier('TestModel_invalid_full_'), + 'tags': ['test_tag890'], + 'file_name': 'invalid_model.tflite' +} + +@pytest.fixture +def firebase_model(request): + args = request.param + tflite_format = None + file_name = args.get('file_name') + if file_name: + file_path = testutils.resource_filename(file_name) + source = ml.TFLiteGCSModelSource.from_tflite_model_file(file_path) + 
tflite_format = ml.TFLiteFormat(model_source=source) + + ml_model = ml.Model( + display_name=args.get('display_name'), + tags=args.get('tags'), + model_format=tflite_format) + model = ml.create_model(model=ml_model) + yield model + _clean_up_model(model) + + +@pytest.fixture +def model_list(): + ml_model_1 = ml.Model(display_name=_random_identifier('TestModel123_list1_')) + model_1 = ml.create_model(model=ml_model_1) + + ml_model_2 = ml.Model(display_name=_random_identifier('TestModel123_list2_'), + tags=['test_tag123']) + model_2 = ml.create_model(model=ml_model_2) + + yield [model_1, model_2] + + _clean_up_model(model_1) + _clean_up_model(model_2) + + +def _clean_up_model(model): + try: + # Try to delete the model. + # Some tests delete the model as part of the test. + model.wait_for_unlocked() + ml.delete_model(model.model_id) + except exceptions.NotFoundError: + pass + + +# For rpc errors +def check_firebase_error(excinfo, status, msg): + err = excinfo.value + assert isinstance(err, exceptions.FirebaseError) + assert err.cause is not None + assert err.http_response is not None + assert err.http_response.status_code == status + assert str(err) == msg + + +# For operation errors +def check_operation_error(excinfo, msg): + err = excinfo.value + assert isinstance(err, exceptions.FirebaseError) + assert str(err) == msg + + +def check_model(model, args): + assert model.display_name == args.get('display_name') + assert model.tags == args.get('tags') + assert model.model_id is not None + assert model.create_time is not None + assert model.update_time is not None + assert model.locked is False + assert model.etag is not None + +# Model Format Checks + +def check_no_model_format(model): + assert model.model_format is None + assert model.validation_error == 'No model file has been uploaded.' 
+ assert model.published is False + assert model.model_hash is None + + +def check_tflite_gcs_format(model, validation_error=None): + assert model.validation_error == validation_error + assert model.published is False + assert model.model_format.model_source.gcs_tflite_uri.startswith('gs://') + if validation_error: + assert model.model_format.size_bytes is None + assert model.model_hash is None + else: + assert model.model_format.size_bytes is not None + assert model.model_hash is not None + + +@pytest.mark.parametrize('firebase_model', [NAME_AND_TAGS_ARGS], indirect=True) +def test_create_simple_model(firebase_model): + check_model(firebase_model, NAME_AND_TAGS_ARGS) + check_no_model_format(firebase_model) + + +@pytest.mark.parametrize('firebase_model', [FULL_MODEL_ARGS], indirect=True) +def test_create_full_model(firebase_model): + check_model(firebase_model, FULL_MODEL_ARGS) + check_tflite_gcs_format(firebase_model) + + +@pytest.mark.parametrize('firebase_model', [FULL_MODEL_ARGS], indirect=True) +def test_create_already_existing_fails(firebase_model): + with pytest.raises(exceptions.AlreadyExistsError) as excinfo: + ml.create_model(model=firebase_model) + check_operation_error( + excinfo, + f'Model \'{firebase_model.display_name}\' already exists') + + +@pytest.mark.parametrize('firebase_model', [INVALID_FULL_MODEL_ARGS], indirect=True) +def test_create_invalid_model(firebase_model): + check_model(firebase_model, INVALID_FULL_MODEL_ARGS) + check_tflite_gcs_format(firebase_model, 'Invalid flatbuffer format') + + +@pytest.mark.parametrize('firebase_model', [NAME_AND_TAGS_ARGS], indirect=True) +def test_get_model(firebase_model): + get_model = ml.get_model(firebase_model.model_id) + check_model(get_model, NAME_AND_TAGS_ARGS) + check_no_model_format(get_model) + + +@pytest.mark.parametrize('firebase_model', [NAME_ONLY_ARGS], indirect=True) +def test_get_non_existing_model(firebase_model): + # Get a valid model_id that no longer exists + 
ml.delete_model(firebase_model.model_id) + + with pytest.raises(exceptions.NotFoundError) as excinfo: + ml.get_model(firebase_model.model_id) + check_firebase_error(excinfo, 404, 'Requested entity was not found.') + + +@pytest.mark.parametrize('firebase_model', [NAME_ONLY_ARGS], indirect=True) +def test_update_model(firebase_model): + new_model_name = NAME_ONLY_ARGS_UPDATED.get('display_name') + firebase_model.display_name = new_model_name + updated_model = ml.update_model(firebase_model) + check_model(updated_model, NAME_ONLY_ARGS_UPDATED) + check_no_model_format(updated_model) + + # Second call with same model does not cause error + updated_model2 = ml.update_model(updated_model) + check_model(updated_model2, NAME_ONLY_ARGS_UPDATED) + check_no_model_format(updated_model2) + + +@pytest.mark.parametrize('firebase_model', [NAME_ONLY_ARGS], indirect=True) +def test_update_non_existing_model(firebase_model): + ml.delete_model(firebase_model.model_id) + + firebase_model.tags = ['tag987'] + with pytest.raises(exceptions.NotFoundError) as excinfo: + ml.update_model(firebase_model) + check_operation_error( + excinfo, + f'Model \'{firebase_model.as_dict().get("name")}\' was not found') + + +@pytest.mark.parametrize('firebase_model', [FULL_MODEL_ARGS], indirect=True) +def test_publish_unpublish_model(firebase_model): + assert firebase_model.published is False + + published_model = ml.publish_model(firebase_model.model_id) + assert published_model.published is True + + unpublished_model = ml.unpublish_model(published_model.model_id) + assert unpublished_model.published is False + + +@pytest.mark.parametrize('firebase_model', [NAME_ONLY_ARGS], indirect=True) +def test_publish_invalid_fails(firebase_model): + assert firebase_model.validation_error is not None + + with pytest.raises(exceptions.FailedPreconditionError) as excinfo: + ml.publish_model(firebase_model.model_id) + check_operation_error( + excinfo, + 'Cannot publish a model that is not verified.') + + 
+@pytest.mark.parametrize('firebase_model', [FULL_MODEL_ARGS], indirect=True) +def test_publish_unpublish_non_existing_model(firebase_model): + ml.delete_model(firebase_model.model_id) + + with pytest.raises(exceptions.NotFoundError) as excinfo: + ml.publish_model(firebase_model.model_id) + check_operation_error( + excinfo, + f'Model \'{firebase_model.as_dict().get("name")}\' was not found') + + with pytest.raises(exceptions.NotFoundError) as excinfo: + ml.unpublish_model(firebase_model.model_id) + check_operation_error( + excinfo, + f'Model \'{firebase_model.as_dict().get("name")}\' was not found') + + +def test_list_models(model_list): + filter_str = f'displayName={model_list[0].display_name} OR tags:{model_list[1].tags[0]}' + + all_models = ml.list_models(list_filter=filter_str) + all_model_ids = [mdl.model_id for mdl in all_models.iterate_all()] + for mdl in model_list: + assert mdl.model_id in all_model_ids + + +def test_list_models_invalid_filter(): + invalid_filter = 'InvalidFilterParam=123' + + with pytest.raises(exceptions.InvalidArgumentError) as excinfo: + ml.list_models(list_filter=invalid_filter) + check_firebase_error(excinfo, 400, 'Request contains an invalid argument.') + + +@pytest.mark.parametrize('firebase_model', [NAME_ONLY_ARGS], indirect=True) +def test_delete_model(firebase_model): + ml.delete_model(firebase_model.model_id) + + # Second delete of same model will fail + with pytest.raises(exceptions.NotFoundError) as excinfo: + ml.delete_model(firebase_model.model_id) + check_firebase_error(excinfo, 404, 'Requested entity was not found.') + + +# Test tensor flow conversion functions if tensor flow is enabled. +#'pip install tensorflow' in the environment if you want _TF_ENABLED = True +#'pip install tensorflow==2.2.0' for version 2.2.0 etc. 
+ + +def _clean_up_directory(save_dir): + if save_dir.startswith(tempfile.gettempdir()) and os.path.exists(save_dir): + shutil.rmtree(save_dir) + + +@pytest.fixture +def keras_model(): + assert _TF_ENABLED + x_list = [-1, 0, 1, 2, 3, 4] + y_list = [-3, -1, 1, 3, 5, 7] + x_tensor = tf.convert_to_tensor(x_list, dtype=tf.float32) + y_tensor = tf.convert_to_tensor(y_list, dtype=tf.float32) + model = tf.keras.models.Sequential([ + tf.keras.Input(shape=(1,)), + tf.keras.layers.Dense(units=1) + ]) + model.compile(optimizer='sgd', loss='mean_squared_error') + model.fit(x_tensor, y_tensor, epochs=3) + return model + + +@pytest.fixture +def saved_model_dir(keras_model): + assert _TF_ENABLED + # Make a new parent directory. The child directory must not exist yet. + # The child directory gets created by tf. If it exists, the tf call fails. + parent = tempfile.mkdtemp() + save_dir = os.path.join(parent, 'child') + + # different versions have different model conversion capability + # pick something that works for each version + if tf.version.VERSION.startswith('1.'): + tf.reset_default_graph() + x_var = tf.placeholder(tf.float32, (None, 3), name="x") + y_var = tf.multiply(x_var, x_var, name="y") + with tf.Session() as sess: + tf.saved_model.simple_save(sess, save_dir, {"x": x_var}, {"y": y_var}) + else: + # If it's not version 1.x or version 2.x we need to update the test. 
+ assert tf.version.VERSION.startswith('2.') + tf.saved_model.save(keras_model, save_dir) + yield save_dir + _clean_up_directory(parent) + + + +@pytest.mark.skipif(not _TF_ENABLED, reason='Tensor flow is required for this test.') +def test_from_keras_model(keras_model): + source = ml.TFLiteGCSModelSource.from_keras_model(keras_model, 'model2.tflite') + assert re.search( + '^gs://.*/Firebase/ML/Models/model2.tflite$', + source.gcs_tflite_uri) is not None + + # Validate the conversion by creating a model + model_format = ml.TFLiteFormat(model_source=source) + model = ml.Model(display_name=_random_identifier('KerasModel_'), model_format=model_format) + created_model = ml.create_model(model) + + try: + check_model(created_model, {'display_name': model.display_name}) + check_tflite_gcs_format(created_model) + finally: + _clean_up_model(created_model) + + +@pytest.mark.skipif(not _TF_ENABLED, reason='Tensor flow is required for this test.') +def test_from_saved_model(saved_model_dir): + # Test the conversion helper + source = ml.TFLiteGCSModelSource.from_saved_model(saved_model_dir, 'model3.tflite') + assert re.search( + '^gs://.*/Firebase/ML/Models/model3.tflite$', + source.gcs_tflite_uri) is not None + + # Validate the conversion by creating a model + model_format = ml.TFLiteFormat(model_source=source) + model = ml.Model(display_name=_random_identifier('SavedModel_'), model_format=model_format) + created_model = ml.create_model(model) + + try: + assert created_model.model_id is not None + assert created_model.validation_error is None + finally: + _clean_up_model(created_model) diff --git a/integration/test_project_management.py b/integration/test_project_management.py new file mode 100644 index 000000000..ba2c5ec16 --- /dev/null +++ b/integration/test_project_management.py @@ -0,0 +1,179 @@ +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Integration tests for the firebase_admin.project_management module.""" + +import json +import plistlib +import random + +import pytest + +from firebase_admin import exceptions +from firebase_admin import project_management + + +TEST_APP_BUNDLE_ID = 'com.firebase.adminsdk-python-integration-test' +TEST_APP_PACKAGE_NAME = 'com.firebase.adminsdk_python_integration_test' +TEST_APP_DISPLAY_NAME_PREFIX = 'Created By Firebase AdminSDK Python Integration Testing' + +SHA_256_HASH_1 = '123456789a123456789a123456789a123456789a123456789a123456789a1234' +SHA_256_HASH_2 = 'cafef00dba5eba11b01dfaceacc01adeda7aba5eca55e77e0b57ac1e5ca1ab1e' +SHA_1 = project_management.SHACertificate.SHA_1 +SHA_256 = project_management.SHACertificate.SHA_256 + + +def _starts_with(display_name, prefix): + return display_name and display_name.startswith(prefix) + + +@pytest.fixture(scope='module') +def android_app(default_app): + del default_app + android_apps = project_management.list_android_apps() + for android_app in android_apps: + if _starts_with(android_app.get_metadata().display_name, TEST_APP_DISPLAY_NAME_PREFIX): + return android_app + return project_management.create_android_app( + package_name=TEST_APP_PACKAGE_NAME, display_name=TEST_APP_DISPLAY_NAME_PREFIX) + + +@pytest.fixture(scope='module') +def ios_app(default_app): + del default_app + ios_apps = project_management.list_ios_apps() + for ios_app in ios_apps: + if _starts_with(ios_app.get_metadata().display_name, TEST_APP_DISPLAY_NAME_PREFIX): + return ios_app + return project_management.create_ios_app( + 
bundle_id=TEST_APP_BUNDLE_ID, display_name=TEST_APP_DISPLAY_NAME_PREFIX) + + +def test_create_android_app_already_exists(android_app): + del android_app + + with pytest.raises(exceptions.AlreadyExistsError) as excinfo: + project_management.create_android_app( + package_name=TEST_APP_PACKAGE_NAME, display_name=TEST_APP_DISPLAY_NAME_PREFIX) + assert 'Requested entity already exists' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + + +def test_android_set_display_name_and_get_metadata(android_app, project_id): + app_id = android_app.app_id + android_app = project_management.android_app(app_id) + new_display_name = f'{TEST_APP_DISPLAY_NAME_PREFIX} helloworld {random.randint(0, 10000)}' + + android_app.set_display_name(new_display_name) + metadata = project_management.android_app(app_id).get_metadata() + android_app.set_display_name(TEST_APP_DISPLAY_NAME_PREFIX) # Revert the display name. + + assert metadata._name == f'projects/{project_id}/androidApps/{app_id}' + assert metadata.app_id == app_id + assert metadata.project_id == project_id + assert metadata.display_name == new_display_name + assert metadata.package_name == TEST_APP_PACKAGE_NAME + + +def test_list_android_apps(android_app): + del android_app + + android_apps = project_management.list_android_apps() + + assert any(_starts_with(android_app.get_metadata().display_name, TEST_APP_DISPLAY_NAME_PREFIX) + for android_app in android_apps) + + +def test_get_android_app_config(android_app, project_id): + config = android_app.get_config() + + json_config = json.loads(config) + assert json_config['project_info']['project_id'] == project_id + for client in json_config['client']: + client_info = client['client_info'] + if client_info['mobilesdk_app_id'] == android_app.app_id: + assert client_info['android_client_info']['package_name'] == TEST_APP_PACKAGE_NAME + break + else: + pytest.fail('Failed to find the test Android app in the Android config.') + + 
+def test_android_sha_certificates(android_app): + """Tests all of get_sha_certificates, add_sha_certificate, and delete_sha_certificate.""" + # Delete all existing certs. + for cert in android_app.get_sha_certificates(): + android_app.delete_sha_certificate(cert) + + # Add two different certs and assert that they have all been added successfully. + android_app.add_sha_certificate(project_management.SHACertificate(SHA_256_HASH_1)) + android_app.add_sha_certificate(project_management.SHACertificate(SHA_256_HASH_2)) + + cert_list = android_app.get_sha_certificates() + + sha_256_hashes = set(cert.sha_hash for cert in cert_list if cert.cert_type == SHA_256) + assert sha_256_hashes == set([SHA_256_HASH_1, SHA_256_HASH_2]) + for cert in cert_list: + assert cert.name + + # Delete all certs and assert that they have all been deleted successfully. + for cert in cert_list: + android_app.delete_sha_certificate(cert) + + assert android_app.get_sha_certificates() == [] + + +def test_create_ios_app_already_exists(ios_app): + del ios_app + + with pytest.raises(exceptions.AlreadyExistsError) as excinfo: + project_management.create_ios_app( + bundle_id=TEST_APP_BUNDLE_ID, display_name=TEST_APP_DISPLAY_NAME_PREFIX) + assert 'Requested entity already exists' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + + +def test_ios_set_display_name_and_get_metadata(ios_app, project_id): + app_id = ios_app.app_id + ios_app = project_management.ios_app(app_id) + new_display_name = f'{TEST_APP_DISPLAY_NAME_PREFIX} helloworld {random.randint(0, 10000)}' + + ios_app.set_display_name(new_display_name) + metadata = project_management.ios_app(app_id).get_metadata() + ios_app.set_display_name(TEST_APP_DISPLAY_NAME_PREFIX) # Revert the display name. 
+ + assert metadata._name == f'projects/{project_id}/iosApps/{app_id}' + assert metadata.project_id == project_id + assert metadata.display_name == new_display_name + assert metadata.bundle_id == TEST_APP_BUNDLE_ID + + +def test_list_ios_apps(ios_app): + del ios_app + + ios_apps = project_management.list_ios_apps() + + assert any(_starts_with(ios_app.get_metadata().display_name, TEST_APP_DISPLAY_NAME_PREFIX) + for ios_app in ios_apps) + + +def test_get_ios_app_config(ios_app, project_id): + config = ios_app.get_config() + + plist = plistlib.loads(config.encode('utf-8')) + + assert plist['BUNDLE_ID'] == TEST_APP_BUNDLE_ID + assert plist['PROJECT_ID'] == project_id + assert plist['GOOGLE_APP_ID'] == ios_app.app_id diff --git a/integration/test_storage.py b/integration/test_storage.py new file mode 100644 index 000000000..32e4d86a3 --- /dev/null +++ b/integration/test_storage.py @@ -0,0 +1,44 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Integration tests for firebase_admin.storage module.""" +import time + +from firebase_admin import storage + + +def test_default_bucket(project_id): + bucket = storage.bucket() + _verify_bucket(bucket, f'{project_id}.appspot.com') + +def test_custom_bucket(project_id): + bucket_name = f'{project_id}.appspot.com' + bucket = storage.bucket(bucket_name) + _verify_bucket(bucket, bucket_name) + +def test_non_existing_bucket(): + bucket = storage.bucket('non.existing') + assert bucket.exists() is False + +def _verify_bucket(bucket, expected_name): + assert bucket.name == expected_name + file_name = f'data_{int(time.time())}.txt' + blob = bucket.blob(file_name) + blob.upload_from_string('Hello World') + + blob = bucket.get_blob(file_name) + assert blob.download_as_bytes().decode() == 'Hello World' + + bucket.delete_blob(file_name) + assert not bucket.get_blob(file_name) diff --git a/integration/test_tenant_mgt.py b/integration/test_tenant_mgt.py new file mode 100644 index 000000000..f0bad58b2 --- /dev/null +++ b/integration/test_tenant_mgt.py @@ -0,0 +1,418 @@ +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Integration tests for firebase_admin.tenant_mgt module.""" + +import random +import string +import time +from urllib import parse +import uuid + +import requests +import pytest + +from firebase_admin import auth +from firebase_admin import tenant_mgt +from firebase_admin._http_client import DEFAULT_TIMEOUT_SECONDS as timeout +from integration import test_auth + + +ACTION_LINK_CONTINUE_URL = 'http://localhost?a=1&b=5#f=1' +ACTION_CODE_SETTINGS = auth.ActionCodeSettings(ACTION_LINK_CONTINUE_URL) +VERIFY_TOKEN_URL = 'https://www.googleapis.com/identitytoolkit/v3/relyingparty/verifyCustomToken' + + +@pytest.fixture(scope='module') +def sample_tenant(): + tenant = tenant_mgt.create_tenant( + display_name='admin-python-tenant', + allow_password_sign_up=True, + enable_email_link_sign_in=True) + yield tenant + tenant_mgt.delete_tenant(tenant.tenant_id) + + +@pytest.fixture(scope='module') +def tenant_user(sample_tenant): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + email = _random_email() + user = client.create_user(email=email) + yield user + client.delete_user(user.uid) + + +def test_get_tenant(sample_tenant): + tenant = tenant_mgt.get_tenant(sample_tenant.tenant_id) + assert isinstance(tenant, tenant_mgt.Tenant) + assert tenant.tenant_id == sample_tenant.tenant_id + assert tenant.display_name == 'admin-python-tenant' + assert tenant.allow_password_sign_up is True + assert tenant.enable_email_link_sign_in is True + + +def test_list_tenants(sample_tenant): + page = tenant_mgt.list_tenants() + result = None + for tenant in page.iterate_all(): + if tenant.tenant_id == sample_tenant.tenant_id: + result = tenant + break + assert isinstance(result, tenant_mgt.Tenant) + assert result.tenant_id == sample_tenant.tenant_id + assert result.display_name == 'admin-python-tenant' + assert result.allow_password_sign_up is True + assert result.enable_email_link_sign_in is True + + +def test_update_tenant(): + tenant = tenant_mgt.create_tenant( + 
display_name='py-update-test', allow_password_sign_up=True, enable_email_link_sign_in=True) + try: + tenant = tenant_mgt.update_tenant( + tenant.tenant_id, display_name='updated-py-tenant', allow_password_sign_up=False, + enable_email_link_sign_in=False) + assert isinstance(tenant, tenant_mgt.Tenant) + assert tenant.tenant_id == tenant.tenant_id + assert tenant.display_name == 'updated-py-tenant' + assert tenant.allow_password_sign_up is False + assert tenant.enable_email_link_sign_in is False + finally: + tenant_mgt.delete_tenant(tenant.tenant_id) + + +def test_delete_tenant(): + tenant = tenant_mgt.create_tenant(display_name='py-delete-test') + tenant_mgt.delete_tenant(tenant.tenant_id) + with pytest.raises(tenant_mgt.TenantNotFoundError): + tenant_mgt.get_tenant(tenant.tenant_id) + + +def test_auth_for_client(sample_tenant): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + assert isinstance(client, auth.Client) + assert client.tenant_id == sample_tenant.tenant_id + + +def test_custom_token(sample_tenant, api_key): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + custom_token = client.create_custom_token('user1') + id_token = _sign_in(custom_token, sample_tenant.tenant_id, api_key) + claims = client.verify_id_token(id_token) + assert claims['uid'] == 'user1' + assert claims['firebase']['tenant'] == sample_tenant.tenant_id + + +def test_custom_token_with_claims(sample_tenant, api_key): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + custom_token = client.create_custom_token('user1', {'premium': True}) + id_token = _sign_in(custom_token, sample_tenant.tenant_id, api_key) + claims = client.verify_id_token(id_token) + assert claims['uid'] == 'user1' + assert claims['premium'] is True + assert claims['firebase']['tenant'] == sample_tenant.tenant_id + + +def test_create_user(sample_tenant, tenant_user): + assert tenant_user.tenant_id == sample_tenant.tenant_id + + +def test_update_user(sample_tenant): + client = 
tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + user = client.create_user() + try: + email = _random_email() + phone = _random_phone() + user = client.update_user(user.uid, email=email, phone_number=phone) + assert user.tenant_id == sample_tenant.tenant_id + assert user.email == email + assert user.phone_number == phone + finally: + client.delete_user(user.uid) + + +def test_get_user(sample_tenant, tenant_user): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + user = client.get_user(tenant_user.uid) + assert user.uid == tenant_user.uid + assert user.tenant_id == sample_tenant.tenant_id + + +def test_list_users(sample_tenant, tenant_user): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + page = client.list_users() + result = None + for user in page.iterate_all(): + if user.uid == tenant_user.uid: + result = user + break + assert result.tenant_id == sample_tenant.tenant_id + + +def test_set_custom_user_claims(sample_tenant, tenant_user): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + client.set_custom_user_claims(tenant_user.uid, {'premium': True}) + user = client.get_user(tenant_user.uid) + assert user.custom_claims == {'premium': True} + + +def test_delete_user(sample_tenant): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + user = client.create_user() + client.delete_user(user.uid) + with pytest.raises(auth.UserNotFoundError): + client.get_user(user.uid) + + +def test_revoke_refresh_tokens(sample_tenant, tenant_user): + valid_since = int(time.time()) + time.sleep(1) + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + client.revoke_refresh_tokens(tenant_user.uid) + user = client.get_user(tenant_user.uid) + assert user.tokens_valid_after_timestamp > valid_since + + +def test_password_reset_link(sample_tenant, tenant_user): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + link = client.generate_password_reset_link(tenant_user.email, ACTION_CODE_SETTINGS) + assert 
_tenant_id_from_link(link) == sample_tenant.tenant_id + + +def test_email_verification_link(sample_tenant, tenant_user): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + link = client.generate_email_verification_link(tenant_user.email, ACTION_CODE_SETTINGS) + assert _tenant_id_from_link(link) == sample_tenant.tenant_id + + +def test_sign_in_with_email_link(sample_tenant, tenant_user): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + link = client.generate_sign_in_with_email_link(tenant_user.email, ACTION_CODE_SETTINGS) + assert _tenant_id_from_link(link) == sample_tenant.tenant_id + + +def test_import_users(sample_tenant): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + user = auth.ImportUserRecord( + uid=_random_uid(), email=_random_email()) + result = client.import_users([user]) + try: + assert result.success_count == 1 + assert result.failure_count == 0 + saved_user = client.get_user(user.uid) + assert saved_user.email == user.email + finally: + client.delete_user(user.uid) + + +@pytest.fixture(scope='module') +def oidc_provider(sample_tenant): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + provider_config = _create_oidc_provider_config(client) + yield provider_config + client.delete_oidc_provider_config(provider_config.provider_id) + + +def test_create_oidc_provider_config(oidc_provider): + assert isinstance(oidc_provider, auth.OIDCProviderConfig) + assert oidc_provider.client_id == 'OIDC_CLIENT_ID' + assert oidc_provider.issuer == 'https://oidc.com/issuer' + assert oidc_provider.display_name == 'OIDC_DISPLAY_NAME' + assert oidc_provider.enabled is True + + +def test_get_oidc_provider_config(sample_tenant, oidc_provider): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + provider_config = client.get_oidc_provider_config(oidc_provider.provider_id) + assert isinstance(provider_config, auth.OIDCProviderConfig) + assert provider_config.provider_id == oidc_provider.provider_id + assert 
provider_config.client_id == 'OIDC_CLIENT_ID' + assert provider_config.issuer == 'https://oidc.com/issuer' + assert provider_config.display_name == 'OIDC_DISPLAY_NAME' + assert provider_config.enabled is True + + +def test_list_oidc_provider_configs(sample_tenant, oidc_provider): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + page = client.list_oidc_provider_configs() + result = None + for provider_config in page.iterate_all(): + if provider_config.provider_id == oidc_provider.provider_id: + result = provider_config + break + + assert result is not None + + +def test_update_oidc_provider_config(sample_tenant): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + provider_config = _create_oidc_provider_config(client) + try: + provider_config = client.update_oidc_provider_config( + provider_config.provider_id, + client_id='UPDATED_OIDC_CLIENT_ID', + issuer='https://oidc.com/updated_issuer', + display_name='UPDATED_OIDC_DISPLAY_NAME', + enabled=False) + assert provider_config.client_id == 'UPDATED_OIDC_CLIENT_ID' + assert provider_config.issuer == 'https://oidc.com/updated_issuer' + assert provider_config.display_name == 'UPDATED_OIDC_DISPLAY_NAME' + assert provider_config.enabled is False + finally: + client.delete_oidc_provider_config(provider_config.provider_id) + + +def test_delete_oidc_provider_config(sample_tenant): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + provider_config = _create_oidc_provider_config(client) + client.delete_oidc_provider_config(provider_config.provider_id) + with pytest.raises(auth.ConfigurationNotFoundError): + client.get_oidc_provider_config(provider_config.provider_id) + + +@pytest.fixture(scope='module') +def saml_provider(sample_tenant): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + provider_config = _create_saml_provider_config(client) + yield provider_config + client.delete_saml_provider_config(provider_config.provider_id) + + +def 
test_create_saml_provider_config(saml_provider): + assert isinstance(saml_provider, auth.SAMLProviderConfig) + assert saml_provider.idp_entity_id == 'IDP_ENTITY_ID' + assert saml_provider.sso_url == 'https://example.com/login' + assert saml_provider.x509_certificates == [test_auth.X509_CERTIFICATES[0]] + assert saml_provider.rp_entity_id == 'RP_ENTITY_ID' + assert saml_provider.callback_url == 'https://projectId.firebaseapp.com/__/auth/handler' + assert saml_provider.display_name == 'SAML_DISPLAY_NAME' + assert saml_provider.enabled is True + + +def test_get_saml_provider_config(sample_tenant, saml_provider): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + provider_config = client.get_saml_provider_config(saml_provider.provider_id) + assert isinstance(provider_config, auth.SAMLProviderConfig) + assert provider_config.provider_id == saml_provider.provider_id + assert provider_config.idp_entity_id == 'IDP_ENTITY_ID' + assert provider_config.sso_url == 'https://example.com/login' + assert provider_config.x509_certificates == [test_auth.X509_CERTIFICATES[0]] + assert provider_config.rp_entity_id == 'RP_ENTITY_ID' + assert provider_config.callback_url == 'https://projectId.firebaseapp.com/__/auth/handler' + assert provider_config.display_name == 'SAML_DISPLAY_NAME' + assert provider_config.enabled is True + + +def test_list_saml_provider_configs(sample_tenant, saml_provider): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + page = client.list_saml_provider_configs() + result = None + for provider_config in page.iterate_all(): + if provider_config.provider_id == saml_provider.provider_id: + result = provider_config + break + + assert result is not None + + +def test_update_saml_provider_config(sample_tenant): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + provider_config = _create_saml_provider_config(client) + try: + provider_config = client.update_saml_provider_config( + provider_config.provider_id, + 
idp_entity_id='UPDATED_IDP_ENTITY_ID', + sso_url='https://example.com/updated_login', + x509_certificates=[test_auth.X509_CERTIFICATES[1]], + rp_entity_id='UPDATED_RP_ENTITY_ID', + callback_url='https://updatedProjectId.firebaseapp.com/__/auth/handler', + display_name='UPDATED_SAML_DISPLAY_NAME', + enabled=False) + assert provider_config.idp_entity_id == 'UPDATED_IDP_ENTITY_ID' + assert provider_config.sso_url == 'https://example.com/updated_login' + assert provider_config.x509_certificates == [test_auth.X509_CERTIFICATES[1]] + assert provider_config.rp_entity_id == 'UPDATED_RP_ENTITY_ID' + assert provider_config.callback_url == ('https://updatedProjectId.firebaseapp.com/' + '__/auth/handler') + assert provider_config.display_name == 'UPDATED_SAML_DISPLAY_NAME' + assert provider_config.enabled is False + finally: + client.delete_saml_provider_config(provider_config.provider_id) + + +def test_delete_saml_provider_config(sample_tenant): + client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id) + provider_config = _create_saml_provider_config(client) + client.delete_saml_provider_config(provider_config.provider_id) + with pytest.raises(auth.ConfigurationNotFoundError): + client.get_saml_provider_config(provider_config.provider_id) + + +def _create_oidc_provider_config(client): + provider_id = f'oidc.{_random_string()}' + return client.create_oidc_provider_config( + provider_id=provider_id, + client_id='OIDC_CLIENT_ID', + issuer='https://oidc.com/issuer', + display_name='OIDC_DISPLAY_NAME', + enabled=True) + + +def _create_saml_provider_config(client): + provider_id = f'saml.{_random_string()}' + return client.create_saml_provider_config( + provider_id=provider_id, + idp_entity_id='IDP_ENTITY_ID', + sso_url='https://example.com/login', + x509_certificates=[test_auth.X509_CERTIFICATES[0]], + rp_entity_id='RP_ENTITY_ID', + callback_url='https://projectId.firebaseapp.com/__/auth/handler', + display_name='SAML_DISPLAY_NAME', + enabled=True) + + +def _random_uid(): + 
return str(uuid.uuid4()).lower().replace('-', '') + + +def _random_email(): + random_id = str(uuid.uuid4()).lower().replace('-', '') + return f'test{random_id[:12]}@example.{random_id[12:]}.com' + + +def _random_phone(): + return '+1' + ''.join([str(random.randint(0, 9)) for _ in range(0, 10)]) + + +def _random_string(length=10): + letters = string.ascii_lowercase + return ''.join(random.choice(letters) for i in range(length)) + + +def _tenant_id_from_link(link): + query = parse.urlparse(link).query + parsed_query = parse.parse_qs(query) + return parsed_query['tenantId'][0] + + +def _sign_in(custom_token, tenant_id, api_key): + body = { + 'token' : custom_token.decode(), + 'returnSecureToken' : True, + 'tenantId': tenant_id, + } + params = {'key' : api_key} + resp = requests.request('post', VERIFY_TOKEN_URL, params=params, json=body, timeout=timeout) + resp.raise_for_status() + return resp.json().get('idToken') diff --git a/lint.sh b/lint.sh index 2e89768a5..5e65862f3 100755 --- a/lint.sh +++ b/lint.sh @@ -1,3 +1,5 @@ +#!/bin/bash + # Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,15 +14,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-#!/bin/bash - function lintAllFiles () { echo "Running linter on module $1" pylint --disable=$2 $1 } function lintChangedFiles () { - files=`git status -s $1 | grep -v "^D" | awk '{print $NF}' | grep .py$` + files=`git status -s $1 | (grep -v "^D") | awk '{print $NF}' | (grep .py$ || true)` for f in $files do echo "Running linter on $f" @@ -28,13 +28,32 @@ function lintChangedFiles () { done } -SKIP_FOR_TESTS="redefined-outer-name,protected-access,missing-docstring" +set -o errexit +set -o nounset + +SKIP_FOR_TESTS="redefined-outer-name,protected-access,missing-docstring,too-many-lines,len-as-condition" +SKIP_FOR_SNIPPETS="${SKIP_FOR_TESTS},reimported,unused-variable,unused-import,import-outside-toplevel" + +if [[ "$#" -eq 1 && "$1" = "all" ]] +then + CHECK_ALL=true +elif [[ "$#" -eq 0 ]] +then + CHECK_ALL=false +else + echo "Usage: ./lint.sh [all]" + exit 1 +fi -if [[ $1 = "all" ]] +if [[ "$CHECK_ALL" = true ]] then - lintAllFiles firebase_admin - lintAllFiles tests $SKIP_FOR_TESTS + lintAllFiles "firebase_admin" "" + lintAllFiles "tests" "$SKIP_FOR_TESTS" + lintAllFiles "integration" "$SKIP_FOR_TESTS" + lintAllFiles "snippets" "$SKIP_FOR_SNIPPETS" else - lintChangedFiles firebase_admin - lintChangedFiles tests $SKIP_FOR_TESTS + lintChangedFiles "firebase_admin" "" + lintChangedFiles "tests" "$SKIP_FOR_TESTS" + lintChangedFiles "integration" "$SKIP_FOR_TESTS" + lintChangedFiles "snippets" "$SKIP_FOR_SNIPPETS" fi diff --git a/requirements.txt b/requirements.txt index 1f8ec1437..3b96eea00 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,15 @@ -pylint >= 1.6.4 -pytest >= 3.0.6 +astroid == 3.3.11 +pylint == 3.3.9 +pytest >= 8.2.2 pytest-cov >= 2.4.0 -tox >= 2.6.0 +pytest-localserver >= 0.4.1 +pytest-asyncio >= 0.26.0 +pytest-mock >= 3.6.1 +respx == 0.22.0 -google-auth >= 1.0.0 -requests >= 2.13.0 -six >= 1.6.1 +cachecontrol >= 0.14.3 +google-api-core[grpc] >= 2.25.1, < 3.0.0dev; platform.python_implementation != 'PyPy' +google-cloud-firestore >= 
2.21.0; platform.python_implementation != 'PyPy' +google-cloud-storage >= 3.1.1 +pyjwt[crypto] >= 2.10.1 +httpx[http2] == 0.28.1 \ No newline at end of file diff --git a/scripts/prepare_release.sh b/scripts/prepare_release.sh deleted file mode 100755 index 3461469da..000000000 --- a/scripts/prepare_release.sh +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -#!/bin/bash - -source bash_utils.sh - -function isNewerVersion { - parseVersion "$1" - ARG_MAJOR=$MAJOR_VERSION - ARG_MINOR=$MINOR_VERSION - ARG_PATCH=$PATCH_VERSION - - parseVersion "$2" - if [ "$ARG_MAJOR" -ne "$MAJOR_VERSION" ]; then - if [ "$ARG_MAJOR" -lt "$MAJOR_VERSION" ]; then return 1; else return 0; fi; - fi - if [ "$ARG_MINOR" -ne "$MINOR_VERSION" ]; then - if [ "$ARG_MINOR" -lt "$MINOR_VERSION" ]; then return 1; else return 0; fi; - fi - if [ "$ARG_PATCH" -ne "$PATCH_VERSION" ]; then - if [ "$ARG_PATCH" -lt "$PATCH_VERSION" ]; then return 1; else return 0; fi; - fi - # The build numbers are equal - return 1 -} - -set -e - -if [ -z "$1" ]; then - echo "[ERROR] No version number provided." - echo "[INFO] Usage: ./prepare_release.sh " - exit 1 -fi - -VERSION="$1" -if ! parseVersion "$VERSION"; then - echo "[ERROR] Illegal version number provided. Version number must match semver." 
- exit 1 -fi - -CUR_VERSION=`grep "^__version__ =" ../firebase_admin/__init__.py | awk '{print $3}' | sed "s/'//g"` -if [ -z "$CUR_VERSION" ]; then - echo "[ERROR] Failed to find the current version. Check firebase_admin/__init__.py for version declaration." - exit 1 -fi -if ! parseVersion "$CUR_VERSION"; then - echo "[ERROR] Illegal current version number. Version number must match semver." - exit 1 -fi - -if ! isNewerVersion "$VERSION" "$CUR_VERSION"; then - echo "[ERROR] Illegal version number provided. Version $VERSION <= $CUR_VERSION" - exit 1 -fi - -CHECKED_OUT_BRANCH="$(git branch | grep "*" | awk -F ' ' '{print $2}')" -if [[ $CHECKED_OUT_BRANCH != "master" ]]; then - echo "[ERROR] You are on the '${CHECKED_OUT_BRANCH}' branch. Release must be prepared from the 'master' branch." - exit 1 -fi -if [[ `git status --porcelain` ]]; then - echo "[ERROR] Local changes exist in the repo. Resolve local changes before release." - exit 1 -fi - -HOST=$(uname) -echo "[INFO] Updating version number in firebase_admin/__init__.py" -if [ $HOST == "Darwin" ]; then - sed -i "" -e "s/__version__ = '$CUR_VERSION'/__version__ = '$VERSION'/" "../firebase_admin/__init__.py" -else - sed --in-place -e "s/__version__ = '$CUR_VERSION'/__version__ = '$VERSION'/" "../firebase_admin/__init__.py" -fi - -echo "[INFO] Running unit tests" -tox --workdir .. - -echo "[INFO] This repo has been prepared for a release. Create a branch and commit the changes." diff --git a/scripts/verify_release.sh b/scripts/verify_release.sh deleted file mode 100755 index f4edd25de..000000000 --- a/scripts/verify_release.sh +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -#!/bin/bash - -source bash_utils.sh - -if [ -z "$1" ]; then - echo "[ERROR] No version number provided." - echo "[INFO] Usage: ./verify_release.sh " - exit 1 -fi - -VERSION="$1" -if ! parseVersion "$VERSION"; then - echo "[ERROR] Illegal version number provided. Version number must match semver." - exit 1 -fi - -mkdir sandbox -virtualenv sandbox -source sandbox/bin/activate -pip install firebase_admin -INSTALLED_VERSION=`python -c 'import firebase_admin; print firebase_admin.__version__'` -echo "[INFO] Installed firebase_admin version $INSTALLED_VERSION" -deactivate -rm -rf sandbox - -if [[ "$VERSION" == "$INSTALLED_VERSION" ]]; then - echo "[INFO] Release verified successfully" -else - echo "[ERROR] Installed version did not match the release version." 
- exit 1 -fi diff --git a/setup.cfg b/setup.cfg index 2a9acf13d..4c6cf8d8f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,3 @@ -[bdist_wheel] -universal = 1 +[tool:pytest] +testpaths = tests +asyncio_default_test_loop_scope = class diff --git a/setup.py b/setup.py index 2d16d6175..21e29332e 100644 --- a/setup.py +++ b/setup.py @@ -18,47 +18,59 @@ from os import path import sys -from setuptools import find_packages from setuptools import setup -import firebase_admin - -if sys.version_info < (2, 7): - print('firebase_admin requires python2 version >= 2.7 or python3.', file=sys.stderr) +(major, minor) = (sys.version_info.major, sys.version_info.minor) +if major != 3 or minor < 9: + print('firebase_admin requires python >= 3.9', file=sys.stderr) sys.exit(1) +# Read in the package metadata per recommendations from: +# https://packaging.python.org/guides/single-sourcing-package-version/ +about_path = path.join(path.dirname(path.abspath(__file__)), 'firebase_admin', '__about__.py') +about = {} +with open(about_path) as fp: + exec(fp.read(), about) # pylint: disable=exec-used + long_description = ('The Firebase Admin Python SDK enables server-side (backend) Python developers ' 'to integrate Firebase into their services and applications.') install_requires = [ - 'google-auth>=1.0.0', - 'requests>=2.13.0', - 'six>=1.6.1' + 'cachecontrol>=0.14.3', + 'google-api-core[grpc] >= 2.25.1, < 3.0.0dev; platform.python_implementation != "PyPy"', + 'google-cloud-firestore>=2.21.0; platform.python_implementation != "PyPy"', + 'google-cloud-storage>=3.1.1', + 'pyjwt[crypto] >= 2.10.1', + 'httpx[http2] == 0.28.1', ] -version = firebase_admin.__version__ - setup( - name='firebase_admin', - version=version, + name=about['__title__'], + version=about['__version__'], description='Firebase Admin Python SDK', long_description=long_description, - url='https://firebase.google.com/docs/admin/setup/', - author='Firebase', - license='Apache License 2.0', + url=about['__url__'], + project_urls={ + 
'Release Notes': 'https://firebase.google.com/support/release-notes/admin/python', + 'Source': 'https://github.com/firebase/firebase-admin-python', + }, + author=about['__author__'], + license=about['__license__'], keywords='firebase cloud development', install_requires=install_requires, - packages=find_packages(exclude=['tests']), + packages=['firebase_admin'], + python_requires='>=3.9', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Topic :: Software Development :: Build Tools', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', - 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', + 'Programming Language :: Python :: 3.13', 'License :: OSI Approved :: Apache Software License', ], ) diff --git a/snippets/__init__.py b/snippets/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/snippets/auth/__init__.py b/snippets/auth/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/snippets/auth/get_service_account_tokens.py b/snippets/auth/get_service_account_tokens.py new file mode 100644 index 000000000..7ad67a093 --- /dev/null +++ b/snippets/auth/get_service_account_tokens.py @@ -0,0 +1,29 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +# [START get_service_account_tokens] +from firebase_admin import credentials + +cred = credentials.Certificate('path/to/serviceAccountKey.json') +access_token_info = cred.get_access_token() + +access_token = access_token_info.access_token +expiration_time = access_token_info.expiry +# Attach access_token to HTTPS request in the "Authorization: Bearer" header +# After expiration_time, you must generate a new access token +# [END get_service_account_tokens] + +print(f'The access token {access_token} expires at {expiration_time}') diff --git a/snippets/auth/index.py b/snippets/auth/index.py new file mode 100644 index 000000000..656137dba --- /dev/null +++ b/snippets/auth/index.py @@ -0,0 +1,1106 @@ +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import print_function + +import base64 +import datetime +import sys +import time + +# [START import_sdk] +import firebase_admin +# [END import_sdk] +from firebase_admin import credentials +from firebase_admin import auth +from firebase_admin import exceptions +from firebase_admin import tenant_mgt + +sys.path.append("lib") + +def initialize_sdk_with_service_account(): + # [START initialize_sdk_with_service_account] + import firebase_admin + from firebase_admin import credentials + from firebase_admin import exceptions + + cred = credentials.Certificate('path/to/serviceAccountKey.json') + default_app = firebase_admin.initialize_app(cred) + # [END initialize_sdk_with_service_account] + firebase_admin.delete_app(default_app) + +def initialize_sdk_with_application_default(): + # [START initialize_sdk_with_application_default] + default_app = firebase_admin.initialize_app() + # [END initialize_sdk_with_application_default] + firebase_admin.delete_app(default_app) + +def initialize_sdk_with_refresh_token(): + # [START initialize_sdk_with_refresh_token] + cred = credentials.RefreshToken('path/to/refreshToken.json') + default_app = firebase_admin.initialize_app(cred) + # [END initialize_sdk_with_refresh_token] + firebase_admin.delete_app(default_app) + +def initialize_sdk_with_service_account_id(): + # [START initialize_sdk_with_service_account_id] + options = { + 'serviceAccountId': 'my-client-id@my-project-id.iam.gserviceaccount.com', + } + firebase_admin.initialize_app(options=options) + # [END initialize_sdk_with_service_account_id] + firebase_admin.delete_app(firebase_admin.get_app()) + +def access_services_default(): + cred = credentials.Certificate('path/to/service.json') + # [START access_services_default] + # Import the Firebase service + from firebase_admin import auth + + # Initialize the default app + default_app = firebase_admin.initialize_app(cred) + print(default_app.name) # "[DEFAULT]" + + # Retrieve services via the auth package... 
+ # auth.create_custom_token(...) + # [END access_services_default] + firebase_admin.delete_app(default_app) + +def access_services_nondefault(): + cred = credentials.Certificate('path/to/service.json') + other_cred = credentials.Certificate('path/to/other_service.json') + + # [START access_services_nondefault] + # Initialize the default app + default_app = firebase_admin.initialize_app(cred) + + # Initialize another app with a different config + other_app = firebase_admin.initialize_app(cred, name='other') + + print(default_app.name) # "[DEFAULT]" + print(other_app.name) # "other" + + # Retrieve default services via the auth package... + # auth.create_custom_token(...) + + # Use the `app` argument to retrieve the other app's services + # auth.create_custom_token(..., app=other_app) + # [END access_services_nondefault] + firebase_admin.delete_app(default_app) + firebase_admin.delete_app(other_app) + +def create_token_uid(): + cred = credentials.Certificate('path/to/service.json') + default_app = firebase_admin.initialize_app(cred) + # [START create_token_uid] + uid = 'some-uid' + + custom_token = auth.create_custom_token(uid) + # [END create_token_uid] + firebase_admin.delete_app(default_app) + return custom_token + +def create_token_with_claims(): + cred = credentials.Certificate('path/to/service.json') + default_app = firebase_admin.initialize_app(cred) + # [START create_token_with_claims] + uid = 'some-uid' + additional_claims = { + 'premiumAccount': True + } + + custom_token = auth.create_custom_token(uid, additional_claims) + # [END create_token_with_claims] + firebase_admin.delete_app(default_app) + return custom_token + +def verify_token_uid(id_token): + cred = credentials.Certificate('path/to/service.json') + default_app = firebase_admin.initialize_app(cred) + # [START verify_token_uid] + # id_token comes from the client app (shown above) + + decoded_token = auth.verify_id_token(id_token) + uid = decoded_token['uid'] + # [END verify_token_uid] + print(uid) 
+ firebase_admin.delete_app(default_app) + +def verify_token_uid_check_revoke(id_token): + cred = credentials.Certificate('path/to/service.json') + default_app = firebase_admin.initialize_app(cred) + # [START verify_token_id_check_revoked] + try: + # Verify the ID token while checking if the token is revoked by + # passing check_revoked=True. + decoded_token = auth.verify_id_token(id_token, check_revoked=True) + # Token is valid and not revoked. + uid = decoded_token['uid'] + except auth.RevokedIdTokenError: + # Token revoked, inform the user to reauthenticate or signOut(). + pass + except auth.UserDisabledError: + # Token belongs to a disabled user record. + pass + except auth.InvalidIdTokenError: + # Token is invalid + pass + # [END verify_token_id_check_revoked] + firebase_admin.delete_app(default_app) + return uid + +def revoke_refresh_token_uid(): + cred = credentials.Certificate('path/to/service.json') + default_app = firebase_admin.initialize_app(cred) + # [START revoke_tokens] + # Revoke tokens on the backend. + auth.revoke_refresh_tokens(uid) + user = auth.get_user(uid) + # Convert to seconds as the auth_time in the token claims is in seconds. 
+ revocation_second = user.tokens_valid_after_timestamp / 1000 + print(f'Tokens revoked at: {revocation_second}') + # [END revoke_tokens] + # [START save_revocation_in_db] + metadata_ref = firebase_admin.db.reference("metadata/" + uid) + metadata_ref.set({'revokeTime': revocation_second}) + # [END save_revocation_in_db] + print(uid) + firebase_admin.delete_app(default_app) + +def get_user(uid): + # [START get_user] + from firebase_admin import auth + + user = auth.get_user(uid) + print(f'Successfully fetched user data: {user.uid}') + # [END get_user] + +def get_user_by_email(): + email = 'user@example.com' + # [START get_user_by_email] + from firebase_admin import auth + + user = auth.get_user_by_email(email) + print(f'Successfully fetched user data: {user.uid}') + # [END get_user_by_email] + +def bulk_get_users(): + # [START bulk_get_users] + from firebase_admin import auth + + result = auth.get_users([ + auth.UidIdentifier('uid1'), + auth.EmailIdentifier('user2@example.com'), + auth.PhoneIdentifier(+15555550003), + auth.ProviderIdentifier('google.com', 'google_uid4') + ]) + + print('Successfully fetched user data:') + for user in result.users: + print(user.uid) + + print('Unable to find users corresponding to these identifiers:') + for uid in result.not_found: + print(uid) + # [END bulk_get_users] + +def get_user_by_phone_number(): + phone = '+1 555 555 0100' + # [START get_user_by_phone] + from firebase_admin import auth + + user = auth.get_user_by_phone_number(phone) + print(f'Successfully fetched user data: {user.uid}') + # [END get_user_by_phone] + +def create_user(): + # [START create_user] + user = auth.create_user( + email='user@example.com', + email_verified=False, + phone_number='+15555550100', + password='secretPassword', + display_name='John Doe', + photo_url='http://www.example.com/12345678/photo.png', + disabled=False) + print(f'Sucessfully created new user: {user.uid}') + # [END create_user] + return user.uid + +def create_user_with_id(): + # [START 
create_user_with_id] + user = auth.create_user( + uid='some-uid', email='user@example.com', phone_number='+15555550100') + print(f'Sucessfully created new user: {user.uid}') + # [END create_user_with_id] + +def update_user(uid): + # [START update_user] + user = auth.update_user( + uid, + email='user@example.com', + phone_number='+15555550100', + email_verified=True, + password='newPassword', + display_name='John Doe', + photo_url='http://www.example.com/12345678/photo.png', + disabled=True) + print(f'Sucessfully updated user: {user.uid}') + # [END update_user] + +def delete_user(uid): + # [START delete_user] + auth.delete_user(uid) + print('Successfully deleted user') + # [END delete_user] + +def bulk_delete_users(): + # [START bulk_delete_users] + from firebase_admin import auth + + result = auth.delete_users(["uid1", "uid2", "uid3"]) + + print(f'Successfully deleted {result.success_count} users') + print(f'Failed to delete {result.failure_count} users') + for err in result.errors: + print(f'error #{result.index}, reason: {result.reason}') + # [END bulk_delete_users] + +def set_custom_user_claims(uid): + # [START set_custom_user_claims] + # Set admin privilege on the user corresponding to uid. + auth.set_custom_user_claims(uid, {'admin': True}) + # The new custom claims will propagate to the user's ID token the + # next time a new one is issued. + # [END set_custom_user_claims] + + id_token = 'id_token' + # [START verify_custom_claims] + # Verify the ID token first. + claims = auth.verify_id_token(id_token) + if claims['admin'] is True: + # Allow access to requested admin resource. + pass + # [END verify_custom_claims] + + # [START read_custom_user_claims] + # Lookup the user associated with the specified uid. + user = auth.get_user(uid) + # The claims can be accessed on the user record. 
+ print(user.custom_claims.get('admin')) + # [END read_custom_user_claims] + +def set_custom_user_claims_script(): + # [START set_custom_user_claims_script] + user = auth.get_user_by_email('user@admin.example.com') + # Confirm user is verified + if user.email_verified: + # Add custom claims for additional privileges. + # This will be picked up by the user on token refresh or next sign in on new device. + auth.set_custom_user_claims(user.uid, { + 'admin': True + }) + # [END set_custom_user_claims_script] + +def set_custom_user_claims_incremental(): + # [START set_custom_user_claims_incremental] + user = auth.get_user_by_email('user@admin.example.com') + # Add incremental custom claim without overwriting existing claims. + current_custom_claims = user.custom_claims + if current_custom_claims.get('admin'): + # Add level. + current_custom_claims['accessLevel'] = 10 + # Add custom claims for additional privileges. + auth.set_custom_user_claims(user.uid, current_custom_claims) + # [END set_custom_user_claims_incremental] + +def list_all_users(): + # [START list_all_users] + # Start listing users from the beginning, 1000 at a time. + page = auth.list_users() + while page: + for user in page.users: + print('User: ' + user.uid) + # Get next batch of users. + page = page.get_next_page() + + # Iterate through all users. This will still retrieve users in batches, + # buffering no more than 1000 users in memory at a time. + for user in auth.list_users().iterate_all(): + print('User: ' + user.uid) + # [END list_all_users] + +def create_session_cookie(flask, app): + # [START session_login] + @app.route('/sessionLogin', methods=['POST']) + def session_login(): + # Get the ID token sent by the client + id_token = flask.request.json['idToken'] + # Set session expiration to 5 days. + expires_in = datetime.timedelta(days=5) + try: + # Create the session cookie. This will also verify the ID token in the process. + # The session cookie will have the same claims as the ID token. 
+ session_cookie = auth.create_session_cookie(id_token, expires_in=expires_in) + response = flask.jsonify({'status': 'success'}) + # Set cookie policy for session cookie. + expires = datetime.datetime.now() + expires_in + response.set_cookie( + 'session', session_cookie, expires=expires, httponly=True, secure=True) + return response + except exceptions.FirebaseError: + return flask.abort(401, 'Failed to create a session cookie') + # [END session_login] + +def check_auth_time(id_token, flask): + # [START check_auth_time] + # To ensure that cookies are set only on recently signed in users, check auth_time in + # ID token before creating a cookie. + try: + decoded_claims = auth.verify_id_token(id_token) + # Only process if the user signed in within the last 5 minutes. + if time.time() - decoded_claims['auth_time'] < 5 * 60: + expires_in = datetime.timedelta(days=5) + expires = datetime.datetime.now() + expires_in + session_cookie = auth.create_session_cookie(id_token, expires_in=expires_in) + response = flask.jsonify({'status': 'success'}) + response.set_cookie( + 'session', session_cookie, expires=expires, httponly=True, secure=True) + return response + # User did not sign in recently. To guard against ID token theft, require + # re-authentication. + return flask.abort(401, 'Recent sign in required') + except auth.InvalidIdTokenError: + return flask.abort(401, 'Invalid ID token') + except exceptions.FirebaseError: + return flask.abort(401, 'Failed to create a session cookie') + # [END check_auth_time] + +def verfy_session_cookie(app, flask): + def serve_content_for_user(decoded_claims): + print('Serving content with claims:', decoded_claims) + return flask.jsonify({'status': 'success'}) + + # [START session_verify] + @app.route('/profile', methods=['POST']) + def access_restricted_content(): + session_cookie = flask.request.cookies.get('session') + if not session_cookie: + # Session cookie is unavailable. Force user to login. 
+ return flask.redirect('/login') + + # Verify the session cookie. In this case an additional check is added to detect + # if the user's Firebase session was revoked, user deleted/disabled, etc. + try: + decoded_claims = auth.verify_session_cookie(session_cookie, check_revoked=True) + return serve_content_for_user(decoded_claims) + except auth.InvalidSessionCookieError: + # Session cookie is invalid, expired or revoked. Force user to login. + return flask.redirect('/login') + # [END session_verify] + +def check_permissions(session_cookie, flask): + def serve_content_for_admin(decoded_claims): + print('Serving content with claims:', decoded_claims) + return flask.jsonify({'status': 'success'}) + + # [START session_verify_with_permission_check] + try: + decoded_claims = auth.verify_session_cookie(session_cookie, check_revoked=True) + # Check custom claims to confirm user is an admin. + if decoded_claims.get('admin') is True: + return serve_content_for_admin(decoded_claims) + + return flask.abort(401, 'Insufficient permissions') + except auth.InvalidSessionCookieError: + # Session cookie is invalid, expired or revoked. Force user to login. 
+ return flask.redirect('/login') + # [END session_verify_with_permission_check] + +def clear_session_cookie(app, flask): + # [START session_clear] + @app.route('/sessionLogout', methods=['POST']) + def session_logout(): + response = flask.make_response(flask.redirect('/login')) + response.set_cookie('session', expires=0) + return response + # [END session_clear] + +def clear_session_cookie_and_revoke(app, flask): + # [START session_clear_and_revoke] + @app.route('/sessionLogout', methods=['POST']) + def session_logout(): + session_cookie = flask.request.cookies.get('session') + try: + decoded_claims = auth.verify_session_cookie(session_cookie) + auth.revoke_refresh_tokens(decoded_claims['sub']) + response = flask.make_response(flask.redirect('/login')) + response.set_cookie('session', expires=0) + return response + except auth.InvalidSessionCookieError: + return flask.redirect('/login') + # [END session_clear_and_revoke] + +def import_users(): + # [START build_user_list] + # Up to 1000 users can be imported at once. + users = [ + auth.ImportUserRecord( + uid='uid1', + email='user1@example.com', + password_hash=b'password_hash_1', + password_salt=b'salt1' + ), + auth.ImportUserRecord( + uid='uid2', + email='user2@example.com', + password_hash=b'password_hash_2', + password_salt=b'salt2' + ), + ] + # [END build_user_list] + + # [START import_users] + hash_alg = auth.UserImportHash.hmac_sha256(key=b'secret_key') + try: + result = auth.import_users(users, hash_alg=hash_alg) + print( + f'Successfully imported {result.success_count} users. Failed to import ' + f'{result.failure_count} users.') + for err in result.errors: + print(f'Failed to import {users[err.index].uid} due to {err.reason}') + except exceptions.FirebaseError: + # Some unrecoverable error occurred that prevented the operation from running. 
+ pass + # [END import_users] + +def import_with_hmac(): + # [START import_with_hmac] + users = [ + auth.ImportUserRecord( + uid='some-uid', + email='user@example.com', + password_hash=b'password_hash', + password_salt=b'salt' + ), + ] + + hash_alg = auth.UserImportHash.hmac_sha256(key=b'secret') + try: + result = auth.import_users(users, hash_alg=hash_alg) + for err in result.errors: + print('Failed to import user:', err.reason) + except exceptions.FirebaseError as error: + print('Error importing users:', error) + # [END import_with_hmac] + +def import_with_pbkdf(): + # [START import_with_pbkdf] + users = [ + auth.ImportUserRecord( + uid='some-uid', + email='user@example.com', + password_hash=b'password_hash', + password_salt=b'salt' + ), + ] + + hash_alg = auth.UserImportHash.pbkdf2_sha256(rounds=100000) + try: + result = auth.import_users(users, hash_alg=hash_alg) + for err in result.errors: + print('Failed to import user:', err.reason) + except exceptions.FirebaseError as error: + print('Error importing users:', error) + # [END import_with_pbkdf] + +def import_with_standard_scrypt(): + # [START import_with_standard_scrypt] + users = [ + auth.ImportUserRecord( + uid='some-uid', + email='user@example.com', + password_hash=b'password_hash', + password_salt=b'salt' + ), + ] + + hash_alg = auth.UserImportHash.standard_scrypt( + memory_cost=1024, parallelization=16, block_size=8, derived_key_length=64) + try: + result = auth.import_users(users, hash_alg=hash_alg) + for err in result.errors: + print('Failed to import user:', err.reason) + except exceptions.FirebaseError as error: + print('Error importing users:', error) + # [END import_with_standard_scrypt] + +def import_with_bcrypt(): + # [START import_with_bcrypt] + users = [ + auth.ImportUserRecord( + uid='some-uid', + email='user@example.com', + password_hash=b'password_hash', + password_salt=b'salt' + ), + ] + + hash_alg = auth.UserImportHash.bcrypt() + try: + result = auth.import_users(users, hash_alg=hash_alg) 
+ for err in result.errors: + print('Failed to import user:', err.reason) + except exceptions.FirebaseError as error: + print('Error importing users:', error) + # [END import_with_bcrypt] + +def import_with_scrypt(): + # [START import_with_scrypt] + users = [ + auth.ImportUserRecord( + uid='some-uid', + email='user@example.com', + password_hash=base64.urlsafe_b64decode('password_hash'), + password_salt=base64.urlsafe_b64decode('salt') + ), + ] + + # All the parameters below can be obtained from the Firebase Console's "Users" + # section. Base64 encoded parameters must be decoded into raw bytes. + hash_alg = auth.UserImportHash.scrypt( + key=base64.b64decode('base64_secret'), + salt_separator=base64.b64decode('base64_salt_separator'), + rounds=8, + memory_cost=14 + ) + try: + result = auth.import_users(users, hash_alg=hash_alg) + for err in result.errors: + print('Failed to import user:', err.reason) + except exceptions.FirebaseError as error: + print('Error importing users:', error) + # [END import_with_scrypt] + +def import_without_password(): + # [START import_without_password] + users = [ + auth.ImportUserRecord( + uid='some-uid', + display_name='John Doe', + email='johndoe@gmail.com', + photo_url='http://www.example.com/12345678/photo.png', + email_verified=True, + phone_number='+11234567890', + custom_claims={'admin': True}, # set this user as admin + provider_data=[ # user with Google provider + auth.UserProvider( + uid='google-uid', + email='johndoe@gmail.com', + display_name='John Doe', + photo_url='http://www.example.com/12345678/photo.png', + provider_id='google.com' + ) + ], + ), + ] + try: + result = auth.import_users(users) + for err in result.errors: + print('Failed to import user:', err.reason) + except exceptions.FirebaseError as error: + print('Error importing users:', error) + # [END import_without_password] + +def init_action_code_settings(): + # [START init_action_code_settings] + action_code_settings = auth.ActionCodeSettings( + 
url='https://www.example.com/checkout?cartId=1234', + handle_code_in_app=True, + ios_bundle_id='com.example.ios', + android_package_name='com.example.android', + android_install_app=True, + android_minimum_version='12', + dynamic_link_domain='coolapp.page.link', + ) + # [END init_action_code_settings] + return action_code_settings + +def password_reset_link(): + action_code_settings = init_action_code_settings() + # [START password_reset_link] + email = 'user@example.com' + link = auth.generate_password_reset_link(email, action_code_settings) + # Construct password reset email from a template embedding the link, and send + # using a custom SMTP server. + send_custom_email(email, link) + # [END password_reset_link] + +def email_verification_link(): + action_code_settings = init_action_code_settings() + # [START email_verification_link] + email = 'user@example.com' + link = auth.generate_email_verification_link(email, action_code_settings) + # Construct email from a template embedding the link, and send + # using a custom SMTP server. + send_custom_email(email, link) + # [END email_verification_link] + +def sign_in_with_email_link(): + action_code_settings = init_action_code_settings() + # [START sign_in_with_email_link] + email = 'user@example.com' + link = auth.generate_sign_in_with_email_link(email, action_code_settings) + # Construct email from a template embedding the link, and send + # using a custom SMTP server. 
+ send_custom_email(email, link) + # [END sign_in_with_email_link] + +def send_custom_email(email, link): + del email + del link + +def create_saml_provider_config(): + # [START create_saml_provider] + saml = auth.create_saml_provider_config( + display_name='SAML provider name', + enabled=True, + provider_id='saml.myProvider', + idp_entity_id='IDP_ENTITY_ID', + sso_url='https://example.com/saml/sso/1234/', + x509_certificates=[ + '-----BEGIN CERTIFICATE-----\nCERT1...\n-----END CERTIFICATE-----', + '-----BEGIN CERTIFICATE-----\nCERT2...\n-----END CERTIFICATE-----', + ], + rp_entity_id='P_ENTITY_ID', + callback_url='https://project-id.firebaseapp.com/__/auth/handler') + + print('Created new SAML provider:', saml.provider_id) + # [END create_saml_provider] + +def update_saml_provider_config(): + # [START update_saml_provider] + saml = auth.update_saml_provider_config( + 'saml.myProvider', + x509_certificates=[ + '-----BEGIN CERTIFICATE-----\nCERT2...\n-----END CERTIFICATE-----', + '-----BEGIN CERTIFICATE-----\nCERT3...\n-----END CERTIFICATE-----', + ]) + + print('Updated SAML provider:', saml.provider_id) + # [END update_saml_provider] + +def get_saml_provider_config(): + # [START get_saml_provider] + saml = auth.get_saml_provider_config('saml.myProvider') + print(saml.display_name, saml.enabled) + # [END get_saml_provider] + +def delete_saml_provider_config(): + # [START delete_saml_provider] + auth.delete_saml_provider_config('saml.myProvider') + # [END delete_saml_provider] + +def list_saml_provider_configs(): + # [START list_saml_providers] + for saml in auth.list_saml_provider_configs('nextPageToken').iterate_all(): + print(saml.provider_id) + # [END list_saml_providers] + +def create_oidc_provider_config(): + # [START create_oidc_provider] + oidc = auth.create_oidc_provider_config( + display_name='OIDC provider name', + enabled=True, + provider_id='oidc.myProvider', + client_id='CLIENT_ID2', + issuer='https://oidc.com/CLIENT_ID2') + + print('Created new OIDC 
provider:', oidc.provider_id) + # [END create_oidc_provider] + +def update_oidc_provider_config(): + # [START update_oidc_provider] + oidc = auth.update_oidc_provider_config( + 'oidc.myProvider', + client_id='CLIENT_ID', + issuer='https://oidc.com') + + print('Updated OIDC provider:', oidc.provider_id) + # [END update_oidc_provider] + +def get_oidc_provider_config(): + # [START get_oidc_provider] + oidc = auth.get_oidc_provider_config('oidc.myProvider') + + print(oidc.display_name, oidc.enabled) + # [END get_oidc_provider] + +def delete_oidc_provider_config(): + # [START delete_oidc_provider] + auth.delete_oidc_provider_config('oidc.myProvider') + # [END delete_oidc_provider] + +def list_oidc_provider_configs(): + # [START list_oidc_providers] + for oidc in auth.list_oidc_provider_configs('nextPageToken').iterate_all(): + print(oidc.provider_id) + # [END list_oidc_providers] + +def get_tenant_client(tenant_id): + # [START get_tenant_client] + from firebase_admin import tenant_mgt + + tenant_client = tenant_mgt.auth_for_tenant(tenant_id) + # [END get_tenant_client] + return tenant_client + +def get_tenant(tenant_id): + # [START get_tenant] + tenant = tenant_mgt.get_tenant(tenant_id) + + print('Retrieved tenant:', tenant.tenant_id) + # [END get_tenant] + +def create_tenant(): + # [START create_tenant] + tenant = tenant_mgt.create_tenant( + display_name='myTenant1', + enable_email_link_sign_in=True, + allow_password_sign_up=True) + + print('Created tenant:', tenant.tenant_id) + # [END create_tenant] + +def update_tenant(tenant_id): + # [START update_tenant] + tenant = tenant_mgt.update_tenant( + tenant_id, + display_name='updatedName', + allow_password_sign_up=False) # Disable email provider + + print('Updated tenant:', tenant.tenant_id) + # [END update_tenant] + +def delete_tenant(tenant_id): + # [START delete_tenant] + tenant_mgt.delete_tenant(tenant_id) + # [END delete_tenant] + +def list_tenants(): + # [START list_tenants] + for tenant in 
tenant_mgt.list_tenants().iterate_all(): + print('Retrieved tenant:', tenant.tenant_id) + # [END list_tenants] + +def create_provider_tenant(): + # [START get_tenant_client_short] + tenant_client = tenant_mgt.auth_for_tenant('TENANT-ID') + # [END get_tenant_client_short] + + # [START create_saml_provider_tenant] + saml = tenant_client.create_saml_provider_config( + display_name='SAML provider name', + enabled=True, + provider_id='saml.myProvider', + idp_entity_id='IDP_ENTITY_ID', + sso_url='https://example.com/saml/sso/1234/', + x509_certificates=[ + '-----BEGIN CERTIFICATE-----\nCERT1...\n-----END CERTIFICATE-----', + '-----BEGIN CERTIFICATE-----\nCERT2...\n-----END CERTIFICATE-----', + ], + rp_entity_id='P_ENTITY_ID', + callback_url='https://project-id.firebaseapp.com/__/auth/handler') + + print('Created new SAML provider:', saml.provider_id) + # [END create_saml_provider_tenant] + +def update_provider_tenant(tenant_client): + # [START update_saml_provider_tenant] + saml = tenant_client.update_saml_provider_config( + 'saml.myProvider', + x509_certificates=[ + '-----BEGIN CERTIFICATE-----\nCERT2...\n-----END CERTIFICATE-----', + '-----BEGIN CERTIFICATE-----\nCERT3...\n-----END CERTIFICATE-----', + ]) + + print('Updated SAML provider:', saml.provider_id) + # [END update_saml_provider_tenant] + +def get_provider_tenant(tennat_client): + # [START get_saml_provider_tenant] + saml = tennat_client.get_saml_provider_config('saml.myProvider') + print(saml.display_name, saml.enabled) + # [END get_saml_provider_tenant] + +def list_provider_configs_tenant(tenant_client): + # [START list_saml_providers_tenant] + for saml in tenant_client.list_saml_provider_configs('nextPageToken').iterate_all(): + print(saml.provider_id) + # [END list_saml_providers_tenant] + +def delete_provider_config_tenant(tenant_client): + # [START delete_saml_provider_tenant] + tenant_client.delete_saml_provider_config('saml.myProvider') + # [END delete_saml_provider_tenant] + +def 
get_user_tenant(tenant_client): + uid = 'some_string_uid' + + # [START get_user_tenant] + # Get an auth.Client from tenant_mgt.auth_for_tenant() + user = tenant_client.get_user(uid) + print('Successfully fetched user data:', user.uid) + # [END get_user_tenant] + +def get_user_by_email_tenant(tenant_client): + email = 'some@email.com' + # [START get_user_by_email_tenant] + user = tenant_client.get_user_by_email(email) + print('Successfully fetched user data:', user.uid) + # [END get_user_by_email_tenant] + +def create_user_tenant(tenant_client): + # [START create_user_tenant] + user = tenant_client.create_user( + email='user@example.com', + email_verified=False, + phone_number='+15555550100', + password='secretPassword', + display_name='John Doe', + photo_url='http://www.example.com/12345678/photo.png', + disabled=False) + print('Successfully created new user:', user.uid) + # [END create_user_tenant] + +def update_user_tenant(tenant_client, uid): + # [START update_user_tenant] + user = tenant_client.update_user( + uid, + email='user@example.com', + phone_number='+15555550100', + email_verified=True, + password='newPassword', + display_name='John Doe', + photo_url='http://www.example.com/12345678/photo.png', + disabled=True) + print('Successfully updated user:', user.uid) + # [END update_user_tenant] + +def delete_user_tenant(tenant_client, uid): + # [START delete_user_tenant] + tenant_client.delete_user(uid) + print('Successfully deleted user') + # [END delete_user_tenant] + +def list_users_tenant(tenant_client): + # [START list_all_users_tenant] + # Note, behind the scenes, the iterator will retrieve 1000 users at a time through the API + for user in tenant_client.list_users().iterate_all(): + print('User: ' + user.uid) + + # Iterating by pages of 1000 users at a time. + page = tenant_client.list_users() + while page: + for user in page.users: + print('User: ' + user.uid) + # Get next batch of users.
+ page = page.get_next_page() + # [END list_all_users_tenant] + +def import_with_hmac_tenant(tenant_client): + # [START import_with_hmac_tenant] + users = [ + auth.ImportUserRecord( + uid='uid1', + email='user1@example.com', + password_hash=b'password_hash_1', + password_salt=b'salt1' + ), + auth.ImportUserRecord( + uid='uid2', + email='user2@example.com', + password_hash=b'password_hash_2', + password_salt=b'salt2' + ), + ] + + hash_alg = auth.UserImportHash.hmac_sha256(key=b'secret') + try: + result = tenant_client.import_users(users, hash_alg=hash_alg) + for err in result.errors: + print('Failed to import user:', err.reason) + except exceptions.FirebaseError as error: + print('Error importing users:', error) + # [END import_with_hmac_tenant] + +def import_without_password_tenant(tenant_client): + # [START import_without_password_tenant] + users = [ + auth.ImportUserRecord( + uid='some-uid', + display_name='John Doe', + email='johndoe@gmail.com', + photo_url='http://www.example.com/12345678/photo.png', + email_verified=True, + phone_number='+11234567890', + custom_claims={'admin': True}, # set this user as admin + provider_data=[ # user with SAML provider + auth.UserProvider( + uid='saml-uid', + email='johndoe@gmail.com', + display_name='John Doe', + photo_url='http://www.example.com/12345678/photo.png', + provider_id='saml.acme' + ) + ], + ), + ] + try: + result = tenant_client.import_users(users) + for err in result.errors: + print('Failed to import user:', err.reason) + except exceptions.FirebaseError as error: + print('Error importing users:', error) + # [END import_without_password_tenant] + +def verify_id_token_tenant(tenant_client, id_token): + # [START verify_id_token_tenant] + # id_token comes from the client app + try: + decoded_token = tenant_client.verify_id_token(id_token) + + # This should be set to TENANT-ID. Otherwise TenantIdMismatchError error raised. 
+ print('Verified ID token from tenant:', decoded_token['firebase']['tenant']) + except tenant_mgt.TenantIdMismatchError: + # Token belongs to a different tenant. + pass + # [END verify_id_token_tenant] + +def verify_id_token_access_control_tenant(id_token): + # [START id_token_access_control_tenant] + decoded_token = auth.verify_id_token(id_token) + + tenant = decoded_token['firebase']['tenant'] + if tenant == 'TENANT-ID1': + # Allow appropriate level of access for TENANT-ID1. + pass + elif tenant == 'TENANT-ID2': + # Allow appropriate level of access for TENANT-ID2. + pass + else: + # Access not allowed -- Handle error + pass + # [END id_token_access_control_tenant] + +def revoke_refresh_tokens_tenant(tenant_client, uid): + # [START revoke_tokens_tenant] + # Revoke all refresh tokens for a specified user in a specified tenant for whatever reason. + # Retrieve the timestamp of the revocation, in seconds since the epoch. + tenant_client.revoke_refresh_tokens(uid) + + user = tenant_client.get_user(uid) + # Convert to seconds as the auth_time in the token claims is in seconds. + revocation_second = user.tokens_valid_after_timestamp / 1000 + print(f'Tokens revoked at: {revocation_second}') + # [END revoke_tokens_tenant] + +def verify_id_token_and_check_revoked_tenant(tenant_client, id_token): + # [START verify_id_token_and_check_revoked_tenant] + # Verify the ID token for a specific tenant while checking if the token is revoked. + try: + # Verify the ID token while checking if the token is revoked by + # passing check_revoked=True. + decoded_token = tenant_client.verify_id_token(id_token, check_revoked=True) + # Token is valid and not revoked. + uid = decoded_token['uid'] + except tenant_mgt.TenantIdMismatchError: + # Token belongs to a different tenant. + pass + except auth.RevokedIdTokenError: + # Token revoked, inform the user to reauthenticate or signOut().
+ pass + except auth.UserDisabledError: + # Token belongs to a disabled user record. + pass + except auth.InvalidIdTokenError: + # Token is invalid + pass + # [END verify_id_token_and_check_revoked_tenant] + +def custom_claims_set_tenant(tenant_client, uid): + # [START set_custom_user_claims_tenant] + # Set admin privilege on the user corresponding to uid. + tenant_client.set_custom_user_claims(uid, {'admin': True}) + # The new custom claims will propagate to the user's ID token the + # next time a new one is issued. + # [END set_custom_user_claims_tenant] + +def custom_claims_verify_tenant(tenant_client, id_token): + # [START verify_custom_claims_tenant] + # Verify the ID token first. + claims = tenant_client.verify_id_token(id_token) + if claims['admin'] is True: + # Allow access to requested admin resource. + pass + # [END verify_custom_claims_tenant] + +def custom_claims_read_tenant(tenant_client, uid): + # [START read_custom_user_claims_tenant] + # Lookup the user associated with the specified uid. + user = tenant_client.get_user(uid) + + # The claims can be accessed on the user record. + print(user.custom_claims.get('admin')) + # [END read_custom_user_claims_tenant] + +def generate_email_verification_link_tenant(tenant_client): + # [START email_verification_link_tenant] + action_code_settings = auth.ActionCodeSettings( + url='https://www.example.com/checkout?cartId=1234', + handle_code_in_app=True, + ios_bundle_id='com.example.ios', + android_package_name='com.example.android', + android_install_app=True, + android_minimum_version='12', + # FDL custom domain. + dynamic_link_domain='coolapp.page.link', + ) + + email = 'user@example.com' + link = tenant_client.generate_email_verification_link(email, action_code_settings) + # Construct email from a template embedding the link, and send + # using a custom SMTP server. 
+ send_custom_email(email, link) + # [END email_verification_link_tenant] + + +initialize_sdk_with_service_account() +initialize_sdk_with_application_default() +#initialize_sdk_with_refresh_token() +access_services_default() +access_services_nondefault() +create_token_uid() +token_with_claims = create_token_with_claims() +#verify_token_uid() + +uid = create_user() +create_user_with_id() +get_user(uid) +get_user_by_email() +get_user_by_phone_number() +update_user(uid) +set_custom_user_claims(uid) +list_all_users() +delete_user(uid) diff --git a/snippets/database/__init__.py b/snippets/database/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/snippets/database/index.py b/snippets/database/index.py new file mode 100644 index 000000000..99bb4981e --- /dev/null +++ b/snippets/database/index.py @@ -0,0 +1,360 @@ +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import print_function + +import firebase_admin +from firebase_admin import credentials +from firebase_admin import db + +def authenticate_with_admin_privileges(): + # [START authenticate_with_admin_privileges] + import firebase_admin + from firebase_admin import credentials + from firebase_admin import db + + # Fetch the service account key JSON file contents + cred = credentials.Certificate('path/to/serviceAccountKey.json') + + # Initialize the app with a service account, granting admin privileges + firebase_admin.initialize_app(cred, { + 'databaseURL': 'https://databaseName.firebaseio.com' + }) + + # As an admin, the app has access to read and write all data, regardless of Security Rules + ref = db.reference('restricted_access/secret_document') + print(ref.get()) + # [END authenticate_with_admin_privileges] + firebase_admin.delete_app(firebase_admin.get_app()) + +def authenticate_with_limited_privileges(): + # [START authenticate_with_limited_privileges] + import firebase_admin + from firebase_admin import credentials + from firebase_admin import db + + # Fetch the service account key JSON file contents + cred = credentials.Certificate('path/to/serviceAccountKey.json') + + # Initialize the app with a custom auth variable, limiting the server's access + firebase_admin.initialize_app(cred, { + 'databaseURL': 'https://databaseName.firebaseio.com', + 'databaseAuthVariableOverride': { + 'uid': 'my-service-worker' + } + }) + + # The app only has access as defined in the Security Rules + ref = db.reference('/some_resource') + print(ref.get()) + # [END authenticate_with_limited_privileges] + firebase_admin.delete_app(firebase_admin.get_app()) + +def authenticate_with_guest_privileges(): + # [START authenticate_with_guest_privileges] + import firebase_admin + from firebase_admin import credentials + from firebase_admin import db + + # Fetch the service account key JSON file contents + cred = credentials.Certificate('path/to/serviceAccountKey.json') + 
# Initialize the app with a None auth variable, limiting the server's access + firebase_admin.initialize_app(cred, { + 'databaseURL': 'https://databaseName.firebaseio.com', + 'databaseAuthVariableOverride': None + }) + + # The app only has access to public data as defined in the Security Rules + ref = db.reference('/public_resource') + print(ref.get()) + # [END authenticate_with_guest_privileges] + firebase_admin.delete_app(firebase_admin.get_app()) + +def get_reference(): + # [START get_reference] + # Import database module. + from firebase_admin import db + + # Get a database reference to our blog. + ref = db.reference('server/saving-data/fireblog') + # [END get_reference] + print(ref.key) + +def set_value(): + ref = db.reference('server/saving-data/fireblog') + + # [START set_value] + users_ref = ref.child('users') + users_ref.set({ + 'alanisawesome': { + 'date_of_birth': 'June 23, 1912', + 'full_name': 'Alan Turing' + }, + 'gracehop': { + 'date_of_birth': 'December 9, 1906', + 'full_name': 'Grace Hopper' + } + }) + # [END set_value] + +def set_child_value(): + ref = db.reference('server/saving-data/fireblog') + users_ref = ref.child('users') + + # [START set_child_value] + users_ref.child('alanisawesome').set({ + 'date_of_birth': 'June 23, 1912', + 'full_name': 'Alan Turing' + }) + users_ref.child('gracehop').set({ + 'date_of_birth': 'December 9, 1906', + 'full_name': 'Grace Hopper' + }) + # [END set_child_value] + +def update_child(): + ref = db.reference('server/saving-data/fireblog') + users_ref = ref.child('users') + + # [START update_child] + hopper_ref = users_ref.child('gracehop') + hopper_ref.update({ + 'nickname': 'Amazing Grace' + }) + # [END update_child] + +def update_children(): + ref = db.reference('server/saving-data/fireblog') + users_ref = ref.child('users') + + # [START update_children] + users_ref.update({ + 'alanisawesome/nickname': 'Alan The Machine', + 'gracehop/nickname': 'Amazing Grace' + }) + # [END update_children] + +def 
overwrite_value(): + ref = db.reference('server/saving-data/fireblog') + users_ref = ref.child('users') + + # [START overwrite_value] + users_ref.update({ + 'alanisawesome': { + 'nickname': 'Alan The Machine' + }, + 'gracehop': { + 'nickname': 'Amazing Grace' + } + }) + # [END overwrite_value] + +def push_value(): + ref = db.reference('server/saving-data/fireblog') + + # [START push_value] + posts_ref = ref.child('posts') + + new_post_ref = posts_ref.push() + new_post_ref.set({ + 'author': 'gracehop', + 'title': 'Announcing COBOL, a New Programming Language' + }) + + # We can also chain the two calls together + posts_ref.push().set({ + 'author': 'alanisawesome', + 'title': 'The Turing Machine' + }) + # [END push_value] + +def push_and_set_value(): + ref = db.reference('server/saving-data/fireblog') + posts_ref = ref.child('posts') + + # [START push_and_set_value] + # This is equivalent to the calls to push().set(...) above + posts_ref.push({ + 'author': 'gracehop', + 'title': 'Announcing COBOL, a New Programming Language' + }) + # [END push_and_set_value] + +def get_push_key(): + ref = db.reference('server/saving-data/fireblog') + posts_ref = ref.child('posts') + + # [START push_key] + # Generate a reference to a new location and add some data using push() + new_post_ref = posts_ref.push() + + # Get the unique key generated by push() + post_id = new_post_ref.key + # [END push_key] + print(post_id) + +def run_transaction(): + # [START transaction] + def increment_votes(current_value): + return current_value + 1 if current_value else 1 + + upvotes_ref = db.reference('server/saving-data/fireblog/posts/-JRHTHaIs-jNPLXOQivY/upvotes') + try: + new_vote_count = upvotes_ref.transaction(increment_votes) + print('Transaction completed') + except db.TransactionAbortedError: + print('Transaction failed to commit') + # [END transaction] + +def read_value(): + # [START read_value] + # Import database module. 
+ from firebase_admin import db + + # Get a database reference to our posts + ref = db.reference('server/saving-data/fireblog/posts') + + # Read the data at the posts reference (this is a blocking operation) + print(ref.get()) + # [END read_value] + +def order_by_child(): + # [START order_by_child] + ref = db.reference('dinosaurs') + snapshot = ref.order_by_child('height').get() + for key, val in snapshot.items(): + print(f'{key} was {val} meters tall') + # [END order_by_child] + +def order_by_nested_child(): + # [START order_by_nested_child] + ref = db.reference('dinosaurs') + snapshot = ref.order_by_child('dimensions/height').get() + for key, val in snapshot.items(): + print(f'{key} was {val} meters tall') + # [END order_by_nested_child] + +def order_by_key(): + # [START order_by_key] + ref = db.reference('dinosaurs') + snapshot = ref.order_by_key().get() + print(snapshot) + # [END order_by_key] + +def order_by_value(): + # [START order_by_value] + ref = db.reference('scores') + snapshot = ref.order_by_value().get() + for key, val in snapshot.items(): + print(f'The {key} dinosaur\'s score is {val}') + # [END order_by_value] + +def limit_query(): + # [START limit_query_1] + ref = db.reference('dinosaurs') + snapshot = ref.order_by_child('weight').limit_to_last(2).get() + for key in snapshot: + print(key) + # [END limit_query_1] + + # [START limit_query_2] + ref = db.reference('dinosaurs') + snapshot = ref.order_by_child('height').limit_to_first(2).get() + for key in snapshot: + print(key) + # [END limit_query_2] + + # [START limit_query_3] + scores_ref = db.reference('scores') + snapshot = scores_ref.order_by_value().limit_to_last(3).get() + for key, val in snapshot.items(): + print(f'The {key} dinosaur\'s score is {val}') + # [END limit_query_3] + +def range_query(): + # [START range_query_1] + ref = db.reference('dinosaurs') + snapshot = ref.order_by_child('height').start_at(3).get() + for key in snapshot: + print(key) + # [END range_query_1] + + # [START 
range_query_2] + ref = db.reference('dinosaurs') + snapshot = ref.order_by_key().end_at('pterodactyl').get() + for key in snapshot: + print(key) + # [END range_query_2] + + # [START range_query_3] + ref = db.reference('dinosaurs') + snapshot = ref.order_by_key().start_at('b').end_at('b\uf8ff').get() + for key in snapshot: + print(key) + # [END range_query_3] + + # [START range_query_4] + ref = db.reference('dinosaurs') + snapshot = ref.order_by_child('height').equal_to(25).get() + for key in snapshot: + print(key) + # [END range_query_4] + +def complex_query(): + # [START complex_query] + ref = db.reference('dinosaurs') + favorite_dino_height = ref.child('stegosaurus').child('height').get() + query = ref.order_by_child('height').end_at(favorite_dino_height).limit_to_last(2) + snapshot = query.get() + if len(snapshot) == 2: + # Data is ordered by increasing height, so we want the first entry. + # Second entry is stegosaurus. + for key in snapshot: + print(f'The dinosaur just shorter than the stegosaurus is {key}') + return + else: + print('The stegosaurus is the shortest dino') + # [END complex_query] + + +service_account = 'path/to/serviceAccount.json' +database_url = 'https://databaseName.firebaseio.com' + +cred = credentials.Certificate(service_account) +firebase_admin.initialize_app(cred, { + 'databaseURL': database_url +}) + +get_reference() +set_value() +set_child_value() +update_child() +update_children() +overwrite_value() +push_value() +push_and_set_value() +get_push_key() +run_transaction() + +read_value() +order_by_child() +#order_by_nested_child() +order_by_key() +order_by_value() +limit_query() +range_query() +complex_query() + +firebase_admin.delete_app(firebase_admin.get_app()) diff --git a/snippets/firestore/__init__.py b/snippets/firestore/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/snippets/firestore/firestore.py b/snippets/firestore/firestore.py new file mode 100644 index 000000000..18040b742 --- /dev/null +++ 
b/snippets/firestore/firestore.py @@ -0,0 +1,84 @@ +# Copyright 2022 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from firebase_admin import firestore + +# pylint: disable=invalid-name +def init_firestore_client(): + # [START init_firestore_client] + import firebase_admin + from firebase_admin import firestore + + # Application Default credentials are automatically created. + app = firebase_admin.initialize_app() + db = firestore.client() + # [END init_firestore_client] + +def init_firestore_client_application_default(): + # [START init_firestore_client_application_default] + import firebase_admin + from firebase_admin import credentials + from firebase_admin import firestore + + # Use the application default credentials. + cred = credentials.ApplicationDefault() + + firebase_admin.initialize_app(cred) + db = firestore.client() + # [END init_firestore_client_application_default] + +def init_firestore_client_service_account(): + # [START init_firestore_client_service_account] + import firebase_admin + from firebase_admin import credentials + from firebase_admin import firestore + + # Use a service account. 
+ cred = credentials.Certificate('path/to/serviceAccount.json') + + app = firebase_admin.initialize_app(cred) + + db = firestore.client() + # [END init_firestore_client_service_account] + +def read_data(): + import firebase_admin + from firebase_admin import firestore + + app = firebase_admin.initialize_app() + db = firestore.client() + + # [START read_data] + doc_ref = db.collection('users').document('alovelace') + doc = doc_ref.get() + if doc.exists: + return f'data: {doc.to_dict()}' + return "Document does not exist." + # [END read_data] + +def add_data(): + import firebase_admin + from firebase_admin import firestore + + app = firebase_admin.initialize_app() + db = firestore.client() + + # [START add_data] + doc_ref = db.collection("users").document("alovelace") + doc_ref.set({ + "first": "Ada", + "last": "Lovelace", + "born": 1815 + }) + # [END add_data] diff --git a/snippets/firestore/firestore_async.py b/snippets/firestore/firestore_async.py new file mode 100644 index 000000000..cf815504e --- /dev/null +++ b/snippets/firestore/firestore_async.py @@ -0,0 +1,132 @@ +# Copyright 2022 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import asyncio + +from firebase_admin import firestore_async + +# pylint: disable=invalid-name +def init_firestore_async_client(): + # [START init_firestore_async_client] + import firebase_admin + from firebase_admin import firestore_async + + # Application Default credentials are automatically created. 
+ app = firebase_admin.initialize_app() + db = firestore_async.client() + # [END init_firestore_async_client] + +def init_firestore_async_client_application_default(): + # [START init_firestore_async_client_application_default] + import firebase_admin + from firebase_admin import credentials + from firebase_admin import firestore_async + + # Use the application default credentials. + cred = credentials.ApplicationDefault() + + firebase_admin.initialize_app(cred) + db = firestore_async.client() + # [END init_firestore_async_client_application_default] + +def init_firestore_async_client_service_account(): + # [START init_firestore_async_client_service_account] + import firebase_admin + from firebase_admin import credentials + from firebase_admin import firestore_async + + # Use a service account. + cred = credentials.Certificate('path/to/serviceAccount.json') + + app = firebase_admin.initialize_app(cred) + + db = firestore_async.client() + # [END init_firestore_async_client_service_account] + +def close_async_sessions(): + import firebase_admin + from firebase_admin import firestore_async + + # [START close_async_sessions] + app = firebase_admin.initialize_app() + db = firestore_async.client() + + # Perform firestore tasks... + + # Delete app to ensure that all the async sessions are closed gracefully. 
+ firebase_admin.delete_app(app) + # [END close_async_sessions] + +async def read_data(): + import firebase_admin + from firebase_admin import firestore_async + + app = firebase_admin.initialize_app() + db = firestore_async.client() + + # [START read_data] + doc_ref = db.collection('users').document('alovelace') + doc = await doc_ref.get() + if doc.exists: + return f'data: {doc.to_dict()}' + # [END read_data] + +async def add_data(): + import firebase_admin + from firebase_admin import firestore_async + + app = firebase_admin.initialize_app() + db = firestore_async.client() + + # [START add_data] + doc_ref = db.collection("users").document("alovelace") + await doc_ref.set({ + "first": "Ada", + "last": "Lovelace", + "born": 1815 + }) + # [END add_data] + +def firestore_async_client_with_asyncio_eventloop(): + # [START firestore_async_client_with_asyncio_eventloop] + import asyncio + import firebase_admin + from firebase_admin import firestore_async + + app = firebase_admin.initialize_app() + db = firestore_async.client() + + # Create coroutine to add user data. + async def add_data(): + doc_ref = db.collection("users").document("alovelace") + print("Start adding user...") + await doc_ref.set({ + "first": "Ada", + "last": "Lovelace", + "born": 1815 + }) + print("Done adding user!") + + # Another coroutine with secondary tasks we want to complete. + async def while_waiting(): + print("Start other tasks...") + await asyncio.sleep(2) + print("Finished with other tasks!") + + # Initialize an eventloop to execute tasks until completion. 
+ loop = asyncio.get_event_loop() + tasks = [add_data(), while_waiting()] + loop.run_until_complete(asyncio.gather(*tasks)) + firebase_admin.delete_app(app) + # [END firestore_async_client_with_asyncio_eventloop] diff --git a/snippets/messaging/__init__.py b/snippets/messaging/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/snippets/messaging/cloud_messaging.py b/snippets/messaging/cloud_messaging.py new file mode 100644 index 000000000..6fb525231 --- /dev/null +++ b/snippets/messaging/cloud_messaging.py @@ -0,0 +1,269 @@ +# Copyright 2018 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import datetime + +from firebase_admin import messaging + + +def send_to_token(): + # [START send_to_token] + # This registration token comes from the client FCM SDKs. + registration_token = 'YOUR_REGISTRATION_TOKEN' + + # See documentation on defining a message payload. + message = messaging.Message( + data={ + 'score': '850', + 'time': '2:45', + }, + token=registration_token, + ) + + # Send a message to the device corresponding to the provided + # registration token. + response = messaging.send(message) + # Response is a message ID string. + print('Successfully sent message:', response) + # [END send_to_token] + + +def send_to_topic(): + # [START send_to_topic] + # The topic name can be optionally prefixed with "/topics/". 
+ topic = 'highScores' + + # See documentation on defining a message payload. + message = messaging.Message( + data={ + 'score': '850', + 'time': '2:45', + }, + topic=topic, + ) + + # Send a message to the devices subscribed to the provided topic. + response = messaging.send(message) + # Response is a message ID string. + print('Successfully sent message:', response) + # [END send_to_topic] + + +def send_to_condition(): + # [START send_to_condition] + # Define a condition which will send to devices which are subscribed + # to either the Google stock or the tech industry topics. + condition = "'stock-GOOG' in topics || 'industry-tech' in topics" + + # See documentation on defining a message payload. + message = messaging.Message( + notification=messaging.Notification( + title='$GOOG up 1.43% on the day', + body='$GOOG gained 11.80 points to close at 835.67, up 1.43% on the day.', + ), + condition=condition, + ) + + # Send a message to devices subscribed to the combination of topics + # specified by the provided condition. + response = messaging.send(message) + # Response is a message ID string. + print('Successfully sent message:', response) + # [END send_to_condition] + + +def send_dry_run(): + message = messaging.Message( + data={ + 'score': '850', + 'time': '2:45', + }, + token='token', + ) + + # [START send_dry_run] + # Send a message in the dry run mode. + response = messaging.send(message, dry_run=True) + # Response is a message ID string. 
+ print('Dry run successful:', response) + # [END send_dry_run] + + +def android_message(): + # [START android_message] + message = messaging.Message( + android=messaging.AndroidConfig( + ttl=datetime.timedelta(seconds=3600), + priority='normal', + notification=messaging.AndroidNotification( + title='$GOOG up 1.43% on the day', + body='$GOOG gained 11.80 points to close at 835.67, up 1.43% on the day.', + icon='stock_ticker_update', + color='#f45342' + ), + ), + topic='industry-tech', + ) + # [END android_message] + return message + + +def apns_message(): + # [START apns_message] + message = messaging.Message( + apns=messaging.APNSConfig( + headers={'apns-priority': '10'}, + payload=messaging.APNSPayload( + aps=messaging.Aps( + alert=messaging.ApsAlert( + title='$GOOG up 1.43% on the day', + body='$GOOG gained 11.80 points to close at 835.67, up 1.43% on the day.', + ), + badge=42, + ), + ), + ), + topic='industry-tech', + ) + # [END apns_message] + return message + + +def webpush_message(): + # [START webpush_message] + message = messaging.Message( + webpush=messaging.WebpushConfig( + notification=messaging.WebpushNotification( + title='$GOOG up 1.43% on the day', + body='$GOOG gained 11.80 points to close at 835.67, up 1.43% on the day.', + icon='https://my-server/icon.png', + ), + ), + topic='industry-tech', + ) + # [END webpush_message] + return message + + +def all_platforms_message(): + # [START multi_platforms_message] + message = messaging.Message( + notification=messaging.Notification( + title='$GOOG up 1.43% on the day', + body='$GOOG gained 11.80 points to close at 835.67, up 1.43% on the day.', + ), + android=messaging.AndroidConfig( + ttl=datetime.timedelta(seconds=3600), + priority='normal', + notification=messaging.AndroidNotification( + icon='stock_ticker_update', + color='#f45342' + ), + ), + apns=messaging.APNSConfig( + payload=messaging.APNSPayload( + aps=messaging.Aps(badge=42), + ), + ), + topic='industry-tech', + ) + # [END 
multi_platforms_message] + return message + + +def subscribe_to_topic(): + topic = 'highScores' + # [START subscribe] + # These registration tokens come from the client FCM SDKs. + registration_tokens = [ + 'YOUR_REGISTRATION_TOKEN_1', + # ... + 'YOUR_REGISTRATION_TOKEN_n', + ] + + # Subscribe the devices corresponding to the registration tokens to the + # topic. + response = messaging.subscribe_to_topic(registration_tokens, topic) + # See the TopicManagementResponse reference documentation + # for the contents of response. + print(response.success_count, 'tokens were subscribed successfully') + # [END subscribe] + + +def unsubscribe_from_topic(): + topic = 'highScores' + # [START unsubscribe] + # These registration tokens come from the client FCM SDKs. + registration_tokens = [ + 'YOUR_REGISTRATION_TOKEN_1', + # ... + 'YOUR_REGISTRATION_TOKEN_n', + ] + + # Unsubscribe the devices corresponding to the registration tokens from the + # topic. + response = messaging.unsubscribe_from_topic(registration_tokens, topic) + # See the TopicManagementResponse reference documentation + # for the contents of response. + print(response.success_count, 'tokens were unsubscribed successfully') + # [END unsubscribe] + + +def send_each(): + registration_token = 'YOUR_REGISTRATION_TOKEN' + # [START send_each] + # Create a list containing up to 500 messages. + messages = [ + messaging.Message( + notification=messaging.Notification('Price drop', '5% off all electronics'), + token=registration_token, + ), + # ... + messaging.Message( + notification=messaging.Notification('Price drop', '2% off all books'), + topic='readers-club', + ), + ] + + response = messaging.send_each(messages) + # See the BatchResponse reference documentation + # for the contents of response. 
+ print(f'{response.success_count} messages were sent successfully') + # [END send_each] + +def send_each_for_multicast_and_handle_errors(): + # [START send_each_for_multicast_error] + # These registration tokens come from the client FCM SDKs. + registration_tokens = [ + 'YOUR_REGISTRATION_TOKEN_1', + # ... + 'YOUR_REGISTRATION_TOKEN_N', + ] + + message = messaging.MulticastMessage( + data={'score': '850', 'time': '2:45'}, + tokens=registration_tokens, + ) + response = messaging.send_each_for_multicast(message) + if response.failure_count > 0: + responses = response.responses + failed_tokens = [] + for idx, resp in enumerate(responses): + if not resp.success: + # The order of responses corresponds to the order of the registration tokens. + failed_tokens.append(registration_tokens[idx]) + print(f'List of tokens that caused failures: {failed_tokens}') + # [END send_each_for_multicast_error] diff --git a/tests/data/dinosaurs.json b/tests/data/dinosaurs.json new file mode 100644 index 000000000..9d7afaab9 --- /dev/null +++ b/tests/data/dinosaurs.json @@ -0,0 +1,78 @@ +{ + "dinosaurs": { + "bruhathkayosaurus": { + "appeared": -70000000, + "height": 25, + "length": 44, + "order": "saurischia", + "vanished": -70000000, + "weight": 135000, + "ratings": { + "pos": 1 + } + }, + "lambeosaurus": { + "appeared": -76000000, + "height": 2.1, + "length": 12.5, + "order": "ornithischia", + "vanished": -75000000, + "weight": 5000, + "ratings": { + "pos": 2 + } + }, + "linhenykus": { + "appeared": -85000000, + "height": 0.6, + "length": 1, + "order": "theropoda", + "vanished": -75000000, + "weight": 3, + "ratings": { + "pos": 3 + } + }, + "pterodactyl": { + "appeared": -150000000, + "height": 0.6, + "length": 0.8, + "order": "pterosauria", + "vanished": -148500000, + "weight": 2, + "ratings": { + "pos": 4 + } + }, + "stegosaurus": { + "appeared": -155000000, + "height": 4, + "length": 9, + "order": "ornithischia", + "vanished": -150000000, + "weight": 2500, + "ratings": { + "pos": 5 
+ } + }, + "triceratops": { + "appeared": -68000000, + "height": 3, + "length": 8, + "order": "ornithischia", + "vanished": -66000000, + "weight": 11000, + "ratings": { + "pos": 6 + } + } + }, + "scores": { + "bruhathkayosaurus": 55, + "lambeosaurus": 21, + "linhenykus": 80, + "pterodactyl": 93, + "stegosaurus": 5, + "triceratops": 22 + } +} diff --git a/tests/data/dinosaurs_index.json b/tests/data/dinosaurs_index.json new file mode 100644 index 000000000..cd71ce460 --- /dev/null +++ b/tests/data/dinosaurs_index.json @@ -0,0 +1,27 @@ +{ + "_adminsdk": { + "python": { + "dinodb": { + "dinosaurs": { + ".indexOn": ["height", "ratings/pos"] + }, + "scores": { + ".indexOn": ".value" + } + }, + "protected": { + "$uid": { + ".read": "auth != null", + ".write": "$uid === auth.uid" + } + }, + "admin": { + ".read": "false", + ".write": "false" + }, + "public": { + ".read": "true" + } + } + } +} diff --git a/tests/data/firebase_config.json b/tests/data/firebase_config.json new file mode 100644 index 000000000..5a120e3b6 --- /dev/null +++ b/tests/data/firebase_config.json @@ -0,0 +1,6 @@ +{ + "databaseAuthVariableOverride": {"some_key": "some_val"}, + "databaseURL": "https://hipster-chat.firebaseio.mock", + "projectId": "hipster-chat-mock", + "storageBucket": "hipster-chat.appspot.mock" +} diff --git a/tests/data/firebase_config_empty.json b/tests/data/firebase_config_empty.json new file mode 100644 index 000000000..e69de29bb diff --git a/tests/data/firebase_config_invalid.json b/tests/data/firebase_config_invalid.json new file mode 100644 index 000000000..74c098fca --- /dev/null +++ b/tests/data/firebase_config_invalid.json @@ -0,0 +1 @@ +baaaaad diff --git a/tests/data/firebase_config_invalid_key.json b/tests/data/firebase_config_invalid_key.json new file mode 100644 index 000000000..223c60044 --- /dev/null +++ b/tests/data/firebase_config_invalid_key.json @@ -0,0 +1,4 @@ +{ + "databaseUrrrrL": "https://hipster-chat.firebaseio.mock", + "projectId": "hipster-chat-mock" +} 
diff --git a/tests/data/firebase_config_partial.json b/tests/data/firebase_config_partial.json new file mode 100644 index 000000000..b02f92dde --- /dev/null +++ b/tests/data/firebase_config_partial.json @@ -0,0 +1,4 @@ +{ + "databaseURL": "https://hipster-chat.firebaseio.mock", + "projectId": "hipster-chat-mock" +} diff --git a/tests/data/get_user.json b/tests/data/get_user.json new file mode 100644 index 000000000..1f476daa4 --- /dev/null +++ b/tests/data/get_user.json @@ -0,0 +1,29 @@ +{ + "kind" : "identitytoolkit#GetAccountInfoResponse", + "users" : [ { + "localId" : "testuser", + "email" : "testuser@example.com", + "phoneNumber" : "+1234567890", + "emailVerified" : true, + "displayName" : "Test User", + "providerUserInfo" : [ { + "providerId" : "password", + "displayName" : "Test User", + "photoUrl" : "http://www.example.com/testuser/photo.png", + "federatedId" : "testuser@example.com", + "email" : "testuser@example.com", + "rawId" : "testuser@example.com" + }, { + "providerId" : "phone", + "phoneNumber" : "+1234567890", + "rawId" : "+1234567890" + } ], + "photoUrl" : "http://www.example.com/testuser/photo.png", + "passwordHash" : "passwordhash", + "passwordUpdatedAt" : 1.494364393E+12, + "validSince" : "1494364393", + "disabled" : false, + "createdAt" : "1234567890000", + "customAttributes" : "{\"admin\": true, \"package\": \"gold\"}" + } ] +} diff --git a/tests/data/invalid_model.tflite b/tests/data/invalid_model.tflite new file mode 100644 index 000000000..d8482f436 --- /dev/null +++ b/tests/data/invalid_model.tflite @@ -0,0 +1 @@ +This is not a tflite file. 
diff --git a/tests/data/list_oidc_provider_configs.json b/tests/data/list_oidc_provider_configs.json new file mode 100644 index 000000000..b2b381304 --- /dev/null +++ b/tests/data/list_oidc_provider_configs.json @@ -0,0 +1,18 @@ +{ + "oauthIdpConfigs": [ + { + "name":"projects/mock-project-id/oauthIdpConfigs/oidc.provider0", + "clientId": "CLIENT_ID", + "issuer": "https://oidc.com/issuer", + "displayName": "oidcProviderName", + "enabled": true + }, + { + "name":"projects/mock-project-id/oauthIdpConfigs/oidc.provider1", + "clientId": "CLIENT_ID", + "issuer": "https://oidc.com/issuer", + "displayName": "oidcProviderName", + "enabled": true + } + ] +} diff --git a/tests/data/list_saml_provider_configs.json b/tests/data/list_saml_provider_configs.json new file mode 100644 index 000000000..b568e1e09 --- /dev/null +++ b/tests/data/list_saml_provider_configs.json @@ -0,0 +1,40 @@ +{ + "inboundSamlConfigs": [ + { + "name": "projects/mock-project-id/inboundSamlConfigs/saml.provider0", + "idpConfig": { + "idpEntityId": "IDP_ENTITY_ID", + "ssoUrl": "https://example.com/login", + "signRequest": true, + "idpCertificates": [ + {"x509Certificate": "CERT1"}, + {"x509Certificate": "CERT2"} + ] + }, + "spConfig": { + "spEntityId": "RP_ENTITY_ID", + "callbackUri": "https://projectId.firebaseapp.com/__/auth/handler" + }, + "displayName": "samlProviderName", + "enabled": true + }, + { + "name": "projects/mock-project-id/inboundSamlConfigs/saml.provider1", + "idpConfig": { + "idpEntityId": "IDP_ENTITY_ID", + "ssoUrl": "https://example.com/login", + "signRequest": true, + "idpCertificates": [ + {"x509Certificate": "CERT1"}, + {"x509Certificate": "CERT2"} + ] + }, + "spConfig": { + "spEntityId": "RP_ENTITY_ID", + "callbackUri": "https://projectId.firebaseapp.com/__/auth/handler" + }, + "displayName": "samlProviderName", + "enabled": true + } + ] +} diff --git a/tests/data/list_users.json b/tests/data/list_users.json new file mode 100644 index 000000000..158a138f8 --- /dev/null +++ 
b/tests/data/list_users.json @@ -0,0 +1,55 @@ +{ + "users" : [ { + "localId" : "testuser0", + "email" : "testuser@example.com", + "phoneNumber" : "+1234567890", + "emailVerified" : true, + "displayName" : "Test User", + "providerUserInfo" : [ { + "providerId" : "password", + "displayName" : "Test User", + "photoUrl" : "http://www.example.com/testuser/photo.png", + "federatedId" : "testuser@example.com", + "email" : "testuser@example.com", + "rawId" : "testuser@example.com" + }, { + "providerId" : "phone", + "phoneNumber" : "+1234567890", + "rawId" : "+1234567890" + } ], + "photoUrl" : "http://www.example.com/testuser/photo.png", + "passwordHash" : "passwordHash", + "salt": "passwordSalt", + "passwordUpdatedAt" : 1.494364393E+12, + "validSince" : "1494364393", + "disabled" : false, + "createdAt" : "1234567890000", + "customAttributes" : "{\"admin\": true, \"package\": \"gold\"}" + }, { + "localId" : "testuser1", + "email" : "testuser@example.com", + "phoneNumber" : "+1234567890", + "emailVerified" : true, + "displayName" : "Test User", + "providerUserInfo" : [ { + "providerId" : "password", + "displayName" : "Test User", + "photoUrl" : "http://www.example.com/testuser/photo.png", + "federatedId" : "testuser@example.com", + "email" : "testuser@example.com", + "rawId" : "testuser@example.com" + }, { + "providerId" : "phone", + "phoneNumber" : "+1234567890", + "rawId" : "+1234567890" + } ], + "photoUrl" : "http://www.example.com/testuser/photo.png", + "passwordHash" : "passwordHash", + "salt": "passwordSalt", + "passwordUpdatedAt" : 1.494364393E+12, + "validSince" : "1494364393", + "disabled" : false, + "createdAt" : "1234567890000", + "customAttributes" : "{\"admin\": true, \"package\": \"gold\"}" + } ] +} diff --git a/tests/data/model1.tflite b/tests/data/model1.tflite new file mode 100644 index 000000000..c4b71b7a2 Binary files /dev/null and b/tests/data/model1.tflite differ diff --git a/tests/data/oidc_provider_config.json b/tests/data/oidc_provider_config.json new 
file mode 100644 index 000000000..89cf3eacf --- /dev/null +++ b/tests/data/oidc_provider_config.json @@ -0,0 +1,7 @@ +{ + "name":"projects/mock-project-id/oauthIdpConfigs/oidc.provider", + "clientId": "CLIENT_ID", + "issuer": "https://oidc.com/issuer", + "displayName": "oidcProviderName", + "enabled": true +} diff --git a/tests/data/saml_provider_config.json b/tests/data/saml_provider_config.json new file mode 100644 index 000000000..577340f2a --- /dev/null +++ b/tests/data/saml_provider_config.json @@ -0,0 +1,18 @@ +{ + "name": "projects/mock-project-id/inboundSamlConfigs/saml.provider", + "idpConfig": { + "idpEntityId": "IDP_ENTITY_ID", + "ssoUrl": "https://example.com/login", + "signRequest": true, + "idpCertificates": [ + {"x509Certificate": "CERT1"}, + {"x509Certificate": "CERT2"} + ] + }, + "spConfig": { + "spEntityId": "RP_ENTITY_ID", + "callbackUri": "https://projectId.firebaseapp.com/__/auth/handler" + }, + "displayName": "samlProviderName", + "enabled": true +} \ No newline at end of file diff --git a/tests/test_app.py b/tests/test_app.py index 6e977374b..0ff0854b4 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -13,19 +13,25 @@ # limitations under the License. """Tests for firebase_admin.App.""" +from collections import namedtuple import os import pytest +from google.auth.exceptions import DefaultCredentialsError import firebase_admin from firebase_admin import credentials +from firebase_admin import _utils from tests import testutils - CREDENTIAL = credentials.Certificate( testutils.resource_filename('service_account.json')) +CONFIG_JSON = firebase_admin._FIREBASE_CONFIG_ENV_VAR + +# This fixture will ignore the environment variable pointing to the default +# configuration for the duration of the tests. 
-class CredentialProvider(object): +class CredentialProvider: def init(self): pass @@ -68,6 +74,10 @@ def get(self): return None +class AppService: + def __init__(self, app): + self._app = app + @pytest.fixture(params=[Cert(), RefreshToken(), ExplicitAppDefault(), ImplicitAppDefault()], ids=['cert', 'refreshtoken', 'explicit-appdefault', 'implicit-appdefault']) def app_credential(request): @@ -80,18 +90,136 @@ def app_credential(request): def init_app(request): if request.param: return firebase_admin.initialize_app(CREDENTIAL, name=request.param) - else: - return firebase_admin.initialize_app(CREDENTIAL) + return firebase_admin.initialize_app(CREDENTIAL) + +@pytest.fixture(scope="function") +def env_test_case(request): + config_old = set_config_env(request.param.config_json) + yield request.param + revert_config_env(config_old) + + +EnvOptionsTestCase = namedtuple('EnvOptionsTestCase', + 'name, config_json, init_options, want_options') +env_options_test_cases = [ + EnvOptionsTestCase(name='Environment var not set, initialized with an empty options dict', + config_json=None, + init_options={}, + want_options={}), + EnvOptionsTestCase(name='Environment var empty, initialized with an empty options dict', + config_json='', + init_options={}, + want_options={}), + EnvOptionsTestCase(name='Environment var not set, initialized with no options dict', + config_json=None, + init_options=None, + want_options={}), + EnvOptionsTestCase(name='Environment empty, initialized with no options dict', + config_json='', + init_options=None, + want_options={}), + EnvOptionsTestCase(name='Environment var not set, initialized with options dict', + config_json=None, + init_options={'storageBucket': 'bucket1'}, + want_options={'storageBucket': 'bucket1'}), + EnvOptionsTestCase(name='Environment var set to file but ignored, initialized with options', + config_json='firebase_config.json', + init_options={'storageBucket': 'bucket1'}, + want_options={'storageBucket': 'bucket1'}), + 
EnvOptionsTestCase(name='Environment var set to json but ignored, initialized with options', + config_json='{"storageBucket": "hipster-chat.appspot.mock"}', + init_options={'storageBucket': 'bucket1'}, + want_options={'storageBucket': 'bucket1'}), + EnvOptionsTestCase(name='Environment var set to file, initialized with no options dict', + config_json='firebase_config.json', + init_options=None, + want_options={'databaseAuthVariableOverride': {'some_key': 'some_val'}, + 'databaseURL': 'https://hipster-chat.firebaseio.mock', + 'projectId': 'hipster-chat-mock', + 'storageBucket': 'hipster-chat.appspot.mock'}), + EnvOptionsTestCase(name='Environment var set to json string, initialized with no options dict', + config_json='{"databaseAuthVariableOverride": {"some_key": "some_val"}, ' + + '"databaseURL": "https://hipster-chat.firebaseio.mock", ' + + '"projectId": "hipster-chat-mock",' + + '"storageBucket": "hipster-chat.appspot.mock"}', + init_options=None, + want_options={'databaseAuthVariableOverride': {'some_key': 'some_val'}, + 'databaseURL': 'https://hipster-chat.firebaseio.mock', + 'projectId': 'hipster-chat-mock', + 'storageBucket': 'hipster-chat.appspot.mock'}), + EnvOptionsTestCase(name='Invalid key in json file is ignored, the rest of the values are used', + config_json='firebase_config_invalid_key.json', + init_options=None, + want_options={'projectId': 'hipster-chat-mock'}), + EnvOptionsTestCase(name='Invalid key in json file is ignored, the rest of the values are used', + config_json='{"databaseUrrrrL": "https://hipster-chat.firebaseio.mock",' + + '"projectId": "hipster-chat-mock"}', + init_options=None, + want_options={'projectId': 'hipster-chat-mock'}), + EnvOptionsTestCase(name='Environment var set to file but ignored, init empty options dict', + config_json='firebase_config.json', + init_options={}, + want_options={}), + EnvOptionsTestCase(name='Environment var set to string but ignored, init empty options dict', + config_json='{"projectId": 
"hipster-chat-mock"}', + init_options={}, + want_options={}), + EnvOptionsTestCase(name='Environment variable set to json file with some options set', + config_json='firebase_config_partial.json', + init_options=None, + want_options={'databaseURL': 'https://hipster-chat.firebaseio.mock', + 'projectId': 'hipster-chat-mock'}), + EnvOptionsTestCase(name='Environment variable set to json string with some options set', + config_json='{"databaseURL": "https://hipster-chat.firebaseio.mock",' + + '"projectId": "hipster-chat-mock"}', + init_options=None, + want_options={'databaseURL': 'https://hipster-chat.firebaseio.mock', + 'projectId': 'hipster-chat-mock'}), + EnvOptionsTestCase(name='Environment var set to json file but ignored, init with options dict', + config_json='firebase_config_partial.json', + init_options={'projectId': 'pid1-mock', + 'storageBucket': 'sb1-mock'}, + want_options={'projectId': 'pid1-mock', + 'storageBucket': 'sb1-mock'}), + EnvOptionsTestCase(name='Environment var set to file but ignored, init with full options dict', + config_json='firebase_config.json', + init_options={'databaseAuthVariableOverride': 'davy1-mock', + 'databaseURL': 'https://db1-mock', + 'projectId': 'pid1-mock', + 'storageBucket': 'sb1-.mock'}, + want_options={'databaseAuthVariableOverride': 'davy1-mock', + 'databaseURL': 'https://db1-mock', + 'projectId': 'pid1-mock', + 'storageBucket': 'sb1-.mock'})] + +def set_config_env(config_json): + config_old = os.environ.get(CONFIG_JSON) + if config_json is not None: + if not config_json or config_json.startswith('{'): + os.environ[CONFIG_JSON] = config_json + else: + os.environ[CONFIG_JSON] = testutils.resource_filename( + config_json) + elif os.environ.get(CONFIG_JSON) is not None: + del os.environ[CONFIG_JSON] + return config_old + + +def revert_config_env(config_old): + if config_old is not None: + os.environ[CONFIG_JSON] = config_old + elif os.environ.get(CONFIG_JSON) is not None: + del os.environ[CONFIG_JSON] -class 
TestFirebaseApp(object): +class TestFirebaseApp: """Test cases for App initialization and life cycle.""" - invalid_credentials = ['', 'foo', 0, 1, dict(), list(), tuple(), True, False] - invalid_options = ['', 0, 1, list(), tuple(), True, False] - invalid_names = [None, '', 0, 1, dict(), list(), tuple(), True, False] + invalid_credentials = ['', 'foo', 0, 1, {}, [], tuple(), True, False] + invalid_options = ['', 0, 1, [], tuple(), True, False] + invalid_names = [None, '', 0, 1, {}, [], tuple(), True, False] invalid_apps = [ - None, '', 0, 1, dict(), list(), tuple(), True, False, + None, '', 0, 1, {}, [], tuple(), True, False, firebase_admin.App('uninitialized', CREDENTIAL, {}) ] @@ -118,6 +246,16 @@ def test_non_default_app_init(self, app_credential): with pytest.raises(ValueError): firebase_admin.initialize_app(app_credential, name='myApp') + def test_app_init_with_google_auth_cred(self): + cred = testutils.MockGoogleCredential() + assert isinstance(cred, credentials.GoogleAuthCredentials) + app = firebase_admin.initialize_app(cred) + assert cred is app.credential.get_credential() + assert isinstance(app.credential, credentials.Base) + assert isinstance(app.credential, credentials._ExternalCredentials) + with pytest.raises(ValueError): + firebase_admin.initialize_app(app_credential) + @pytest.mark.parametrize('cred', invalid_credentials) def test_app_init_with_invalid_credential(self, cred): with pytest.raises(ValueError): @@ -133,6 +271,87 @@ def test_app_init_with_invalid_name(self, name): with pytest.raises(ValueError): firebase_admin.initialize_app(CREDENTIAL, name=name) + + @pytest.mark.parametrize('bad_file_name', ['firebase_config_empty.json', + 'firebase_config_invalid.json', + 'no_such_file']) + def test_app_init_with_invalid_config_file(self, bad_file_name): + config_old = set_config_env(bad_file_name) + with pytest.raises(ValueError): + firebase_admin.initialize_app(CREDENTIAL) + revert_config_env(config_old) + + def 
test_app_init_with_invalid_config_string(self): + config_old = set_config_env('{,,') + with pytest.raises(ValueError): + firebase_admin.initialize_app(CREDENTIAL) + revert_config_env(config_old) + + + @pytest.mark.parametrize('env_test_case', env_options_test_cases, + ids=[x.name for x in env_options_test_cases], + indirect=['env_test_case']) + def test_app_init_with_default_config(self, env_test_case): + app = firebase_admin.initialize_app(CREDENTIAL, options=env_test_case.init_options) + assert app.options._options == env_test_case.want_options + + def test_project_id_from_options(self, app_credential): + app = firebase_admin.initialize_app( + app_credential, options={'projectId': 'test-project'}, name='myApp') + assert app.project_id == 'test-project' + + def test_project_id_from_credentials(self): + app = firebase_admin.initialize_app(CREDENTIAL, name='myApp') + assert app.project_id == 'mock-project-id' + + def test_project_id_from_environment(self): + variables = ['GOOGLE_CLOUD_PROJECT', 'GCLOUD_PROJECT'] + for idx, var in enumerate(variables): + old_project_id = os.environ.get(var) + new_project_id = f'env-project-{idx}' + os.environ[var] = new_project_id + try: + app = firebase_admin.initialize_app( + testutils.MockCredential(), name=f'myApp{var}') + assert app.project_id == new_project_id + finally: + if old_project_id: + os.environ[var] = old_project_id + else: + del os.environ[var] + + def test_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='myApp') + assert app.project_id is None + testutils.run_without_project_id(evaluate) + + def test_no_project_id_from_environment(self, app_credential): + default_env = 'GOOGLE_APPLICATION_CREDENTIALS' + gcloud_env = 'CLOUDSDK_CONFIG' + def evaluate(): + app = firebase_admin.initialize_app(app_credential, name='myApp') + app._credential._g_credential = None + old_gcloud_var = os.environ.get(gcloud_env) + os.environ[gcloud_env] = '' + old_default_var = 
os.environ.get(default_env) + if old_default_var: + del os.environ[default_env] + with pytest.raises((AttributeError, DefaultCredentialsError)): + project_id = app._credential.project_id + project_id = app.project_id + if old_default_var: + os.environ[default_env] = old_default_var + if old_gcloud_var: + os.environ[gcloud_env] = old_gcloud_var + assert project_id is None + testutils.run_without_project_id(evaluate) + + def test_non_string_project_id(self): + options = {'projectId': {'key': 'not a string'}} + with pytest.raises(ValueError): + firebase_admin.initialize_app(CREDENTIAL, options=options) + def test_app_get(self, init_app): assert init_app is firebase_admin.get_app(init_app.name) @@ -159,3 +378,22 @@ def test_app_delete(self, init_app): firebase_admin.get_app(init_app.name) with pytest.raises(ValueError): firebase_admin.delete_app(init_app) + + def test_app_services(self, init_app): + service = _utils.get_app_service(init_app, 'test.service', AppService) + assert isinstance(service, AppService) + service2 = _utils.get_app_service(init_app, 'test.service', AppService) + assert service is service2 + firebase_admin.delete_app(init_app) + with pytest.raises(ValueError): + _utils.get_app_service(init_app, 'test.service', AppService) + + @pytest.mark.parametrize('arg', [0, 1, True, False, 'str', [], {}, tuple()]) + def test_app_services_invalid_arg(self, arg): + with pytest.raises(ValueError): + _utils.get_app_service(arg, 'test.service', AppService) + + def test_app_services_invalid_app(self, init_app): + app = firebase_admin.App(init_app.name, init_app.credential, {}) + with pytest.raises(ValueError): + _utils.get_app_service(app, 'test.service', AppService) diff --git a/tests/test_app_check.py b/tests/test_app_check.py new file mode 100644 index 000000000..e55ae39de --- /dev/null +++ b/tests/test_app_check.py @@ -0,0 +1,275 @@ +# Copyright 2022 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test cases for the firebase_admin.app_check module.""" +import base64 +import pytest + +from jwt import PyJWK, InvalidAudienceError, InvalidIssuerError +from jwt import ExpiredSignatureError, InvalidSignatureError +import firebase_admin +from firebase_admin import app_check +from tests import testutils + +NON_STRING_ARGS = [[], tuple(), {}, True, False, 1, 0] + +APP_ID = "1234567890" +PROJECT_ID = "1334" +SCOPED_PROJECT_ID = f"projects/{PROJECT_ID}" +ISSUER = "https://firebaseappcheck.googleapis.com/" +JWT_PAYLOAD_SAMPLE = { + "headers": { + "alg": "RS256", + "typ": "JWT" + }, + "sub": APP_ID, + "name": "John Doe", + "iss": ISSUER, + "aud": [SCOPED_PROJECT_ID] +} + +secret_key = "secret" +signing_key = { + "kty": "oct", + # Using HS256 for simplicity, production key will use RS256 + "alg": "HS256", + "k": base64.urlsafe_b64encode(secret_key.encode()) +} + +class TestBatch: + + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': PROJECT_ID}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + +class TestVerifyToken(TestBatch): + + def test_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id') + with pytest.raises(ValueError): + app_check.verify_token(token="app_check_token", app=app) + testutils.run_without_project_id(evaluate) + + 
@pytest.mark.parametrize('token', NON_STRING_ARGS) + def test_verify_token_with_non_string_raises_error(self, token): + with pytest.raises(ValueError) as excinfo: + app_check.verify_token(token) + expected = f'app check token "{token}" must be a string.' + assert str(excinfo.value) == expected + + def test_has_valid_token_headers(self): + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + + headers = {"alg": "RS256", 'typ': "JWT"} + assert app_check_service._has_valid_token_headers(headers=headers) is None + + def test_has_valid_token_headers_with_incorrect_type_raises_error(self): + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + headers = {"alg": "RS256", 'typ': "WRONG"} + with pytest.raises(ValueError) as excinfo: + app_check_service._has_valid_token_headers(headers=headers) + + expected = 'The provided App Check token has an incorrect type header' + assert str(excinfo.value) == expected + + def test_has_valid_token_headers_with_incorrect_algorithm_raises_error(self): + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + headers = {"alg": "HS256", 'typ': "JWT"} + with pytest.raises(ValueError) as excinfo: + app_check_service._has_valid_token_headers(headers=headers) + + expected = ('The provided App Check token has an incorrect alg header. 
' + 'Expected RS256 but got HS256.') + assert str(excinfo.value) == expected + + def test_decode_and_verify(self, mocker): + jwt_decode_mock = mocker.patch("jwt.decode", return_value=JWT_PAYLOAD_SAMPLE) + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + payload = app_check_service._decode_and_verify( + token=None, + signing_key="1234", + ) + + jwt_decode_mock.assert_called_once_with( + None, "1234", algorithms=["RS256"], audience=SCOPED_PROJECT_ID) + assert payload == JWT_PAYLOAD_SAMPLE.copy() + + def test_decode_and_verify_with_incorrect_token_and_key(self): + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + with pytest.raises(ValueError) as excinfo: + app_check_service._decode_and_verify( + token="1232132", + signing_key=signing_key, + ) + + expected = ( + 'Decoding App Check token failed. Error: Not enough segments') + assert str(excinfo.value) == expected + + def test_decode_and_verify_with_expired_token_raises_error(self, mocker): + mocker.patch("jwt.decode", side_effect=ExpiredSignatureError) + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + with pytest.raises(ValueError) as excinfo: + app_check_service._decode_and_verify( + token="1232132", + signing_key=signing_key, + ) + + expected = ( + 'The provided App Check token has expired.') + assert str(excinfo.value) == expected + + def test_decode_and_verify_with_invalid_signature_raises_error(self, mocker): + mocker.patch("jwt.decode", side_effect=InvalidSignatureError) + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + with pytest.raises(ValueError) as excinfo: + app_check_service._decode_and_verify( + token="1232132", + signing_key=signing_key, + ) + + expected = ( + 'The provided App Check token has an invalid signature.') + assert str(excinfo.value) == expected + + def test_decode_and_verify_with_invalid_aud_raises_error(self, 
mocker): + mocker.patch("jwt.decode", side_effect=InvalidAudienceError) + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + with pytest.raises(ValueError) as excinfo: + app_check_service._decode_and_verify( + token="1232132", + signing_key=signing_key, + ) + + expected = ( + 'The provided App Check token has an incorrect "aud" (audience) claim. ' + f'Expected payload to include {SCOPED_PROJECT_ID}.') + assert str(excinfo.value) == expected + + def test_decode_and_verify_with_invalid_iss_raises_error(self, mocker): + mocker.patch("jwt.decode", side_effect=InvalidIssuerError) + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + with pytest.raises(ValueError) as excinfo: + app_check_service._decode_and_verify( + token="1232132", + signing_key=signing_key, + ) + + expected = ( + 'The provided App Check token has an incorrect "iss" (issuer) claim. ' + f'Expected claim to include {ISSUER}') + assert str(excinfo.value) == expected + + def test_decode_and_verify_with_none_sub_raises_error(self, mocker): + jwt_with_none_sub = JWT_PAYLOAD_SAMPLE.copy() + jwt_with_none_sub['sub'] = None + mocker.patch("jwt.decode", return_value=jwt_with_none_sub) + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + with pytest.raises(ValueError) as excinfo: + app_check_service._decode_and_verify( + token="1232132", + signing_key=signing_key, + ) + + expected = ( + 'The provided App Check token "sub" (subject) claim ' + f'"{None}" must be a non-empty string.') + assert str(excinfo.value) == expected + + def test_decode_and_verify_with_non_string_sub_raises_error(self, mocker): + sub_number = 1234 + jwt_with_none_sub = JWT_PAYLOAD_SAMPLE.copy() + jwt_with_none_sub['sub'] = sub_number + mocker.patch("jwt.decode", return_value=jwt_with_none_sub) + app = firebase_admin.get_app() + app_check_service = app_check._get_app_check_service(app) + with pytest.raises(ValueError) as 
excinfo: + app_check_service._decode_and_verify( + token="1232132", + signing_key=signing_key, + ) + + expected = ( + 'The provided App Check token "sub" (subject) claim ' + f'"{sub_number}" must be a string.') + assert str(excinfo.value) == expected + + def test_verify_token(self, mocker): + mocker.patch("jwt.decode", return_value=JWT_PAYLOAD_SAMPLE) + mocker.patch("jwt.PyJWKClient.get_signing_key_from_jwt", return_value=PyJWK(signing_key)) + mocker.patch("jwt.get_unverified_header", return_value=JWT_PAYLOAD_SAMPLE.get("headers")) + app = firebase_admin.get_app() + + payload = app_check.verify_token("encoded", app) + expected = JWT_PAYLOAD_SAMPLE.copy() + expected['app_id'] = APP_ID + assert payload == expected + + def test_verify_token_with_non_list_audience_raises_error(self, mocker): + jwt_with_non_list_audience = JWT_PAYLOAD_SAMPLE.copy() + jwt_with_non_list_audience["aud"] = '1234' + mocker.patch("jwt.decode", return_value=jwt_with_non_list_audience) + mocker.patch("jwt.PyJWKClient.get_signing_key_from_jwt", return_value=PyJWK(signing_key)) + mocker.patch("jwt.get_unverified_header", return_value=JWT_PAYLOAD_SAMPLE.get("headers")) + app = firebase_admin.get_app() + + with pytest.raises(ValueError) as excinfo: + app_check.verify_token("encoded", app) + + expected = 'Firebase App Check token has incorrect "aud" (audience) claim.' 
+ assert str(excinfo.value) == expected + + def test_verify_token_with_empty_list_audience_raises_error(self, mocker): + jwt_with_empty_list_audience = JWT_PAYLOAD_SAMPLE.copy() + jwt_with_empty_list_audience["aud"] = [] + mocker.patch("jwt.decode", return_value=jwt_with_empty_list_audience) + mocker.patch("jwt.PyJWKClient.get_signing_key_from_jwt", return_value=PyJWK(signing_key)) + mocker.patch("jwt.get_unverified_header", return_value=JWT_PAYLOAD_SAMPLE.get("headers")) + app = firebase_admin.get_app() + + with pytest.raises(ValueError) as excinfo: + app_check.verify_token("encoded", app) + + expected = 'Firebase App Check token has incorrect "aud" (audience) claim.' + assert str(excinfo.value) == expected + + def test_verify_token_with_incorrect_issuer_raises_error(self, mocker): + jwt_with_non_incorrect_issuer = JWT_PAYLOAD_SAMPLE.copy() + jwt_with_non_incorrect_issuer["iss"] = "https://dwyfrequency.googleapis.com/" + mocker.patch("jwt.decode", return_value=jwt_with_non_incorrect_issuer) + mocker.patch("jwt.PyJWKClient.get_signing_key_from_jwt", return_value=PyJWK(signing_key)) + mocker.patch("jwt.get_unverified_header", return_value=JWT_PAYLOAD_SAMPLE.get("headers")) + app = firebase_admin.get_app() + + with pytest.raises(ValueError) as excinfo: + app_check.verify_token("encoded", app) + + expected = 'Token does not contain the correct "iss" (issuer).' + assert str(excinfo.value) == expected diff --git a/tests/test_auth.py b/tests/test_auth.py deleted file mode 100644 index afcf5c754..000000000 --- a/tests/test_auth.py +++ /dev/null @@ -1,258 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Test cases for firebase_admin.auth module.""" -import os -import time - -from google.auth import crypt -from google.auth import exceptions -from google.auth import jwt -import google.oauth2.id_token -import pytest -import six - -import firebase_admin -from firebase_admin import auth -from firebase_admin import credentials -from tests import testutils - - -FIREBASE_AUDIENCE = ('https://identitytoolkit.googleapis.com/' - 'google.identity.identitytoolkit.v1.IdentityToolkit') - -MOCK_UID = 'user1' -MOCK_CREDENTIAL = credentials.Certificate( - testutils.resource_filename('service_account.json')) -MOCK_PUBLIC_CERTS = testutils.resource('public_certs.json') -MOCK_PRIVATE_KEY = testutils.resource('private_key.pem') -MOCK_SERVICE_ACCOUNT_EMAIL = MOCK_CREDENTIAL.service_account_email - - -class AuthFixture(object): - def __init__(self, name=None): - if name: - self.app = firebase_admin.get_app(name) - else: - self.app = None - - def create_custom_token(self, *args): - if self.app: - return auth.create_custom_token(*args, app=self.app) - else: - return auth.create_custom_token(*args) - - def verify_id_token(self, *args): - if self.app: - return auth.verify_id_token(*args, app=self.app) - else: - return auth.verify_id_token(*args) - -def setup_module(): - firebase_admin.initialize_app(MOCK_CREDENTIAL) - firebase_admin.initialize_app(MOCK_CREDENTIAL, name='testApp') - -def teardown_module(): - firebase_admin.delete_app(firebase_admin.get_app()) - firebase_admin.delete_app(firebase_admin.get_app('testApp')) - -@pytest.fixture(params=[None, 'testApp'], 
ids=['DefaultApp', 'CustomApp']) -def authtest(request): - """Returns an AuthFixture instance. - - Instances returned by this fixture are parameterized to use either the defult App instance, - or a custom App instance named 'testApp'. Due to this parameterization, each test case that - depends on this fixture will get executed twice (as two test cases); once with the default - App, and once with the custom App. - """ - return AuthFixture(request.param) - -@pytest.fixture -def non_cert_app(): - """Returns an App instance initialized with a mock non-cert credential. - - The lines of code following the yield statement are guaranteed to run after each test case - that depends on this fixture. This ensures the proper cleanup of the App instance after - tests. - """ - app = firebase_admin.initialize_app(credentials.Base(), name='non-cert-app') - yield app - firebase_admin.delete_app(app) - -def verify_custom_token(custom_token, expected_claims): - assert isinstance(custom_token, six.binary_type) - token = google.oauth2.id_token.verify_token( - custom_token, - testutils.MockRequest(200, MOCK_PUBLIC_CERTS), - FIREBASE_AUDIENCE) - assert token['uid'] == MOCK_UID - assert token['iss'] == MOCK_SERVICE_ACCOUNT_EMAIL - assert token['sub'] == MOCK_SERVICE_ACCOUNT_EMAIL - header = jwt.decode_header(custom_token) - assert header.get('typ') == 'JWT' - assert header.get('alg') == 'RS256' - if expected_claims: - for key, value in expected_claims.items(): - assert value == token['claims'][key] - -def _merge_jwt_claims(defaults, overrides): - defaults.update(overrides) - for key, value in overrides.items(): - if value is None: - del defaults[key] - return defaults - -def get_id_token(payload_overrides=None, header_overrides=None): - signer = crypt.RSASigner.from_string(MOCK_PRIVATE_KEY) - headers = { - 'kid': 'mock-key-id-1' - } - payload = { - 'aud': MOCK_CREDENTIAL.project_id, - 'iss': 'https://securetoken.google.com/' + MOCK_CREDENTIAL.project_id, - 'iat': int(time.time()) - 100, - 
'exp': int(time.time()) + 3600, - 'sub': '1234567890', - 'admin': True, - } - if header_overrides: - headers = _merge_jwt_claims(headers, header_overrides) - if payload_overrides: - payload = _merge_jwt_claims(payload, payload_overrides) - return jwt.encode(signer, payload, header=headers) - - -TEST_ID_TOKEN = get_id_token() - - -class TestCreateCustomToken(object): - - valid_args = { - 'Basic': (MOCK_UID, {'one': 2, 'three': 'four'}), - 'NoDevClaims': (MOCK_UID, None), - 'EmptyDevClaims': (MOCK_UID, {}), - } - - invalid_args = { - 'NoUid': (None, None, ValueError), - 'EmptyUid': ('', None, ValueError), - 'LongUid': ('x'*129, None, ValueError), - 'BoolUid': (True, None, ValueError), - 'IntUid': (1, None, ValueError), - 'ListUid': ([], None, ValueError), - 'EmptyDictUid': ({}, None, ValueError), - 'NonEmptyDictUid': ({'a':1}, None, ValueError), - 'BoolClaims': (MOCK_UID, True, ValueError), - 'IntClaims': (MOCK_UID, 1, ValueError), - 'StrClaims': (MOCK_UID, 'foo', ValueError), - 'ListClaims': (MOCK_UID, [], ValueError), - 'TupleClaims': (MOCK_UID, (1, 2), ValueError), - 'ReservedClaims': (MOCK_UID, {'sub':'1234'}, ValueError), - } - - @pytest.mark.parametrize('user,claims', valid_args.values(), - ids=list(valid_args)) - def test_valid_params(self, authtest, user, claims): - verify_custom_token(authtest.create_custom_token(user, claims), claims) - - @pytest.mark.parametrize('user,claims,error', invalid_args.values(), - ids=list(invalid_args)) - def test_invalid_params(self, authtest, user, claims, error): - with pytest.raises(error): - authtest.create_custom_token(user, claims) - - def test_noncert_credential(self, non_cert_app): - with pytest.raises(ValueError): - auth.create_custom_token(MOCK_UID, app=non_cert_app) - - -class TestVerifyIdToken(object): - - valid_tokens = { - 'BinaryToken': TEST_ID_TOKEN, - 'TextToken': TEST_ID_TOKEN.decode('utf-8'), - } - - invalid_tokens = { - 'NoKid': get_id_token(header_overrides={'kid': None}), - 'WrongKid': 
get_id_token(header_overrides={'kid': 'foo'}), - 'BadAudience': get_id_token({'aud': 'bad-audience'}), - 'BadIssuer': get_id_token({ - 'iss': 'https://securetoken.google.com/wrong-issuer' - }), - 'EmptySubject': get_id_token({'sub': ''}), - 'IntSubject': get_id_token({'sub': 10}), - 'LongStrSubject': get_id_token({'sub': 'a' * 129}), - 'FutureToken': get_id_token({'iat': int(time.time()) + 1000}), - 'ExpiredToken': get_id_token({ - 'iat': int(time.time()) - 10000, - 'exp': int(time.time()) - 3600 - }), - 'NoneToken': None, - 'EmptyToken': '', - 'BoolToken': True, - 'IntToken': 1, - 'ListToken': [], - 'EmptyDictToken': {}, - 'NonEmptyDictToken': {'a': 1}, - 'BadFormatToken': 'foobar' - } - - def setup_method(self): - auth._request = testutils.MockRequest(200, MOCK_PUBLIC_CERTS) - - @pytest.mark.parametrize('id_token', valid_tokens.values(), ids=list(valid_tokens)) - def test_valid_token(self, authtest, id_token): - claims = authtest.verify_id_token(id_token) - assert claims['admin'] is True - assert claims['uid'] == claims['sub'] - - @pytest.mark.parametrize('id_token', invalid_tokens.values(), - ids=list(invalid_tokens)) - def test_invalid_token(self, authtest, id_token): - with pytest.raises(ValueError): - authtest.verify_id_token(id_token) - - def test_project_id_env_var(self, non_cert_app): - gcloud_project = os.environ.get(auth.GCLOUD_PROJECT_ENV_VAR) - try: - os.environ[auth.GCLOUD_PROJECT_ENV_VAR] = MOCK_CREDENTIAL.project_id - claims = auth.verify_id_token(TEST_ID_TOKEN, non_cert_app) - assert claims['admin'] is True - finally: - if gcloud_project: - os.environ[auth.GCLOUD_PROJECT_ENV_VAR] = gcloud_project - else: - del os.environ[auth.GCLOUD_PROJECT_ENV_VAR] - - def test_no_project_id(self, non_cert_app): - gcloud_project = os.environ.get(auth.GCLOUD_PROJECT_ENV_VAR) - if gcloud_project: - del os.environ[auth.GCLOUD_PROJECT_ENV_VAR] - try: - with pytest.raises(ValueError): - auth.verify_id_token(TEST_ID_TOKEN, non_cert_app) - finally: - if gcloud_project: - 
os.environ[auth.GCLOUD_PROJECT_ENV_VAR] = gcloud_project - - def test_custom_token(self, authtest): - id_token = authtest.create_custom_token(MOCK_UID) - with pytest.raises(ValueError): - authtest.verify_id_token(id_token) - - def test_certificate_request_failure(self, authtest): - auth._request = testutils.MockRequest(404, 'not found') - with pytest.raises(exceptions.TransportError): - authtest.verify_id_token(TEST_ID_TOKEN) diff --git a/tests/test_auth_providers.py b/tests/test_auth_providers.py new file mode 100644 index 000000000..106e1cae3 --- /dev/null +++ b/tests/test_auth_providers.py @@ -0,0 +1,755 @@ +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Test cases for the firebase_admin._auth_providers module.""" + +import json + +import pytest + +import firebase_admin +from firebase_admin import auth +from firebase_admin import exceptions +from firebase_admin import _utils +from tests import testutils + +ID_TOOLKIT_URL = 'https://identitytoolkit.googleapis.com/v2' +EMULATOR_HOST_ENV_VAR = 'FIREBASE_AUTH_EMULATOR_HOST' +AUTH_EMULATOR_HOST = 'localhost:9099' +EMULATED_ID_TOOLKIT_URL = f'http://{AUTH_EMULATOR_HOST}/identitytoolkit.googleapis.com/v2' +URL_PROJECT_SUFFIX = '/projects/mock-project-id' +USER_MGT_URLS = { + 'ID_TOOLKIT': ID_TOOLKIT_URL, + 'PREFIX': ID_TOOLKIT_URL + URL_PROJECT_SUFFIX, +} +OIDC_PROVIDER_CONFIG_RESPONSE = testutils.resource('oidc_provider_config.json') +SAML_PROVIDER_CONFIG_RESPONSE = testutils.resource('saml_provider_config.json') +LIST_OIDC_PROVIDER_CONFIGS_RESPONSE = testutils.resource('list_oidc_provider_configs.json') +LIST_SAML_PROVIDER_CONFIGS_RESPONSE = testutils.resource('list_saml_provider_configs.json') + +CONFIG_NOT_FOUND_RESPONSE = """{ + "error": { + "message": "CONFIGURATION_NOT_FOUND" + } +}""" + +INVALID_PROVIDER_IDS = [None, True, False, 1, 0, [], tuple(), {}, ''] + + +@pytest.fixture(scope='module', params=[{'emulated': False}, {'emulated': True}]) +def user_mgt_app(request): + monkeypatch = testutils.new_monkeypatch() + if request.param['emulated']: + monkeypatch.setenv(EMULATOR_HOST_ENV_VAR, AUTH_EMULATOR_HOST) + monkeypatch.setitem(USER_MGT_URLS, 'ID_TOOLKIT', EMULATED_ID_TOOLKIT_URL) + monkeypatch.setitem(USER_MGT_URLS, 'PREFIX', EMULATED_ID_TOOLKIT_URL + URL_PROJECT_SUFFIX) + app = firebase_admin.initialize_app(testutils.MockCredential(), name='providerConfig', + options={'projectId': 'mock-project-id'}) + yield app + firebase_admin.delete_app(app) + monkeypatch.undo() + + +def _instrument_provider_mgt(app, status, payload): + client = auth._get_client(app) + provider_manager = client._provider_manager + recorder = [] + 
provider_manager.http_client.session.mount( + USER_MGT_URLS['ID_TOOLKIT'], + testutils.MockAdapter(payload, status, recorder)) + return recorder + +def _assert_request(request, expected_method, expected_url): + assert request.method == expected_method + assert request.url == expected_url + assert request.headers['X-Client-Version'] == f'Python/Admin/{firebase_admin.__version__}' + expected_metrics_header = [ + _utils.get_metrics_header(), + _utils.get_metrics_header() + ' mock-cred-metric-tag' + ] + assert request.headers['x-goog-api-client'] in expected_metrics_header + +class TestOIDCProviderConfig: + + VALID_CREATE_OPTIONS = { + 'provider_id': 'oidc.provider', + 'client_id': 'CLIENT_ID', + 'issuer': 'https://oidc.com/issuer', + 'display_name': 'oidcProviderName', + 'enabled': True, + 'id_token_response_type': True, + 'code_response_type': True, + 'client_secret': 'CLIENT_SECRET', + } + + OIDC_CONFIG_REQUEST = { + 'displayName': 'oidcProviderName', + 'enabled': True, + 'clientId': 'CLIENT_ID', + 'clientSecret': 'CLIENT_SECRET', + 'issuer': 'https://oidc.com/issuer', + 'responseType': { + 'code': True, + 'idToken': True, + }, + } + + @pytest.mark.parametrize('provider_id', INVALID_PROVIDER_IDS + ['saml.provider']) + def test_get_invalid_provider_id(self, user_mgt_app, provider_id): + with pytest.raises(ValueError) as excinfo: + auth.get_oidc_provider_config(provider_id, app=user_mgt_app) + + assert str(excinfo.value).startswith('Invalid OIDC provider ID') + + def test_get(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE) + + provider_config = auth.get_oidc_provider_config('oidc.provider', app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + _assert_request( + recorder[0], 'GET', f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs/oidc.provider') + + @pytest.mark.parametrize('invalid_opts', [ + {'provider_id': None}, {'provider_id': ''}, {'provider_id': 'saml.provider'}, 
+ {'client_id': None}, {'client_id': ''}, + {'issuer': None}, {'issuer': ''}, {'issuer': 'not a url'}, + {'display_name': True}, + {'enabled': 'true'}, + {'id_token_response_type': 'true'}, {'code_response_type': 'true'}, + {'code_response_type': True, 'client_secret': ''}, + {'code_response_type': True, 'client_secret': True}, + {'code_response_type': True, 'client_secret': None}, + {'code_response_type': False, 'id_token_response_type': False}, + ]) + def test_create_invalid_args(self, user_mgt_app, invalid_opts): + options = dict(self.VALID_CREATE_OPTIONS) + options.update(invalid_opts) + with pytest.raises(ValueError): + auth.create_oidc_provider_config(**options, app=user_mgt_app) + + def test_create(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE) + + provider_config = auth.create_oidc_provider_config( + **self.VALID_CREATE_OPTIONS, app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + _assert_request(recorder[0], 'POST', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs?oauthIdpConfigId=oidc.provider') + got = json.loads(recorder[0].body.decode()) + assert got == self.OIDC_CONFIG_REQUEST + + def test_create_minimal(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE) + options = dict(self.VALID_CREATE_OPTIONS) + del options['display_name'] + del options['enabled'] + del options['client_secret'] + del options['id_token_response_type'] + del options['code_response_type'] + want = dict(self.OIDC_CONFIG_REQUEST) + del want['displayName'] + del want['enabled'] + del want['clientSecret'] + del want['responseType'] + + provider_config = auth.create_oidc_provider_config(**options, app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + _assert_request(recorder[0], 'POST', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs?oauthIdpConfigId=oidc.provider') + got = 
json.loads(recorder[0].body.decode()) + assert got == want + + def test_create_empty_values(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE) + options = dict(self.VALID_CREATE_OPTIONS) + options['display_name'] = '' + options['enabled'] = False + options['code_response_type'] = False + want = dict(self.OIDC_CONFIG_REQUEST) + want['displayName'] = '' + want['enabled'] = False + want['responseType'] = { + 'code': False, + 'idToken': True, + } + del want['clientSecret'] + + provider_config = auth.create_oidc_provider_config(**options, app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + _assert_request(recorder[0], 'POST', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs?oauthIdpConfigId=oidc.provider') + got = json.loads(recorder[0].body.decode()) + assert got == want + + @pytest.mark.parametrize('invalid_opts', [ + {}, + {'provider_id': None}, {'provider_id': ''}, {'provider_id': 'saml.provider'}, + {'client_id': ''}, + {'issuer': ''}, {'issuer': 'not a url'}, + {'display_name': True}, + {'enabled': 'true'}, + {'id_token_response_type': 'true'}, {'code_response_type': 'true'}, + {'code_response_type': True, 'client_secret': ''}, + {'code_response_type': True, 'client_secret': True}, + {'code_response_type': True, 'client_secret': None}, + {'code_response_type': False, 'id_token_response_type': False}, + ]) + def test_update_invalid_args(self, user_mgt_app, invalid_opts): + options = {'provider_id': 'oidc.provider'} + options.update(invalid_opts) + with pytest.raises(ValueError): + auth.update_oidc_provider_config(**options, app=user_mgt_app) + + def test_update(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE) + + provider_config = auth.update_oidc_provider_config( + **self.VALID_CREATE_OPTIONS, app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + mask = 
['clientId', 'clientSecret', 'displayName', 'enabled', 'issuer', + 'responseType.code', 'responseType.idToken'] + _assert_request(recorder[0], 'PATCH', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs/oidc.provider?' + f'updateMask={",".join(mask)}') + got = json.loads(recorder[0].body.decode()) + assert got == self.OIDC_CONFIG_REQUEST + + def test_update_minimal(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE) + + provider_config = auth.update_oidc_provider_config( + 'oidc.provider', display_name='oidcProviderName', app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + _assert_request(recorder[0], 'PATCH', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs/oidc.provider?' + f'updateMask=displayName') + got = json.loads(recorder[0].body.decode()) + assert got == {'displayName': 'oidcProviderName'} + + def test_update_empty_values(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE) + + provider_config = auth.update_oidc_provider_config( + 'oidc.provider', display_name=auth.DELETE_ATTRIBUTE, enabled=False, + id_token_response_type=False, app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + mask = ['displayName', 'enabled', 'responseType.idToken'] + _assert_request(recorder[0], 'PATCH', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs/oidc.provider?' 
+ f'updateMask={",".join(mask)}') + got = json.loads(recorder[0].body.decode()) + assert got == {'displayName': None, 'enabled': False, 'responseType': {'idToken': False}} + + @pytest.mark.parametrize('provider_id', INVALID_PROVIDER_IDS + ['saml.provider']) + def test_delete_invalid_provider_id(self, user_mgt_app, provider_id): + with pytest.raises(ValueError) as excinfo: + auth.delete_oidc_provider_config(provider_id, app=user_mgt_app) + + assert str(excinfo.value).startswith('Invalid OIDC provider ID') + + def test_delete(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, '{}') + + auth.delete_oidc_provider_config('oidc.provider', app=user_mgt_app) + + assert len(recorder) == 1 + _assert_request(recorder[0], 'DELETE', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs/oidc.provider') + + @pytest.mark.parametrize('arg', [None, 'foo', [], {}, 0, -1, 101, False]) + def test_invalid_max_results(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.list_oidc_provider_configs(max_results=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', ['', [], {}, 0, -1, 101, False]) + def test_invalid_page_token(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.list_oidc_provider_configs(page_token=arg, app=user_mgt_app) + + def test_list_single_page(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, LIST_OIDC_PROVIDER_CONFIGS_RESPONSE) + page = auth.list_oidc_provider_configs(app=user_mgt_app) + + self._assert_page(page) + provider_configs = list(config for config in page.iterate_all()) + assert len(provider_configs) == 2 + + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs?pageSize=100') + + def test_list_multiple_pages(self, user_mgt_app): + sample_response = json.loads(OIDC_PROVIDER_CONFIG_RESPONSE) + configs = _create_list_response(sample_response) + + # Page 1 + response = { + 'oauthIdpConfigs': configs[:2], + 'nextPageToken': 
'token' + } + recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response)) + page = auth.list_oidc_provider_configs(max_results=10, app=user_mgt_app) + + self._assert_page(page, next_page_token='token') + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs?pageSize=10') + + # Page 2 (also the last page) + response = {'oauthIdpConfigs': configs[2:]} + recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response)) + page = page.get_next_page() + + self._assert_page(page, count=1, start=2) + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs?pageSize=10&pageToken=token') + + def test_paged_iteration(self, user_mgt_app): + sample_response = json.loads(OIDC_PROVIDER_CONFIG_RESPONSE) + configs = _create_list_response(sample_response) + + # Page 1 + response = { + 'oauthIdpConfigs': configs[:2], + 'nextPageToken': 'token' + } + recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response)) + page = auth.list_oidc_provider_configs(app=user_mgt_app) + iterator = page.iterate_all() + + for index in range(2): + provider_config = next(iterator) + assert provider_config.provider_id == f'oidc.provider{index}' + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs?pageSize=100') + + # Page 2 (also the last page) + response = {'oauthIdpConfigs': configs[2:]} + recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response)) + + provider_config = next(iterator) + assert provider_config.provider_id == 'oidc.provider2' + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', + f'{USER_MGT_URLS["PREFIX"]}/oauthIdpConfigs?pageSize=100&pageToken=token') + + with pytest.raises(StopIteration): + next(iterator) + + def test_list_empty_response(self, user_mgt_app): + response = {'oauthIdpConfigs': []} + _instrument_provider_mgt(user_mgt_app, 200, 
json.dumps(response)) + page = auth.list_oidc_provider_configs(app=user_mgt_app) + assert len(page.provider_configs) == 0 + provider_configs = list(config for config in page.iterate_all()) + assert len(provider_configs) == 0 + + def test_list_error(self, user_mgt_app): + _instrument_provider_mgt(user_mgt_app, 500, '{"error":"test"}') + with pytest.raises(exceptions.InternalError) as excinfo: + auth.list_oidc_provider_configs(app=user_mgt_app) + assert str(excinfo.value) == 'Unexpected error response: {"error":"test"}' + + def test_config_not_found(self, user_mgt_app): + _instrument_provider_mgt(user_mgt_app, 500, CONFIG_NOT_FOUND_RESPONSE) + + with pytest.raises(auth.ConfigurationNotFoundError) as excinfo: + auth.get_oidc_provider_config('oidc.provider', app=user_mgt_app) + + error_msg = 'No auth provider found for the given identifier (CONFIGURATION_NOT_FOUND).' + assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def _assert_provider_config(self, provider_config, want_id='oidc.provider'): + assert isinstance(provider_config, auth.OIDCProviderConfig) + assert provider_config.provider_id == want_id + assert provider_config.display_name == 'oidcProviderName' + assert provider_config.enabled is True + assert provider_config.issuer == 'https://oidc.com/issuer' + assert provider_config.client_id == 'CLIENT_ID' + + def _assert_page(self, page, count=2, start=0, next_page_token=''): + assert isinstance(page, auth.ListProviderConfigsPage) + index = start + assert len(page.provider_configs) == count + for provider_config in page.provider_configs: + self._assert_provider_config(provider_config, want_id=f'oidc.provider{index}') + index += 1 + + if next_page_token: + assert page.next_page_token == next_page_token + assert page.has_next_page is True + else: + assert page.next_page_token == '' + assert page.has_next_page is False + assert 
page.get_next_page() is None + + +class TestSAMLProviderConfig: + + VALID_CREATE_OPTIONS = { + 'provider_id': 'saml.provider', + 'idp_entity_id': 'IDP_ENTITY_ID', + 'sso_url': 'https://example.com/login', + 'x509_certificates': ['CERT1', 'CERT2'], + 'rp_entity_id': 'RP_ENTITY_ID', + 'callback_url': 'https://projectId.firebaseapp.com/__/auth/handler', + 'display_name': 'samlProviderName', + 'enabled': True, + } + + SAML_CONFIG_REQUEST = { + 'displayName': 'samlProviderName', + 'enabled': True, + 'idpConfig': { + 'idpEntityId': 'IDP_ENTITY_ID', + 'ssoUrl': 'https://example.com/login', + 'idpCertificates': [{'x509Certificate': 'CERT1'}, {'x509Certificate': 'CERT2'}] + }, + 'spConfig': { + 'spEntityId': 'RP_ENTITY_ID', + 'callbackUri': 'https://projectId.firebaseapp.com/__/auth/handler', + } + } + + @pytest.mark.parametrize('provider_id', INVALID_PROVIDER_IDS + ['oidc.provider']) + def test_get_invalid_provider_id(self, user_mgt_app, provider_id): + with pytest.raises(ValueError) as excinfo: + auth.get_saml_provider_config(provider_id, app=user_mgt_app) + + assert str(excinfo.value).startswith('Invalid SAML provider ID') + + def test_get(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, SAML_PROVIDER_CONFIG_RESPONSE) + + provider_config = auth.get_saml_provider_config('saml.provider', app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', + f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs/saml.provider') + + @pytest.mark.parametrize('invalid_opts', [ + {'provider_id': None}, {'provider_id': ''}, {'provider_id': 'oidc.provider'}, + {'idp_entity_id': None}, {'idp_entity_id': ''}, + {'sso_url': None}, {'sso_url': ''}, {'sso_url': 'not a url'}, + {'x509_certificates': None}, {'x509_certificates': []}, {'x509_certificates': 'cert'}, + {'x509_certificates': [None]}, {'x509_certificates': ['foo', {}]}, + {'rp_entity_id': None}, {'rp_entity_id': ''}, + {'callback_url': 
None}, {'callback_url': ''}, {'callback_url': 'not a url'}, + {'display_name': True}, + {'enabled': 'true'}, + ]) + def test_create_invalid_args(self, user_mgt_app, invalid_opts): + options = dict(self.VALID_CREATE_OPTIONS) + options.update(invalid_opts) + with pytest.raises(ValueError): + auth.create_saml_provider_config(**options, app=user_mgt_app) + + def test_create(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, SAML_PROVIDER_CONFIG_RESPONSE) + + provider_config = auth.create_saml_provider_config( + **self.VALID_CREATE_OPTIONS, app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + _assert_request(recorder[0], 'POST', + f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs?' + f'inboundSamlConfigId=saml.provider') + got = json.loads(recorder[0].body.decode()) + assert got == self.SAML_CONFIG_REQUEST + + def test_create_minimal(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, SAML_PROVIDER_CONFIG_RESPONSE) + options = dict(self.VALID_CREATE_OPTIONS) + del options['display_name'] + del options['enabled'] + want = dict(self.SAML_CONFIG_REQUEST) + del want['displayName'] + del want['enabled'] + + provider_config = auth.create_saml_provider_config(**options, app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + _assert_request(recorder[0], 'POST', + f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs?' 
+ f'inboundSamlConfigId=saml.provider') + got = json.loads(recorder[0].body.decode()) + assert got == want + + def test_create_empty_values(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, SAML_PROVIDER_CONFIG_RESPONSE) + options = dict(self.VALID_CREATE_OPTIONS) + options['display_name'] = '' + options['enabled'] = False + want = dict(self.SAML_CONFIG_REQUEST) + want['displayName'] = '' + want['enabled'] = False + + provider_config = auth.create_saml_provider_config(**options, app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + _assert_request(recorder[0], 'POST', + f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs?' + f'inboundSamlConfigId=saml.provider') + got = json.loads(recorder[0].body.decode()) + assert got == want + + @pytest.mark.parametrize('invalid_opts', [ + {}, + {'provider_id': None}, {'provider_id': ''}, {'provider_id': 'oidc.provider'}, + {'idp_entity_id': ''}, + {'sso_url': ''}, {'sso_url': 'not a url'}, + {'x509_certificates': []}, {'x509_certificates': 'cert'}, + {'x509_certificates': [None]}, {'x509_certificates': ['foo', {}]}, + {'rp_entity_id': ''}, + {'callback_url': ''}, {'callback_url': 'not a url'}, + {'display_name': True}, + {'enabled': 'true'}, + ]) + def test_update_invalid_args(self, user_mgt_app, invalid_opts): + options = {'provider_id': 'saml.provider'} + options.update(invalid_opts) + with pytest.raises(ValueError): + auth.update_saml_provider_config(**options, app=user_mgt_app) + + def test_update(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, SAML_PROVIDER_CONFIG_RESPONSE) + + provider_config = auth.update_saml_provider_config( + **self.VALID_CREATE_OPTIONS, app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + mask = [ + 'displayName', 'enabled', 'idpConfig.idpCertificates', 'idpConfig.idpEntityId', + 'idpConfig.ssoUrl', 'spConfig.callbackUri', 'spConfig.spEntityId', + ] + 
_assert_request(recorder[0], 'PATCH', + f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs/saml.provider?' + f'updateMask={",".join(mask)}') + got = json.loads(recorder[0].body.decode()) + assert got == self.SAML_CONFIG_REQUEST + + def test_update_minimal(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, SAML_PROVIDER_CONFIG_RESPONSE) + + provider_config = auth.update_saml_provider_config( + 'saml.provider', display_name='samlProviderName', app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + _assert_request(recorder[0], 'PATCH', + f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs/saml.provider?' + f'updateMask=displayName') + got = json.loads(recorder[0].body.decode()) + assert got == {'displayName': 'samlProviderName'} + + def test_update_empty_values(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, SAML_PROVIDER_CONFIG_RESPONSE) + + provider_config = auth.update_saml_provider_config( + 'saml.provider', display_name=auth.DELETE_ATTRIBUTE, enabled=False, app=user_mgt_app) + + self._assert_provider_config(provider_config) + assert len(recorder) == 1 + mask = ['displayName', 'enabled'] + _assert_request(recorder[0], 'PATCH', + f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs/saml.provider?' 
+ f'updateMask={",".join(mask)}') + got = json.loads(recorder[0].body.decode()) + assert got == {'displayName': None, 'enabled': False} + + @pytest.mark.parametrize('provider_id', INVALID_PROVIDER_IDS + ['oidc.provider']) + def test_delete_invalid_provider_id(self, user_mgt_app, provider_id): + with pytest.raises(ValueError) as excinfo: + auth.delete_saml_provider_config(provider_id, app=user_mgt_app) + + assert str(excinfo.value).startswith('Invalid SAML provider ID') + + def test_delete(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, '{}') + + auth.delete_saml_provider_config('saml.provider', app=user_mgt_app) + + assert len(recorder) == 1 + _assert_request( + recorder[0], 'DELETE', f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs/saml.provider') + + def test_config_not_found(self, user_mgt_app): + _instrument_provider_mgt(user_mgt_app, 500, CONFIG_NOT_FOUND_RESPONSE) + + with pytest.raises(auth.ConfigurationNotFoundError) as excinfo: + auth.get_saml_provider_config('saml.provider', app=user_mgt_app) + + error_msg = 'No auth provider found for the given identifier (CONFIGURATION_NOT_FOUND).' 
+ assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + @pytest.mark.parametrize('arg', [None, 'foo', [], {}, 0, -1, 101, False]) + def test_invalid_max_results(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.list_saml_provider_configs(max_results=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', ['', [], {}, 0, -1, 101, False]) + def test_invalid_page_token(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.list_saml_provider_configs(page_token=arg, app=user_mgt_app) + + def test_list_single_page(self, user_mgt_app): + recorder = _instrument_provider_mgt(user_mgt_app, 200, LIST_SAML_PROVIDER_CONFIGS_RESPONSE) + page = auth.list_saml_provider_configs(app=user_mgt_app) + + self._assert_page(page) + provider_configs = list(config for config in page.iterate_all()) + assert len(provider_configs) == 2 + + assert len(recorder) == 1 + _assert_request( + recorder[0], 'GET', f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs?pageSize=100') + + def test_list_multiple_pages(self, user_mgt_app): + sample_response = json.loads(SAML_PROVIDER_CONFIG_RESPONSE) + configs = _create_list_response(sample_response) + + # Page 1 + response = { + 'inboundSamlConfigs': configs[:2], + 'nextPageToken': 'token' + } + recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response)) + page = auth.list_saml_provider_configs(max_results=10, app=user_mgt_app) + + self._assert_page(page, next_page_token='token') + assert len(recorder) == 1 + _assert_request( + recorder[0], 'GET', f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs?pageSize=10') + + # Page 2 (also the last page) + response = {'inboundSamlConfigs': configs[2:]} + recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response)) + page = page.get_next_page() + + self._assert_page(page, count=1, start=2) + assert len(recorder) == 1 + 
_assert_request( + recorder[0], 'GET', + f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs?pageSize=10&pageToken=token') + + def test_paged_iteration(self, user_mgt_app): + sample_response = json.loads(SAML_PROVIDER_CONFIG_RESPONSE) + configs = _create_list_response(sample_response) + + # Page 1 + response = { + 'inboundSamlConfigs': configs[:2], + 'nextPageToken': 'token' + } + recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response)) + page = auth.list_saml_provider_configs(app=user_mgt_app) + iterator = page.iterate_all() + + for index in range(2): + provider_config = next(iterator) + assert provider_config.provider_id == f'saml.provider{index}' + assert len(recorder) == 1 + _assert_request( + recorder[0], 'GET', f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs?pageSize=100') + + # Page 2 (also the last page) + response = {'inboundSamlConfigs': configs[2:]} + recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response)) + + provider_config = next(iterator) + assert provider_config.provider_id == 'saml.provider2' + assert len(recorder) == 1 + _assert_request( + recorder[0], 'GET', + f'{USER_MGT_URLS["PREFIX"]}/inboundSamlConfigs?pageSize=100&pageToken=token') + + with pytest.raises(StopIteration): + next(iterator) + + def test_list_empty_response(self, user_mgt_app): + response = {'inboundSamlConfigs': []} + _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response)) + page = auth.list_saml_provider_configs(app=user_mgt_app) + assert len(page.provider_configs) == 0 + provider_configs = list(config for config in page.iterate_all()) + assert len(provider_configs) == 0 + + def test_list_error(self, user_mgt_app): + _instrument_provider_mgt(user_mgt_app, 500, '{"error":"test"}') + with pytest.raises(exceptions.InternalError) as excinfo: + auth.list_saml_provider_configs(app=user_mgt_app) + assert str(excinfo.value) == 'Unexpected error response: {"error":"test"}' + + def _assert_provider_config(self, provider_config, 
want_id='saml.provider'): + assert isinstance(provider_config, auth.SAMLProviderConfig) + assert provider_config.provider_id == want_id + assert provider_config.display_name == 'samlProviderName' + assert provider_config.enabled is True + assert provider_config.idp_entity_id == 'IDP_ENTITY_ID' + assert provider_config.sso_url == 'https://example.com/login' + assert provider_config.x509_certificates == ['CERT1', 'CERT2'] + assert provider_config.rp_entity_id == 'RP_ENTITY_ID' + assert provider_config.callback_url == 'https://projectId.firebaseapp.com/__/auth/handler' + + def _assert_page(self, page, count=2, start=0, next_page_token=''): + assert isinstance(page, auth.ListProviderConfigsPage) + index = start + assert len(page.provider_configs) == count + for provider_config in page.provider_configs: + self._assert_provider_config(provider_config, want_id=f'saml.provider{index}') + index += 1 + + if next_page_token: + assert page.next_page_token == next_page_token + assert page.has_next_page is True + else: + assert page.next_page_token == '' + assert page.has_next_page is False + assert page.get_next_page() is None + + +def _create_list_response(sample_response, count=3): + configs = [] + for idx in range(count): + config = dict(sample_response) + config['name'] += str(idx) + configs.append(config) + return configs diff --git a/tests/test_credentials.py b/tests/test_credentials.py index 988b013fe..1e1db6460 100644 --- a/tests/test_credentials.py +++ b/tests/test_credentials.py @@ -16,23 +16,25 @@ import datetime import json import os +import pathlib import google.auth from google.auth import crypt +from google.auth import exceptions from google.oauth2 import credentials as gcredentials from google.oauth2 import service_account -from firebase_admin import credentials import pytest +from firebase_admin import credentials from tests import testutils def check_scopes(g_credential): - assert isinstance(g_credential, google.auth.credentials.Scoped) + assert 
isinstance(g_credential, google.auth.credentials.ReadOnlyScoped) assert sorted(credentials._scopes) == sorted(g_credential.scopes) -class TestCertificate(object): +class TestCertificate: invalid_certs = { 'NonExistingFile': ('non_existing.json', IOError), @@ -44,6 +46,30 @@ class TestCertificate(object): def test_init_from_file(self): credential = credentials.Certificate( testutils.resource_filename('service_account.json')) + self._verify_credential(credential) + + def test_init_from_path_like(self): + path = pathlib.Path(testutils.resource_filename('service_account.json')) + credential = credentials.Certificate(path) + self._verify_credential(credential) + + + def test_init_from_dict(self): + parsed_json = json.loads(testutils.resource('service_account.json')) + credential = credentials.Certificate(parsed_json) + self._verify_credential(credential) + + @pytest.mark.parametrize('file_name,error', invalid_certs.values(), ids=list(invalid_certs)) + def test_init_from_invalid_certificate(self, file_name, error): + with pytest.raises(error): + credentials.Certificate(testutils.resource_filename(file_name)) + + @pytest.mark.parametrize('arg', [None, 0, 1, True, False, [], tuple(), {}]) + def test_invalid_args(self, arg): + with pytest.raises(ValueError): + credentials.Certificate(arg) + + def _verify_credential(self, credential): assert credential.project_id == 'mock-project-id' assert credential.service_account_email == 'mock-email@mock-project.iam.gserviceaccount.com' assert isinstance(credential.signer, crypt.Signer) @@ -59,11 +85,6 @@ def test_init_from_file(self): assert access_token.access_token == 'mock_access_token' assert isinstance(access_token.expiry, datetime.datetime) - @pytest.mark.parametrize('file_name,error', invalid_certs.values(), ids=list(invalid_certs)) - def test_init_from_invalid_certificate(self, file_name, error): - with pytest.raises(error): - credentials.Certificate(testutils.resource_filename(file_name)) - @pytest.fixture def 
app_default(request): @@ -77,11 +98,12 @@ def app_default(request): del os.environ[var_name] -class TestApplicationDefault(object): +class TestApplicationDefault: @pytest.mark.parametrize('app_default', [testutils.resource_filename('service_account.json')], indirect=True) - def test_init(self, app_default): # pylint: disable=unused-argument + def test_init(self, app_default): + del app_default credential = credentials.ApplicationDefault() assert credential.project_id == 'mock-project-id' @@ -98,16 +120,60 @@ def test_init(self, app_default): # pylint: disable=unused-argument @pytest.mark.parametrize('app_default', [testutils.resource_filename('non_existing.json')], indirect=True) - def test_nonexisting_path(self, app_default): # pylint: disable=unused-argument - with pytest.raises(IOError): - credentials.ApplicationDefault() + def test_nonexisting_path(self, app_default): + del app_default + # This does not yet throw because the credentials are lazily loaded. + creds = credentials.ApplicationDefault() + + with pytest.raises(exceptions.DefaultCredentialsError): + creds.get_credential() # This now throws. 
-class TestRefreshToken(object): +class TestRefreshToken: def test_init_from_file(self): credential = credentials.RefreshToken( testutils.resource_filename('refresh_token.json')) + self._verify_credential(credential) + + def test_init_from_path_like(self): + path = pathlib.Path(testutils.resource_filename('refresh_token.json')) + credential = credentials.RefreshToken(path) + self._verify_credential(credential) + + def test_init_from_dict(self): + parsed_json = json.loads(testutils.resource('refresh_token.json')) + credential = credentials.RefreshToken(parsed_json) + self._verify_credential(credential) + + def test_init_from_nonexisting_file(self): + with pytest.raises(IOError): + credentials.RefreshToken( + testutils.resource_filename('non_existing.json')) + + def test_init_from_invalid_file(self): + with pytest.raises(ValueError): + credentials.RefreshToken( + testutils.resource_filename('service_account.json')) + + @pytest.mark.parametrize('arg', [None, 0, 1, True, False, [], tuple(), {}]) + def test_invalid_args(self, arg): + with pytest.raises(ValueError): + credentials.RefreshToken(arg) + + @pytest.mark.parametrize('key', ['client_id', 'client_secret', 'refresh_token']) + def test_required_field(self, key): + data = { + 'client_id': 'value', + 'client_secret': 'value', + 'refresh_token': 'value', + 'type': 'authorized_user' + } + del data[key] + with pytest.raises(ValueError): + credentials.RefreshToken(data) + + def _verify_credential(self, credential): assert credential.client_id == 'mock.apps.googleusercontent.com' assert credential.client_secret == 'mock-secret' assert credential.refresh_token == 'mock-refresh-token' @@ -125,13 +191,3 @@ def test_init_from_file(self): access_token = credential.get_access_token() assert access_token.access_token == 'mock_access_token' assert isinstance(access_token.expiry, datetime.datetime) - - def test_init_from_nonexisting_file(self): - with pytest.raises(IOError): - credentials.RefreshToken( - 
testutils.resource_filename('non_existing.json')) - - def test_init_from_invalid_file(self): - with pytest.raises(ValueError): - credentials.RefreshToken( - testutils.resource_filename('service_account.json')) diff --git a/tests/test_db.py b/tests/test_db.py new file mode 100644 index 000000000..abba3baa8 --- /dev/null +++ b/tests/test_db.py @@ -0,0 +1,1135 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for firebase_admin.db.""" +import collections +import json +import os +import sys +import time + +import pytest + +import firebase_admin +from firebase_admin import db +from firebase_admin import exceptions +from firebase_admin import _http_client +from firebase_admin import _sseclient +from firebase_admin import _utils +from tests import testutils + + +_EMULATOR_HOST_ENV_VAR = 'FIREBASE_DATABASE_EMULATOR_HOST' + + +class MockAdapter(testutils.MockAdapter): + """A mock HTTP adapter that mimics RTDB server behavior.""" + + ETAG = '0' + + def __init__(self, data, status, recorder, etag=ETAG): + testutils.MockAdapter.__init__(self, data, status, recorder) + self._etag = etag + + def send(self, request, **kwargs): + if_match = request.headers.get('if-match') + if_none_match = request.headers.get('if-none-match') + resp = super().send(request, **kwargs) + resp.headers = {'ETag': self._etag} + if if_match and if_match != MockAdapter.ETAG: + resp.status_code = 412 + elif if_none_match == MockAdapter.ETAG: + resp.status_code = 304 + 
return resp + + +class MockSSEClient: + """A mock SSE client that mimics long-lived HTTP connections.""" + + def __init__(self, events): + self.events = events + self.closed = False + + def __iter__(self): + return iter(self.events) + + def close(self): + self.closed = True + + +class _Object: + pass + + +class TestReferencePath: + """Test cases for Reference paths.""" + + # path => (fullstr, key, parent) + valid_paths = { + '/' : ('/', None, None), + '' : ('/', None, None), + '/foo' : ('/foo', 'foo', '/'), + 'foo' : ('/foo', 'foo', '/'), + '/foo/bar' : ('/foo/bar', 'bar', '/foo'), + 'foo/bar' : ('/foo/bar', 'bar', '/foo'), + '/foo/bar/' : ('/foo/bar', 'bar', '/foo'), + } + + invalid_paths = [ + None, True, False, 0, 1, {}, [], tuple(), _Object(), + 'foo#', 'foo.', 'foo$', 'foo[', 'foo]', + ] + + valid_children = { + 'foo': ('/test/foo', 'foo', '/test'), + 'foo/bar' : ('/test/foo/bar', 'bar', '/test/foo'), + 'foo/bar/' : ('/test/foo/bar', 'bar', '/test/foo'), + } + + invalid_children = [ + None, '', '/foo', '/foo/bar', True, False, 0, 1, {}, [], tuple(), + 'foo#', 'foo.', 'foo$', 'foo[', 'foo]', _Object() + ] + + @pytest.mark.parametrize('path, expected', valid_paths.items()) + def test_valid_path(self, path, expected): + ref = db.Reference(path=path) + fullstr, key, parent = expected + assert ref.path == fullstr + assert ref.key == key + if parent is None: + assert ref.parent is None + else: + assert ref.parent.path == parent + + @pytest.mark.parametrize('path', invalid_paths) + def test_invalid_key(self, path): + with pytest.raises(ValueError): + db.Reference(path=path) + + @pytest.mark.parametrize('child, expected', valid_children.items()) + def test_valid_child(self, child, expected): + fullstr, key, parent = expected + childref = db.Reference(path='/test').child(child) + assert childref.path == fullstr + assert childref.key == key + assert childref.parent.path == parent + + @pytest.mark.parametrize('child', invalid_children) + def test_invalid_child(self, 
child): + parent = db.Reference(path='/test') + with pytest.raises(ValueError): + parent.child(child) + + +class _RefOperations: + """A collection of operations that can be performed using a ``db.Reference``. + + This can be used to test any functionality that is common across multiple API calls. + """ + + @classmethod + def get(cls, ref): + ref.get() + + @classmethod + def push(cls, ref): + ref.push() + + @classmethod + def set(cls, ref): + ref.set({'foo': 'bar'}) + + @classmethod + def delete(cls, ref): + ref.delete() + + @classmethod + def query(cls, ref): + query = ref.order_by_key() + query.get() + + @classmethod + def get_ops(cls): + return [cls.get, cls.push, cls.set, cls.delete, cls.query] + + +class TestReference: + """Test cases for database queries via References.""" + + test_url = 'https://test.firebaseio.com' + valid_values = [ + '', 'foo', 0, 1, 100, 1.2, True, False, [], [1, 2], {}, {'foo' : 'bar'} + ] + error_codes = { + 400: exceptions.InvalidArgumentError, + 401: exceptions.UnauthenticatedError, + 404: exceptions.NotFoundError, + 500: exceptions.InternalError, + } + + @classmethod + def setup_class(cls): + firebase_admin.initialize_app(testutils.MockCredential(), {'databaseURL' : cls.test_url}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + def instrument(self, ref, payload, status=200, etag=MockAdapter.ETAG): + recorder = [] + adapter = MockAdapter(payload, status, recorder, etag) + ref._client.session.mount(self.test_url, adapter) + return recorder + + def _assert_request(self, request, expected_method, expected_url): + assert request.method == expected_method + assert request.url == expected_url + assert request.headers['Authorization'] == 'Bearer mock-token' + assert request.headers['User-Agent'] == db._USER_AGENT + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert request.headers['x-goog-api-client'] == expected_metrics_header + + @pytest.mark.parametrize('data', valid_values) 
+ def test_get_value(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps(data)) + assert ref.get() == data + assert len(recorder) == 1 + self._assert_request(recorder[0], 'GET', 'https://test.firebaseio.com/test.json') + assert 'X-Firebase-ETag' not in recorder[0].headers + + @pytest.mark.parametrize('data', valid_values) + def test_get_with_etag(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps(data)) + assert ref.get(etag=True) == (data, MockAdapter.ETAG) + assert len(recorder) == 1 + self._assert_request(recorder[0], 'GET', 'https://test.firebaseio.com/test.json') + assert recorder[0].headers['X-Firebase-ETag'] == 'true' + + @pytest.mark.parametrize('data', valid_values) + def test_get_shallow(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps(data)) + assert ref.get(shallow=True) == data + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'GET', 'https://test.firebaseio.com/test.json?shallow=true') + + def test_get_with_etag_and_shallow(self): + ref = db.reference('/test') + with pytest.raises(ValueError): + ref.get(etag=True, shallow=True) + + @pytest.mark.parametrize('data', valid_values) + def test_get_if_changed(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps(data)) + + assert ref.get_if_changed('invalid-etag') == (True, data, MockAdapter.ETAG) + assert len(recorder) == 1 + self._assert_request(recorder[0], 'GET', 'https://test.firebaseio.com/test.json') + assert recorder[0].headers['if-none-match'] == 'invalid-etag' + + assert ref.get_if_changed(MockAdapter.ETAG) == (False, None, None) + assert len(recorder) == 2 + self._assert_request(recorder[1], 'GET', 'https://test.firebaseio.com/test.json') + assert recorder[1].headers['if-none-match'] == MockAdapter.ETAG + + @pytest.mark.parametrize('etag', [0, 1, True, False, {}, [], tuple()]) + def test_get_if_changed_invalid_etag(self, etag): + ref = 
db.reference('/test') + with pytest.raises(ValueError): + ref.get_if_changed(etag) + + @pytest.mark.parametrize('data', valid_values) + def test_order_by_query(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps(data)) + query = ref.order_by_child('foo') + query_str = 'orderBy=%22foo%22' + assert query.get() == data + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'GET', 'https://test.firebaseio.com/test.json?' + query_str) + + @pytest.mark.parametrize('data', valid_values) + def test_limit_query(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps(data)) + query = ref.order_by_child('foo') + query.limit_to_first(100) + query_str = 'limitToFirst=100&orderBy=%22foo%22' + assert query.get() == data + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'GET', 'https://test.firebaseio.com/test.json?' + query_str) + + @pytest.mark.parametrize('data', valid_values) + def test_range_query(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps(data)) + query = ref.order_by_child('foo') + query.start_at(100) + query.end_at(200) + query_str = 'endAt=200&orderBy=%22foo%22&startAt=100' + assert query.get() == data + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'GET', 'https://test.firebaseio.com/test.json?' 
+ query_str) + + @pytest.mark.parametrize('data', valid_values) + def test_set_value(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, '') + ref.set(data) + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'PUT', 'https://test.firebaseio.com/test.json?print=silent') + assert json.loads(recorder[0].body.decode()) == data + + def test_set_none_value(self): + ref = db.reference('/test') + self.instrument(ref, '') + with pytest.raises(ValueError): + ref.set(None) + + @pytest.mark.parametrize('value', [ + _Object(), {'foo': _Object()}, [_Object()] + ]) + def test_set_non_json_value(self, value): + ref = db.reference('/test') + self.instrument(ref, '') + with pytest.raises(TypeError): + ref.set(value) + + @pytest.mark.parametrize('data', [{'foo': 'bar'}, {'foo': None}]) + def test_update_children(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps(data)) + ref.update(data) + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'PATCH', 'https://test.firebaseio.com/test.json?print=silent') + assert json.loads(recorder[0].body.decode()) == data + + @pytest.mark.parametrize('data', valid_values) + def test_set_if_unchanged_success(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps(data)) + vals = ref.set_if_unchanged(MockAdapter.ETAG, data) + assert vals == (True, data, MockAdapter.ETAG) + assert len(recorder) == 1 + self._assert_request(recorder[0], 'PUT', 'https://test.firebaseio.com/test.json') + assert json.loads(recorder[0].body.decode()) == data + assert recorder[0].headers['if-match'] == MockAdapter.ETAG + + @pytest.mark.parametrize('data', valid_values) + def test_set_if_unchanged_failure(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps({'foo':'bar'})) + vals = ref.set_if_unchanged('invalid-etag', data) + assert vals == (False, {'foo':'bar'}, MockAdapter.ETAG) + assert len(recorder) == 1 + 
self._assert_request(recorder[0], 'PUT', 'https://test.firebaseio.com/test.json') + assert json.loads(recorder[0].body.decode()) == data + assert recorder[0].headers['if-match'] == 'invalid-etag' + + @pytest.mark.parametrize('etag', [0, 1, True, False, {}, [], tuple()]) + def test_set_if_unchanged_invalid_etag(self, etag): + ref = db.reference('/test') + with pytest.raises(ValueError): + ref.set_if_unchanged(etag, 'value') + + def test_set_if_unchanged_none_value(self): + ref = db.reference('/test') + self.instrument(ref, '') + with pytest.raises(ValueError): + ref.set_if_unchanged(MockAdapter.ETAG, None) + + @pytest.mark.parametrize('value', [ + _Object(), {'foo': _Object()}, [_Object()] + ]) + def test_set_if_unchanged_non_json_value(self, value): + ref = db.reference('/test') + self.instrument(ref, '') + with pytest.raises(TypeError): + ref.set_if_unchanged(MockAdapter.ETAG, value) + + @pytest.mark.parametrize('update', [ + None, {}, {None:'foo'}, '', 'foo', 0, 1, [], tuple(), _Object() + ]) + def test_set_invalid_update(self, update): + ref = db.reference('/test') + recorder = self.instrument(ref, '') + with pytest.raises(ValueError): + ref.update(update) + assert len(recorder) == 0 + + @pytest.mark.parametrize('data', valid_values) + def test_push(self, data): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps({'name' : 'testkey'})) + child = ref.push(data) + assert isinstance(child, db.Reference) + assert child.key == 'testkey' + assert len(recorder) == 1 + self._assert_request(recorder[0], 'POST', 'https://test.firebaseio.com/test.json') + assert json.loads(recorder[0].body.decode()) == data + + def test_push_default(self): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps({'name' : 'testkey'})) + assert ref.push().key == 'testkey' + assert len(recorder) == 1 + self._assert_request(recorder[0], 'POST', 'https://test.firebaseio.com/test.json') + assert json.loads(recorder[0].body.decode()) == '' + + def 
test_push_none_value(self): + ref = db.reference('/test') + self.instrument(ref, '') + with pytest.raises(ValueError): + ref.push(None) + + def test_delete(self): + ref = db.reference('/test') + recorder = self.instrument(ref, '') + ref.delete() + assert len(recorder) == 1 + self._assert_request(recorder[0], 'DELETE', 'https://test.firebaseio.com/test.json') + + def test_transaction(self): + ref = db.reference('/test') + data = {'foo1': 'bar1'} + recorder = self.instrument(ref, json.dumps(data)) + + def transaction_update(data): + data['foo2'] = 'bar2' + return data + + new_value = ref.transaction(transaction_update) + assert new_value == {'foo1' : 'bar1', 'foo2' : 'bar2'} + assert len(recorder) == 2 + self._assert_request(recorder[0], 'GET', 'https://test.firebaseio.com/test.json') + self._assert_request(recorder[1], 'PUT', 'https://test.firebaseio.com/test.json') + assert json.loads(recorder[1].body.decode()) == {'foo1': 'bar1', 'foo2': 'bar2'} + + def test_transaction_scalar(self): + ref = db.reference('/test/count') + data = 42 + recorder = self.instrument(ref, json.dumps(data)) + + new_value = ref.transaction(lambda x: x + 1 if x else 1) + assert new_value == 43 + assert len(recorder) == 2 + self._assert_request(recorder[0], 'GET', 'https://test.firebaseio.com/test/count.json') + self._assert_request(recorder[1], 'PUT', 'https://test.firebaseio.com/test/count.json') + assert json.loads(recorder[1].body.decode()) == 43 + + def test_transaction_error(self): + ref = db.reference('/test') + data = {'foo1': 'bar1'} + recorder = self.instrument(ref, json.dumps(data)) + + def transaction_update(data): + del data + raise ValueError('test error') + + with pytest.raises(ValueError) as excinfo: + ref.transaction(transaction_update) + assert str(excinfo.value) == 'test error' + assert len(recorder) == 1 + self._assert_request(recorder[0], 'GET', 'https://test.firebaseio.com/test.json') + + def test_transaction_abort(self): + ref = db.reference('/test/count') + data = 42 + 
recorder = self.instrument(ref, json.dumps(data), etag='1') + + with pytest.raises(db.TransactionAbortedError) as excinfo: + ref.transaction(lambda x: x + 1 if x else 1) + assert isinstance(excinfo.value, exceptions.AbortedError) + assert str(excinfo.value) == 'Transaction aborted after failed retries.' + assert excinfo.value.cause is None + assert excinfo.value.http_response is None + assert len(recorder) == 1 + 25 + + @pytest.mark.parametrize('func', [None, 0, 1, True, False, 'foo', {}, [], tuple()]) + def test_transaction_invalid_function(self, func): + ref = db.reference('/test') + with pytest.raises(ValueError): + ref.transaction(func) + + def test_get_root_reference(self): + ref = db.reference() + assert ref.key is None + assert ref.path == '/' + + @pytest.mark.parametrize('path, expected', TestReferencePath.valid_paths.items()) + def test_get_reference(self, path, expected): + ref = db.reference(path) + fullstr, key, parent = expected + assert ref.path == fullstr + assert ref.key == key + if parent is None: + assert ref.parent is None + else: + assert ref.parent.path == parent + + @pytest.mark.parametrize('error_code', error_codes.keys()) + @pytest.mark.parametrize('func', _RefOperations.get_ops()) + def test_server_error(self, error_code, func): + ref = db.reference('/test') + self.instrument(ref, json.dumps({'error' : 'json error message'}), error_code) + exc_type = self.error_codes[error_code] + with pytest.raises(exc_type) as excinfo: + func(ref) + assert str(excinfo.value) == 'json error message' + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + + @pytest.mark.parametrize('error_code', error_codes.keys()) + @pytest.mark.parametrize('func', _RefOperations.get_ops()) + def test_other_error(self, error_code, func): + ref = db.reference('/test') + self.instrument(ref, 'custom error message', error_code) + exc_type = self.error_codes[error_code] + with pytest.raises(exc_type) as excinfo: + func(ref) + assert 
str(excinfo.value) == 'Unexpected response from database: custom error message' + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + + +class TestListenerRegistration: + """Test cases for receiving events via ListenerRegistrations.""" + + def test_listen_error(self): + test_url = 'https://test.firebaseio.com' + firebase_admin.initialize_app(testutils.MockCredential(), { + 'databaseURL' : test_url, + }) + try: + ref = db.reference() + adapter = MockAdapter(json.dumps({'error' : 'json error message'}), 500, []) + session = ref._client.session + session.mount(test_url, adapter) + def callback(_): + pass + with pytest.raises(exceptions.InternalError) as excinfo: + ref._listen_with_session(callback, session) + assert str(excinfo.value) == 'json error message' + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + finally: + testutils.cleanup_apps() + + @pytest.mark.parametrize( + 'url,emulator_host,expected_base_url,expected_namespace', + [ + # Production URLs with no override: + ('https://test.firebaseio.com', None, 'https://test.firebaseio.com/.json', None), + ('https://test.firebaseio.com/', None, 'https://test.firebaseio.com/.json', None), + + # Production URLs with emulator_host override: + ('https://test.firebaseio.com', 'localhost:9000', 'http://localhost:9000/.json', + 'test'), + ('https://test.firebaseio.com/', 'localhost:9000', 'http://localhost:9000/.json', + 'test'), + + # Emulator URL with no override. + ('http://localhost:8000/?ns=test', None, 'http://localhost:8000/.json', 'test'), + + # emulator_host is ignored when the original URL is already emulator. 
+ ('http://localhost:8000/?ns=test', 'localhost:9999', 'http://localhost:8000/.json', + 'test'), + ] + ) + def test_listen_sse_client(self, url, emulator_host, expected_base_url, expected_namespace, + mocker): + if emulator_host: + os.environ[_EMULATOR_HOST_ENV_VAR] = emulator_host + + try: + firebase_admin.initialize_app(testutils.MockCredential(), {'databaseURL' : url}) + ref = db.reference() + mock_sse_client = mocker.patch('firebase_admin._sseclient.SSEClient') + mock_callback = mocker.Mock() + ref.listen(mock_callback) + args, kwargs = mock_sse_client.call_args + assert args[0] == expected_base_url + if expected_namespace: + assert kwargs.get('params') == {'ns': expected_namespace} + else: + assert kwargs.get('params') == {} + finally: + if _EMULATOR_HOST_ENV_VAR in os.environ: + del os.environ[_EMULATOR_HOST_ENV_VAR] + testutils.cleanup_apps() + + def test_listener_session(self): + firebase_admin.initialize_app(testutils.MockCredential(), { + 'databaseURL' : 'https://test.firebaseio.com', + }) + try: + ref = db.reference() + session = ref._client.create_listener_session() + assert isinstance(session, _sseclient.KeepAuthSession) + finally: + testutils.cleanup_apps() + + def test_single_event(self): + self.events = [] + def callback(event): + self.events.append(event) + sse = MockSSEClient([ + _sseclient.Event.parse('event: put\ndata: {"path":"/","data":"testevent"}\n\n') + ]) + registration = db.ListenerRegistration(callback, sse) + self.wait_for(self.events) + registration.close() + assert sse.closed + assert len(self.events) == 1 + event = self.events[0] + assert event.event_type == 'put' + assert event.path == '/' + assert event.data == 'testevent' + + def test_multiple_events(self): + self.events = [] + def callback(event): + self.events.append(event) + sse = MockSSEClient([ + _sseclient.Event.parse('event: put\ndata: {"path":"/foo","data":"testevent1"}\n\n'), + _sseclient.Event.parse('event: put\ndata: {"path":"/bar","data":{"a": 1}}\n\n'), + ]) + 
registration = db.ListenerRegistration(callback, sse) + self.wait_for(self.events, count=2) + registration.close() + assert sse.closed + assert len(self.events) == 2 + event = self.events[0] + assert event.event_type == 'put' + assert event.path == '/foo' + assert event.data == 'testevent1' + event = self.events[1] + assert event.event_type == 'put' + assert event.path == '/bar' + assert event.data == {'a': 1} + + @classmethod + def wait_for(cls, events, count=1, timeout_seconds=5): + must_end = time.time() + timeout_seconds + while time.time() < must_end: + if len(events) >= count: + return + raise pytest.fail('Timed out while waiting for events') + + +class TestReferenceWithAuthOverride: + """Test cases for database queries via References.""" + + test_url = 'https://test.firebaseio.com' + encoded_override = '%7B%22uid%22:%22user1%22%7D' + + @classmethod + def setup_class(cls): + firebase_admin.initialize_app(testutils.MockCredential(), { + 'databaseURL' : cls.test_url, + 'databaseAuthVariableOverride' : {'uid':'user1'} + }) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + def instrument(self, ref, payload, status=200): + recorder = [] + adapter = MockAdapter(payload, status, recorder) + ref._client.session.mount(self.test_url, adapter) + return recorder + + def _assert_request(self, request, expected_method, expected_url): + assert request.method == expected_method + assert request.url == expected_url + assert request.headers['Authorization'] == 'Bearer mock-token' + assert request.headers['User-Agent'] == db._USER_AGENT + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert request.headers['x-goog-api-client'] == expected_metrics_header + + def test_get_value(self): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps('data')) + query_str = f'auth_variable_override={self.encoded_override}' + assert ref.get() == 'data' + assert len(recorder) == 1 + self._assert_request( + 
recorder[0], 'GET', 'https://test.firebaseio.com/test.json?' + query_str) + + def test_set_value(self): + ref = db.reference('/test') + recorder = self.instrument(ref, '') + data = {'foo' : 'bar'} + ref.set(data) + query_str = f'print=silent&auth_variable_override={self.encoded_override}' + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'PUT', 'https://test.firebaseio.com/test.json?' + query_str) + assert json.loads(recorder[0].body.decode()) == data + + def test_order_by_query(self): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps('data')) + query = ref.order_by_child('foo') + query_str = f'orderBy=%22foo%22&auth_variable_override={self.encoded_override}' + assert query.get() == 'data' + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'GET', 'https://test.firebaseio.com/test.json?' + query_str) + + def test_range_query(self): + ref = db.reference('/test') + recorder = self.instrument(ref, json.dumps('data')) + query = ref.order_by_child('foo').start_at(1).end_at(10) + query_str = ( + f'endAt=10&orderBy=%22foo%22&startAt=1&auth_variable_override={self.encoded_override}' + ) + assert query.get() == 'data' + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'GET', 'https://test.firebaseio.com/test.json?' 
+ query_str) + + +class TestDatabaseInitialization: + """Test cases for database initialization.""" + + def teardown_method(self): + testutils.cleanup_apps() + + def test_no_app(self): + with pytest.raises(ValueError): + db.reference() + + def test_no_db_url(self): + firebase_admin.initialize_app(testutils.MockCredential()) + with pytest.raises(ValueError): + db.reference() + + @pytest.mark.parametrize( + 'url,emulator_host,expected_base_url,expected_namespace', + [ + # Production URLs with no override: + ('https://test.firebaseio.com', None, 'https://test.firebaseio.com', None), + ('https://test.firebaseio.com/', None, 'https://test.firebaseio.com', None), + + # Production URLs with emulator_host override: + ('https://test.firebaseio.com', 'localhost:9000', 'http://localhost:9000', 'test'), + ('https://test.firebaseio.com/', 'localhost:9000', 'http://localhost:9000', 'test'), + + # Emulator URL with no override. + ('http://localhost:8000/?ns=test', None, 'http://localhost:8000', 'test'), + + # emulator_host is ignored when the original URL is already emulator. + ('http://localhost:8000/?ns=test', 'localhost:9999', 'http://localhost:8000', 'test'), + ] + ) + def test_parse_db_url(self, url, emulator_host, expected_base_url, expected_namespace): + if emulator_host: + os.environ[_EMULATOR_HOST_ENV_VAR] = emulator_host + + try: + firebase_admin.initialize_app(testutils.MockCredential(), {'databaseURL' : url}) + ref = db.reference() + assert ref._client._base_url == expected_base_url + assert ref._client.params.get('ns') == expected_namespace + if expected_base_url.startswith('http://localhost'): + assert isinstance(ref._client.credential, _utils.EmulatorAdminCredentials) + else: + assert isinstance(ref._client.credential, testutils.MockGoogleCredential) + finally: + if _EMULATOR_HOST_ENV_VAR in os.environ: + del os.environ[_EMULATOR_HOST_ENV_VAR] + + @pytest.mark.parametrize('url', [ + '', + None, + 42, + 'test.firebaseio.com', # Not a URL. 
+        'http://test.firebaseio.com',  # Use of non-HTTPS in production URLs.
+        'ftp://test.firebaseio.com',  # Use of non-HTTPS in production URLs.
+        'http://localhost:9000/',  # No ns specified.
+        'http://localhost:9000/?ns=',  # No ns specified.
+        'http://localhost:9000/?ns=test1&ns=test2',  # Two ns parameters specified.
+        'ftp://localhost:9000/?ns=test',  # Neither HTTP nor HTTPS.
+    ])
+    def test_parse_db_url_errors(self, url):
+        firebase_admin.initialize_app(testutils.MockCredential(), {'databaseURL' : url})
+        with pytest.raises(ValueError):
+            db.reference()
+
+    @pytest.mark.parametrize('url', [
+        'https://test.firebaseio.com', 'https://test.firebaseio.com/',
+        'https://test.eu-west1.firebasedatabase.app', 'https://test.eu-west1.firebasedatabase.app/'
+    ])
+    def test_valid_db_url(self, url):
+        firebase_admin.initialize_app(testutils.MockCredential(), {'databaseURL' : url})
+        ref = db.reference()
+        expected_url = url
+        if url.endswith('/'):
+            expected_url = url[:-1]
+        assert ref._client.base_url == expected_url
+        assert 'auth_variable_override' not in ref._client.params
+        assert 'ns' not in ref._client.params
+
+    @pytest.mark.parametrize('url', [
+        None, '', 'foo', 'http://test.firebaseio.com', 'http://test.firebasedatabase.app',
+        True, False, 1, 0, {}, [], tuple(), _Object()
+    ])
+    def test_invalid_db_url(self, url):
+        firebase_admin.initialize_app(testutils.MockCredential(), {'databaseURL' : url})
+        with pytest.raises(ValueError):
+            db.reference()
+        other_app = firebase_admin.initialize_app(testutils.MockCredential(), name='otherApp')
+        with pytest.raises(ValueError):
+            db.reference(app=other_app, url=url)
+
+    def test_multi_db_support(self):
+        default_url = 'https://test.firebaseio.com'
+        firebase_admin.initialize_app(testutils.MockCredential(), {
+            'databaseURL' : default_url,
+        })
+        ref = db.reference()
+        assert ref._client.base_url == default_url
+        assert 'auth_variable_override' not in ref._client.params
+        assert ref._client is db.reference()._client
+        assert 
ref._client is db.reference(url=default_url)._client
+
+        other_url = 'https://other.firebaseio.com'
+        other_ref = db.reference(url=other_url)
+        assert other_ref._client.base_url == other_url
+        assert 'auth_variable_override' not in other_ref._client.params
+        assert other_ref._client is db.reference(url=other_url)._client
+        assert other_ref._client is db.reference(url=other_url + '/')._client
+
+    @pytest.mark.parametrize('override', [{}, {'uid':'user1'}, None])
+    def test_valid_auth_override(self, override):
+        firebase_admin.initialize_app(testutils.MockCredential(), {
+            'databaseURL' : 'https://test.firebaseio.com',
+            'databaseAuthVariableOverride': override
+        })
+        default_ref = db.reference()
+        other_ref = db.reference(url='https://other.firebaseio.com')
+        for ref in [default_ref, other_ref]:
+            if override == {}:
+                assert 'auth_variable_override' not in ref._client.params
+            else:
+                encoded = json.dumps(override, separators=(',', ':'))
+                assert ref._client.params['auth_variable_override'] == encoded
+
+    @pytest.mark.parametrize('override', [
+        '', 'foo', 0, 1, True, False, [], tuple(), _Object()])
+    def test_invalid_auth_override(self, override):
+        firebase_admin.initialize_app(testutils.MockCredential(), {
+            'databaseURL' : 'https://test.firebaseio.com',
+            'databaseAuthVariableOverride': override
+        })
+        with pytest.raises(ValueError):
+            db.reference()
+        other_app = firebase_admin.initialize_app(testutils.MockCredential(), {
+            'databaseAuthVariableOverride': override
+        }, name='otherApp')
+        with pytest.raises(ValueError):
+            db.reference(app=other_app, url='https://other.firebaseio.com')
+
+    @pytest.mark.parametrize('options, timeout', [
+        ({'httpTimeout': 4}, 4),
+        ({'httpTimeout': None}, None),
+        ({}, _http_client.DEFAULT_TIMEOUT_SECONDS),
+    ])
+    def test_http_timeout(self, options, timeout):
+        test_url = 'https://test.firebaseio.com'
+        all_options = {
+            'databaseURL' : test_url,
+        }
+        all_options.update(options)
+        
firebase_admin.initialize_app(testutils.MockCredential(), all_options) + default_ref = db.reference() + other_ref = db.reference(url='https://other.firebaseio.com') + for ref in [default_ref, other_ref]: + self._check_timeout(ref, timeout) + + def test_app_delete(self): + app = firebase_admin.initialize_app( + testutils.MockCredential(), {'databaseURL' : 'https://test.firebaseio.com'}) + ref = db.reference() + other_ref = db.reference(url='https://other.firebaseio.com') + assert ref._client.session is not None + assert other_ref._client.session is not None + firebase_admin.delete_app(app) + with pytest.raises(ValueError): + db.reference() + with pytest.raises(ValueError): + db.reference(url='https://other.firebaseio.com') + assert ref._client.session is None + assert other_ref._client.session is None + + def test_user_agent_format(self): + expected = ( + f'Firebase/HTTP/{firebase_admin.__version__}/{sys.version_info.major}.' + f'{sys.version_info.minor}/AdminPython' + ) + assert db._USER_AGENT == expected + + def _check_timeout(self, ref, timeout): + assert ref._client.timeout == timeout + recorder = [] + adapter = MockAdapter('{}', 200, recorder) + ref._client.session.mount(ref._client.base_url, adapter) + assert ref.get() == {} + assert len(recorder) == 1 + if timeout is None: + assert recorder[0]._extra_kwargs['timeout'] is None + else: + assert recorder[0]._extra_kwargs['timeout'] == pytest.approx(timeout, 0.001) + + +@pytest.fixture(params=['foo', '$key', '$value']) +def initquery(request): + ref = db.Reference(path='foo') + if request.param == '$key': + return ref.order_by_key(), request.param + if request.param == '$value': + return ref.order_by_value(), request.param + + return ref.order_by_child(request.param), request.param + + +class TestQuery: + """Test cases for db.Query class.""" + + valid_paths = { + 'foo' : 'foo', + 'foo/bar' : 'foo/bar', + 'foo/bar/' : 'foo/bar' + } + + ref = db.Reference(path='foo') + + @pytest.mark.parametrize('path', [ + '', 
None, '/', '/foo', 0, 1, True, False, {}, [], tuple(), _Object(), + '$foo', '.foo', '#foo', '[foo', 'foo]', '$key', '$value', '$priority' + ]) + def test_invalid_path(self, path): + with pytest.raises(ValueError): + self.ref.order_by_child(path) + + @pytest.mark.parametrize('path, expected', valid_paths.items()) + def test_order_by_valid_path(self, path, expected): + query = self.ref.order_by_child(path) + assert query._querystr == f'orderBy="{expected}"' + + @pytest.mark.parametrize('path, expected', valid_paths.items()) + def test_filter_by_valid_path(self, path, expected): + query = self.ref.order_by_child(path) + query.equal_to(10) + assert query._querystr == f'equalTo=10&orderBy="{expected}"' + + def test_order_by_key(self): + query = self.ref.order_by_key() + assert query._querystr == 'orderBy="$key"' + + def test_key_filter(self): + query = self.ref.order_by_key() + query.equal_to(10) + assert query._querystr == 'equalTo=10&orderBy="$key"' + + def test_order_by_value(self): + query = self.ref.order_by_value() + assert query._querystr == 'orderBy="$value"' + + def test_value_filter(self): + query = self.ref.order_by_value() + query.equal_to(10) + assert query._querystr == 'equalTo=10&orderBy="$value"' + + def test_multiple_limits(self): + query = self.ref.order_by_child('foo') + query.limit_to_first(1) + with pytest.raises(ValueError): + query.limit_to_last(2) + + query = self.ref.order_by_child('foo') + query.limit_to_last(2) + with pytest.raises(ValueError): + query.limit_to_first(1) + + @pytest.mark.parametrize('limit', [None, -1, 'foo', 1.2, [], {}, tuple(), _Object()]) + def test_invalid_limit(self, limit): + query = self.ref.order_by_child('foo') + with pytest.raises(ValueError): + query.limit_to_first(limit) + with pytest.raises(ValueError): + query.limit_to_last(limit) + + def test_start_at_none(self): + query = self.ref.order_by_child('foo') + with pytest.raises(ValueError): + query.start_at(None) + + @pytest.mark.parametrize('arg', ['', 'foo', True, 
False, 0, 1, {}]) + def test_valid_start_at(self, arg): + query = self.ref.order_by_child('foo').start_at(arg) + assert query._querystr == f'orderBy="foo"&startAt={json.dumps(arg)}' + + def test_end_at_none(self): + query = self.ref.order_by_child('foo') + with pytest.raises(ValueError): + query.end_at(None) + + @pytest.mark.parametrize('arg', ['', 'foo', True, False, 0, 1, {}]) + def test_valid_end_at(self, arg): + query = self.ref.order_by_child('foo').end_at(arg) + assert query._querystr == f'endAt={json.dumps(arg)}&orderBy="foo"' + + def test_equal_to_none(self): + query = self.ref.order_by_child('foo') + with pytest.raises(ValueError): + query.equal_to(None) + + @pytest.mark.parametrize('arg', ['', 'foo', True, False, 0, 1, {}]) + def test_valid_equal_to(self, arg): + query = self.ref.order_by_child('foo').equal_to(arg) + assert query._querystr == f'equalTo={json.dumps(arg)}&orderBy="foo"' + + def test_range_query(self, initquery): + query, order_by = initquery + query.start_at(1) + query.equal_to(2) + query.end_at(3) + assert query._querystr == f'endAt=3&equalTo=2&orderBy="{order_by}"&startAt=1' + + def test_limit_first_query(self, initquery): + query, order_by = initquery + query.limit_to_first(1) + assert query._querystr == f'limitToFirst=1&orderBy="{order_by}"' + + def test_limit_last_query(self, initquery): + query, order_by = initquery + query.limit_to_last(1) + assert query._querystr == f'limitToLast=1&orderBy="{order_by}"' + + def test_all_in(self, initquery): + query, order_by = initquery + query.start_at(1) + query.equal_to(2) + query.end_at(3) + query.limit_to_first(10) + expected = f'endAt=3&equalTo=2&limitToFirst=10&orderBy="{order_by}"&startAt=1' + assert query._querystr == expected + + def test_invalid_query_args(self): + ref = db.Reference(path='foo') + with pytest.raises(ValueError): + db.Query(order_by='$key', client=ref._client, pathurl=ref._add_suffix(), foo='bar') + + +class TestSorter: + """Test cases for db._Sorter class.""" + + 
value_test_cases = [ + ({'k1' : 1, 'k2' : 2, 'k3' : 3}, ['k1', 'k2', 'k3']), + ({'k1' : 3, 'k2' : 2, 'k3' : 1}, ['k3', 'k2', 'k1']), + ({'k1' : 3, 'k2' : 1, 'k3' : 2}, ['k2', 'k3', 'k1']), + ({'k1' : 3, 'k2' : 1, 'k3' : 1}, ['k2', 'k3', 'k1']), + ({'k1' : 1, 'k2' : 2, 'k3' : 1}, ['k1', 'k3', 'k2']), + ({'k1' : 2, 'k2' : 2, 'k3' : 1}, ['k3', 'k1', 'k2']), + ({'k1' : 'foo', 'k2' : 'bar', 'k3' : 'baz'}, ['k2', 'k3', 'k1']), + ({'k1' : 'foo', 'k2' : 'bar', 'k3' : 10}, ['k3', 'k2', 'k1']), + ({'k1' : 'foo', 'k2' : 'bar', 'k3' : None}, ['k3', 'k2', 'k1']), + ({'k1' : 5, 'k2' : 'bar', 'k3' : None}, ['k3', 'k1', 'k2']), + ({'k1' : False, 'k2' : 'bar', 'k3' : None}, ['k3', 'k1', 'k2']), + ({'k1' : False, 'k2' : 1, 'k3' : None}, ['k3', 'k1', 'k2']), + ({'k1' : True, 'k2' : 0, 'k3' : None, 'k4' : 'foo'}, ['k3', 'k1', 'k2', 'k4']), + ({'k1' : True, 'k2' : 0, 'k3' : None, 'k4' : 'foo', 'k5' : False, 'k6' : {}}, + ['k3', 'k5', 'k1', 'k2', 'k4', 'k6']), + ({'k1' : True, 'k2' : 0, 'k3' : 'foo', 'k4' : 'foo', 'k5' : False, 'k6' : {}}, + ['k5', 'k1', 'k2', 'k3', 'k4', 'k6']), + ] + + list_test_cases = [ + ([], []), + ([1, 2, 3], [1, 2, 3]), + ([3, 2, 1], [1, 2, 3]), + ([1, 3, 2], [1, 2, 3]), + ([1, 3, 3], [1, 3, 3]), + ([2, 3, 2], [2, 2, 3]), + (['foo', 'bar', 'baz'], ['bar', 'baz', 'foo']), + (['foo', 1, False, None, 0, True], [None, False, True, 0, 1, 'foo']), + ] + + @pytest.mark.parametrize('result, expected', value_test_cases) + def test_order_by_value(self, result, expected): + ordered = db._Sorter(result, '$value').get() + assert isinstance(ordered, collections.OrderedDict) + assert list(ordered.keys()) == expected + + @pytest.mark.parametrize('result, expected', list_test_cases) + def test_order_by_value_with_list(self, result, expected): + ordered = db._Sorter(result, '$value').get() + assert isinstance(ordered, list) + assert ordered == expected + + @pytest.mark.parametrize('value', [None, False, True, 0, 1, 'foo']) + def test_invalid_sort(self, value): + with 
pytest.raises(ValueError): + db._Sorter(value, '$value') + + @pytest.mark.parametrize('result, expected', [ + ({'k1' : 1, 'k2' : 2, 'k3' : 3}, ['k1', 'k2', 'k3']), + ({'k3' : 3, 'k2' : 2, 'k1' : 1}, ['k1', 'k2', 'k3']), + ({'k1' : 3, 'k3' : 1, 'k2' : 2}, ['k1', 'k2', 'k3']), + ]) + def test_order_by_key(self, result, expected): + ordered = db._Sorter(result, '$key').get() + assert isinstance(ordered, collections.OrderedDict) + assert list(ordered.keys()) == expected + + @pytest.mark.parametrize('result, expected', value_test_cases) + def test_order_by_child(self, result, expected): + nested = {} + for key, val in result.items(): + nested[key] = {'child' : val} + ordered = db._Sorter(nested, 'child').get() + assert isinstance(ordered, collections.OrderedDict) + assert list(ordered.keys()) == expected + + @pytest.mark.parametrize('result, expected', value_test_cases) + def test_order_by_grand_child(self, result, expected): + nested = {} + for key, val in result.items(): + nested[key] = {'child' : {'grandchild' : val}} + ordered = db._Sorter(nested, 'child/grandchild').get() + assert isinstance(ordered, collections.OrderedDict) + assert list(ordered.keys()) == expected + + @pytest.mark.parametrize('result, expected', [ + ({'k1': {'child': 1}, 'k2': {}}, ['k2', 'k1']), + ({'k1': {'child': 1}, 'k2': {'child': 0}}, ['k2', 'k1']), + ({'k1': {'child': 1}, 'k2': {'child': {}}, 'k3': {}}, ['k3', 'k1', 'k2']), + ]) + def test_child_path_resolution(self, result, expected): + ordered = db._Sorter(result, 'child').get() + assert isinstance(ordered, collections.OrderedDict) + assert list(ordered.keys()) == expected diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py new file mode 100644 index 000000000..fa1276feb --- /dev/null +++ b/tests/test_exceptions.py @@ -0,0 +1,175 @@ +# Copyright 2019 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import json + +import requests +from requests import models + +from firebase_admin import exceptions +from firebase_admin import _utils + + +_NOT_FOUND_ERROR_DICT = { + 'status': 'NOT_FOUND', + 'message': 'test error' +} + + +_NOT_FOUND_PAYLOAD = json.dumps({ + 'error': _NOT_FOUND_ERROR_DICT, +}) + + +class TestRequests: + + def test_timeout_error(self): + error = requests.exceptions.Timeout('Test error') + firebase_error = _utils.handle_requests_error(error) + assert isinstance(firebase_error, exceptions.DeadlineExceededError) + assert str(firebase_error) == 'Timed out while making an API call: Test error' + assert firebase_error.cause is error + assert firebase_error.http_response is None + + def test_requests_connection_error(self): + error = requests.exceptions.ConnectionError('Test error') + firebase_error = _utils.handle_requests_error(error) + assert isinstance(firebase_error, exceptions.UnavailableError) + assert str(firebase_error) == 'Failed to establish a connection: Test error' + assert firebase_error.cause is error + assert firebase_error.http_response is None + + def test_unknown_transport_error(self): + error = requests.exceptions.RequestException('Test error') + firebase_error = _utils.handle_requests_error(error) + assert isinstance(firebase_error, exceptions.UnknownError) + assert str(firebase_error) == 'Unknown error while making a remote service call: Test error' + assert firebase_error.cause is error + assert firebase_error.http_response is None + + def test_http_response(self): + resp, error = self._create_response() + 
firebase_error = _utils.handle_requests_error(error) + assert isinstance(firebase_error, exceptions.InternalError) + assert str(firebase_error) == 'Test error' + assert firebase_error.cause is error + assert firebase_error.http_response is resp + + def test_http_response_with_unknown_status(self): + resp, error = self._create_response(status=501) + firebase_error = _utils.handle_requests_error(error) + assert isinstance(firebase_error, exceptions.UnknownError) + assert str(firebase_error) == 'Test error' + assert firebase_error.cause is error + assert firebase_error.http_response is resp + + def test_http_response_with_message(self): + resp, error = self._create_response() + firebase_error = _utils.handle_requests_error(error, message='Explicit error message') + assert isinstance(firebase_error, exceptions.InternalError) + assert str(firebase_error) == 'Explicit error message' + assert firebase_error.cause is error + assert firebase_error.http_response is resp + + def test_http_response_with_code(self): + resp, error = self._create_response() + firebase_error = _utils.handle_requests_error(error, code=exceptions.UNAVAILABLE) + assert isinstance(firebase_error, exceptions.UnavailableError) + assert str(firebase_error) == 'Test error' + assert firebase_error.cause is error + assert firebase_error.http_response is resp + + def test_http_response_with_message_and_code(self): + resp, error = self._create_response() + firebase_error = _utils.handle_requests_error( + error, message='Explicit error message', code=exceptions.UNAVAILABLE) + assert isinstance(firebase_error, exceptions.UnavailableError) + assert str(firebase_error) == 'Explicit error message' + assert firebase_error.cause is error + assert firebase_error.http_response is resp + + def test_handle_platform_error(self): + resp, error = self._create_response(payload=_NOT_FOUND_PAYLOAD) + firebase_error = _utils.handle_platform_error_from_requests(error) + assert isinstance(firebase_error, 
exceptions.NotFoundError) + assert str(firebase_error) == 'test error' + assert firebase_error.cause is error + assert firebase_error.http_response is resp + + def test_handle_platform_error_with_no_response(self): + error = requests.exceptions.RequestException('Test error') + firebase_error = _utils.handle_platform_error_from_requests(error) + assert isinstance(firebase_error, exceptions.UnknownError) + assert str(firebase_error) == 'Unknown error while making a remote service call: Test error' + assert firebase_error.cause is error + assert firebase_error.http_response is None + + def test_handle_platform_error_with_no_error_code(self): + resp, error = self._create_response(payload='no error code') + firebase_error = _utils.handle_platform_error_from_requests(error) + assert isinstance(firebase_error, exceptions.InternalError) + message = 'Unexpected HTTP response with status: 500; body: no error code' + assert str(firebase_error) == message + assert firebase_error.cause is error + assert firebase_error.http_response is resp + + def test_handle_platform_error_with_custom_handler(self): + resp, error = self._create_response(payload=_NOT_FOUND_PAYLOAD) + invocations = [] + + def _custom_handler(cause, message, error_dict): + invocations.append((cause, message, error_dict)) + return exceptions.InvalidArgumentError('Custom message', cause, cause.response) + + firebase_error = _utils.handle_platform_error_from_requests(error, _custom_handler) + + assert isinstance(firebase_error, exceptions.InvalidArgumentError) + assert str(firebase_error) == 'Custom message' + assert firebase_error.cause is error + assert firebase_error.http_response is resp + assert len(invocations) == 1 + args = invocations[0] + assert len(args) == 3 + assert args[0] is error + assert args[1] == 'test error' + assert args[2] == _NOT_FOUND_ERROR_DICT + + def test_handle_platform_error_with_custom_handler_ignore(self): + resp, error = self._create_response(payload=_NOT_FOUND_PAYLOAD) + invocations = 
[] + + def _custom_handler(cause, message, error_dict): + invocations.append((cause, message, error_dict)) + + firebase_error = _utils.handle_platform_error_from_requests(error, _custom_handler) + + assert isinstance(firebase_error, exceptions.NotFoundError) + assert str(firebase_error) == 'test error' + assert firebase_error.cause is error + assert firebase_error.http_response is resp + assert len(invocations) == 1 + args = invocations[0] + assert len(args) == 3 + assert args[0] is error + assert args[1] == 'test error' + assert args[2] == _NOT_FOUND_ERROR_DICT + + def _create_response(self, status=500, payload=None): + resp = models.Response() + resp.status_code = status + if payload: + resp.raw = io.BytesIO(payload.encode()) + exc = requests.exceptions.RequestException('Test error', response=resp) + return resp, exc diff --git a/tests/test_firestore.py b/tests/test_firestore.py new file mode 100644 index 000000000..47debd54b --- /dev/null +++ b/tests/test_firestore.py @@ -0,0 +1,167 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for firebase_admin.firestore.""" + +import platform + +import pytest + +import firebase_admin +from firebase_admin import credentials +try: + from firebase_admin import firestore +except ImportError: + pass +from tests import testutils + + +@pytest.mark.skipif( + platform.python_implementation() == 'PyPy', + reason='Firestore is not supported on PyPy') +class TestFirestore: + """Test class Firestore APIs.""" + + def teardown_method(self, method): + del method + testutils.cleanup_apps() + + def test_no_project_id(self): + def evaluate(): + firebase_admin.initialize_app(testutils.MockCredential()) + with pytest.raises(ValueError): + firestore.client() + testutils.run_without_project_id(evaluate) + + def test_project_id(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + client = firestore.client() + assert client is not None + assert client.project == 'explicit-project-id' + assert client._database == '(default)' + + def test_project_id_with_explicit_app(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + app = firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + client = firestore.client(app=app) + assert client is not None + assert client.project == 'explicit-project-id' + assert client._database == '(default)' + + def test_service_account(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + client = firestore.client() + assert client is not None + assert client.project == 'mock-project-id' + assert client._database == '(default)' + + def test_service_account_with_explicit_app(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + app = firebase_admin.initialize_app(cred) + client = firestore.client(app=app) + assert client is not None + assert 
client.project == 'mock-project-id' + assert client._database == '(default)' + + @pytest.mark.parametrize('database_id', [123, False, True, {}, []]) + def test_invalid_database_id(self, database_id): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + with pytest.raises(ValueError) as excinfo: + firestore.client(database_id=database_id) + assert str(excinfo.value) == f'database_id "{database_id}" must be a string or None.' + + def test_database_id(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + database_id = 'mock-database-id' + client = firestore.client(database_id=database_id) + assert client is not None + assert client.project == 'mock-project-id' + assert client._database == 'mock-database-id' + + @pytest.mark.parametrize('database_id', ['', '(default)', None]) + def test_database_id_with_default_id(self, database_id): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + client = firestore.client(database_id=database_id) + assert client is not None + assert client.project == 'mock-project-id' + assert client._database == '(default)' + + def test_database_id_with_explicit_app(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + app = firebase_admin.initialize_app(cred) + database_id = 'mock-database-id' + client = firestore.client(app, database_id) + assert client is not None + assert client.project == 'mock-project-id' + assert client._database == 'mock-database-id' + + def test_database_id_with_multi_db(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + database_id_1 = 'mock-database-id-1' + database_id_2 = 'mock-database-id-2' + client_1 = firestore.client(database_id=database_id_1) + client_2 = 
firestore.client(database_id=database_id_2) + assert (client_1 is not None) and (client_2 is not None) + assert client_1 is not client_2 + assert client_1.project == 'mock-project-id' + assert client_2.project == 'mock-project-id' + assert client_1._database == 'mock-database-id-1' + assert client_2._database == 'mock-database-id-2' + + def test_database_id_with_multi_db_uses_cache(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + database_id = 'mock-database-id' + client_1 = firestore.client(database_id=database_id) + client_2 = firestore.client(database_id=database_id) + assert (client_1 is not None) and (client_2 is not None) + assert client_1 is client_2 + assert client_1.project == 'mock-project-id' + assert client_2.project == 'mock-project-id' + assert client_1._database == 'mock-database-id' + assert client_2._database == 'mock-database-id' + + def test_database_id_with_multi_db_uses_cache_default(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + database_id_1 = '' + database_id_2 = '(default)' + client_1 = firestore.client(database_id=database_id_1) + client_2 = firestore.client(database_id=database_id_2) + client_3 = firestore.client() + assert (client_1 is not None) and (client_2 is not None) and (client_3 is not None) + assert client_1 is client_2 + assert client_1 is client_3 + assert client_2 is client_3 + assert client_1.project == 'mock-project-id' + assert client_2.project == 'mock-project-id' + assert client_3.project == 'mock-project-id' + assert client_1._database == '(default)' + assert client_2._database == '(default)' + assert client_3._database == '(default)' + + + def test_geo_point(self): + geo_point = firestore.GeoPoint(10, 20) # pylint: disable=no-member + assert geo_point.latitude == 10 + assert geo_point.longitude == 20 + + def test_server_timestamp(self): + assert 
firestore.SERVER_TIMESTAMP is not None # pylint: disable=no-member diff --git a/tests/test_firestore_async.py b/tests/test_firestore_async.py new file mode 100644 index 000000000..3d17cbfc5 --- /dev/null +++ b/tests/test_firestore_async.py @@ -0,0 +1,167 @@ +# Copyright 2022 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for firebase_admin.firestore_async.""" + +import platform + +import pytest + +import firebase_admin +from firebase_admin import credentials +try: + from firebase_admin import firestore_async +except ImportError: + pass +from tests import testutils + + +@pytest.mark.skipif( + platform.python_implementation() == 'PyPy', + reason='Firestore is not supported on PyPy') +class TestFirestoreAsync: + """Test class Firestore Async APIs.""" + + def teardown_method(self, method): + del method + testutils.cleanup_apps() + + def test_no_project_id(self): + def evaluate(): + firebase_admin.initialize_app(testutils.MockCredential()) + with pytest.raises(ValueError): + firestore_async.client() + testutils.run_without_project_id(evaluate) + + def test_project_id(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + client = firestore_async.client() + assert client is not None + assert client.project == 'explicit-project-id' + assert client._database == '(default)' + + def test_project_id_with_explicit_app(self): + cred = 
credentials.Certificate(testutils.resource_filename('service_account.json')) + app = firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + client = firestore_async.client(app=app) + assert client is not None + assert client.project == 'explicit-project-id' + assert client._database == '(default)' + + def test_service_account(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + client = firestore_async.client() + assert client is not None + assert client.project == 'mock-project-id' + assert client._database == '(default)' + + def test_service_account_with_explicit_app(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + app = firebase_admin.initialize_app(cred) + client = firestore_async.client(app=app) + assert client is not None + assert client.project == 'mock-project-id' + assert client._database == '(default)' + + @pytest.mark.parametrize('database_id', [123, False, True, {}, []]) + def test_invalid_database_id(self, database_id): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + with pytest.raises(ValueError) as excinfo: + firestore_async.client(database_id=database_id) + assert str(excinfo.value) == f'database_id "{database_id}" must be a string or None.' 
+ + def test_database_id(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + database_id = 'mock-database-id' + client = firestore_async.client(database_id=database_id) + assert client is not None + assert client.project == 'mock-project-id' + assert client._database == 'mock-database-id' + + @pytest.mark.parametrize('database_id', ['', '(default)', None]) + def test_database_id_with_default_id(self, database_id): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + client = firestore_async.client(database_id=database_id) + assert client is not None + assert client.project == 'mock-project-id' + assert client._database == '(default)' + + def test_database_id_with_explicit_app(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + app = firebase_admin.initialize_app(cred) + database_id = 'mock-database-id' + client = firestore_async.client(app, database_id) + assert client is not None + assert client.project == 'mock-project-id' + assert client._database == 'mock-database-id' + + def test_database_id_with_multi_db(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + database_id_1 = 'mock-database-id-1' + database_id_2 = 'mock-database-id-2' + client_1 = firestore_async.client(database_id=database_id_1) + client_2 = firestore_async.client(database_id=database_id_2) + assert (client_1 is not None) and (client_2 is not None) + assert client_1 is not client_2 + assert client_1.project == 'mock-project-id' + assert client_2.project == 'mock-project-id' + assert client_1._database == 'mock-database-id-1' + assert client_2._database == 'mock-database-id-2' + + def test_database_id_with_multi_db_uses_cache(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + 
firebase_admin.initialize_app(cred) + database_id = 'mock-database-id' + client_1 = firestore_async.client(database_id=database_id) + client_2 = firestore_async.client(database_id=database_id) + assert (client_1 is not None) and (client_2 is not None) + assert client_1 is client_2 + assert client_1.project == 'mock-project-id' + assert client_2.project == 'mock-project-id' + assert client_1._database == 'mock-database-id' + assert client_2._database == 'mock-database-id' + + def test_database_id_with_multi_db_uses_cache_default(self): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + database_id_1 = '' + database_id_2 = '(default)' + client_1 = firestore_async.client(database_id=database_id_1) + client_2 = firestore_async.client(database_id=database_id_2) + client_3 = firestore_async.client() + assert (client_1 is not None) and (client_2 is not None) and (client_3 is not None) + assert client_1 is client_2 + assert client_1 is client_3 + assert client_2 is client_3 + assert client_1.project == 'mock-project-id' + assert client_2.project == 'mock-project-id' + assert client_3.project == 'mock-project-id' + assert client_1._database == '(default)' + assert client_2._database == '(default)' + assert client_3._database == '(default)' + + + def test_geo_point(self): + geo_point = firestore_async.GeoPoint(10, 20) # pylint: disable=no-member + assert geo_point.latitude == 10 + assert geo_point.longitude == 20 + + def test_server_timestamp(self): + assert firestore_async.SERVER_TIMESTAMP is not None # pylint: disable=no-member diff --git a/tests/test_functions.py b/tests/test_functions.py new file mode 100644 index 000000000..0f766767a --- /dev/null +++ b/tests/test_functions.py @@ -0,0 +1,433 @@ +# Copyright 2024 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test cases for the firebase_admin.functions module.""" + +from datetime import datetime, timedelta, timezone +import json +import time +import pytest + +import firebase_admin +from firebase_admin import functions +from firebase_admin import _utils +from tests import testutils + + +_DEFAULT_DATA = {'city': 'Seattle'} +_CLOUD_TASKS_URL = 'https://cloudtasks.googleapis.com/v2/' +_DEFAULT_TASK_PATH = \ + 'projects/test-project/locations/us-central1/queues/test-function-name/tasks/test-task-id' +_DEFAULT_REQUEST_URL = \ + _CLOUD_TASKS_URL + 'projects/test-project/locations/us-central1/queues/test-function-name/tasks' +_DEFAULT_TASK_URL = _CLOUD_TASKS_URL + _DEFAULT_TASK_PATH +_DEFAULT_RESPONSE = json.dumps({'name': _DEFAULT_TASK_PATH}) + +class TestTaskQueue: + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': 'test-project'}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + def _instrument_functions_service( + self, app=None, status=200, payload=_DEFAULT_RESPONSE, mounted_url=_CLOUD_TASKS_URL): + if not app: + app = firebase_admin.get_app() + functions_service = functions._get_functions_service(app) + recorder = [] + functions_service._http_client.session.mount( + mounted_url, + testutils.MockAdapter(payload, status, recorder)) + return functions_service, recorder + + def test_task_queue_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='no-project-id') + with pytest.raises(ValueError): + 
functions.task_queue('test-function-name', app=app) + testutils.run_without_project_id(evaluate) + + @pytest.mark.parametrize('function_name', [ + 'projects/test-project/locations/us-central1/functions/test-function-name', + 'locations/us-central1/functions/test-function-name', + 'test-function-name', + ]) + def test_task_queue_function_name(self, function_name): + queue = functions.task_queue(function_name) + assert queue._resource.resource_id == 'test-function-name' + assert queue._resource.project_id == 'test-project' + assert queue._resource.location_id == 'us-central1' + + def test_task_queue_empty_function_name_error(self): + with pytest.raises(ValueError) as excinfo: + functions.task_queue('') + assert str(excinfo.value) == 'function_name "" must be a non-empty string.' + + def test_task_queue_non_string_function_name_error(self): + with pytest.raises(ValueError) as excinfo: + functions.task_queue(1234) + assert str(excinfo.value) == 'function_name "1234" must be a string.' + + @pytest.mark.parametrize('function_name', [ + '/test', + 'test/', + 'test-project/us-central1/test-function-name', + 'projects/test-project/functions/test-function-name', + 'functions/test-function-name', + ]) + def test_task_queue_invalid_function_name_error(self, function_name): + with pytest.raises(ValueError) as excinfo: + functions.task_queue(function_name) + assert str(excinfo.value) == 'Invalid resource name format.' + + def test_task_queue_extension_id(self): + queue = functions.task_queue("test-function-name", "test-extension-id") + assert queue._resource.resource_id == 'ext-test-extension-id-test-function-name' + assert queue._resource.project_id == 'test-project' + assert queue._resource.location_id == 'us-central1' + + def test_task_queue_empty_extension_id_error(self): + with pytest.raises(ValueError) as excinfo: + functions.task_queue('test-function-name', '') + assert str(excinfo.value) == 'extension_id "" must be a non-empty string.' 
+ + def test_task_queue_non_string_extension_id_error(self): + with pytest.raises(ValueError) as excinfo: + functions.task_queue('test-function-name', 1234) + assert str(excinfo.value) == 'extension_id "1234" must be a string.' + + + def test_task_enqueue(self): + _, recorder = self._instrument_functions_service() + queue = functions.task_queue('test-function-name') + task_id = queue.enqueue(_DEFAULT_DATA) + assert len(recorder) == 1 + assert recorder[0].method == 'POST' + assert recorder[0].url == _DEFAULT_REQUEST_URL + assert recorder[0].headers['Content-Type'] == 'application/json' + assert recorder[0].headers['Authorization'] == 'Bearer mock-token' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert recorder[0].headers['x-goog-api-client'] == expected_metrics_header + assert task_id == 'test-task-id' + + task = json.loads(recorder[0].body.decode())['task'] + assert task['httpRequest']['oidcToken'] == {'serviceAccountEmail': 'mock-email'} + assert task['httpRequest']['headers'] == {'Content-Type': 'application/json'} + + def test_task_enqueue_with_extension(self): + resource_name = ( + 'projects/test-project/locations/us-central1/queues/' + 'ext-test-extension-id-test-function-name/tasks' + ) + extension_response = json.dumps({'name': resource_name + '/test-task-id'}) + _, recorder = self._instrument_functions_service(payload=extension_response) + queue = functions.task_queue('test-function-name', 'test-extension-id') + task_id = queue.enqueue(_DEFAULT_DATA) + assert len(recorder) == 1 + assert recorder[0].method == 'POST' + assert recorder[0].url == _CLOUD_TASKS_URL + resource_name + assert recorder[0].headers['Content-Type'] == 'application/json' + assert recorder[0].headers['Authorization'] == 'Bearer mock-token' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert recorder[0].headers['x-goog-api-client'] == expected_metrics_header + assert task_id == 'test-task-id' + + task = 
json.loads(recorder[0].body.decode())['task'] + assert task['httpRequest']['oidcToken'] == {'serviceAccountEmail': 'mock-email'} + assert task['httpRequest']['headers'] == {'Content-Type': 'application/json'} + + def test_task_enqueue_compute_engine(self): + app = firebase_admin.initialize_app( + testutils.MockComputeEngineCredential(), + options={'projectId': 'test-project'}, + name='test-project-gce') + _, recorder = self._instrument_functions_service(app) + queue = functions.task_queue('test-function-name', app=app) + task_id = queue.enqueue(_DEFAULT_DATA) + assert len(recorder) == 1 + assert recorder[0].method == 'POST' + assert recorder[0].url == _DEFAULT_REQUEST_URL + assert recorder[0].headers['Content-Type'] == 'application/json' + assert recorder[0].headers['Authorization'] == 'Bearer mock-compute-engine-token' + expected_metrics_header = _utils.get_metrics_header() + ' mock-gce-cred-metric-tag' + assert recorder[0].headers['x-goog-api-client'] == expected_metrics_header + assert task_id == 'test-task-id' + + task = json.loads(recorder[0].body.decode())['task'] + assert task['httpRequest']['oidcToken'] == {'serviceAccountEmail': 'mock-gce-email'} + assert task['httpRequest']['headers'] == {'Content-Type': 'application/json'} + + def test_task_enqueue_with_extension_compute_engine(self): + resource_name = ( + 'projects/test-project/locations/us-central1/queues/' + 'ext-test-extension-id-test-function-name/tasks' + ) + extension_response = json.dumps({'name': resource_name + '/test-task-id'}) + app = firebase_admin.initialize_app( + testutils.MockComputeEngineCredential(), + options={'projectId': 'test-project'}, + name='test-project-gce-extensions') + _, recorder = self._instrument_functions_service(app, payload=extension_response) + queue = functions.task_queue('test-function-name', 'test-extension-id', app) + task_id = queue.enqueue(_DEFAULT_DATA) + assert len(recorder) == 1 + assert recorder[0].method == 'POST' + assert recorder[0].url == 
_CLOUD_TASKS_URL + resource_name + assert recorder[0].headers['Content-Type'] == 'application/json' + assert recorder[0].headers['Authorization'] == 'Bearer mock-compute-engine-token' + expected_metrics_header = _utils.get_metrics_header() + ' mock-gce-cred-metric-tag' + assert recorder[0].headers['x-goog-api-client'] == expected_metrics_header + assert task_id == 'test-task-id' + + task = json.loads(recorder[0].body.decode())['task'] + assert 'oidcToken' not in task['httpRequest'] + assert task['httpRequest']['headers'] == { + 'Content-Type': 'application/json', + 'Authorization': 'Bearer mock-compute-engine-token'} + + def test_task_delete(self): + _, recorder = self._instrument_functions_service() + queue = functions.task_queue('test-function-name') + queue.delete('test-task-id') + assert len(recorder) == 1 + assert recorder[0].method == 'DELETE' + assert recorder[0].url == _DEFAULT_TASK_URL + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert recorder[0].headers['x-goog-api-client'] == expected_metrics_header + + def test_task_enqueue_with_emulator_host(self, monkeypatch): + emulator_host = 'localhost:8124' + emulator_url = f'http://{emulator_host}/' + request_url = emulator_url + _DEFAULT_TASK_PATH.replace('/tasks/test-task-id', '/tasks') + + monkeypatch.setenv('CLOUD_TASKS_EMULATOR_HOST', emulator_host) + app = firebase_admin.initialize_app( + _utils.EmulatorAdminCredentials(), {'projectId': 'test-project'}, name='emulator-app') + + expected_task_name = ( + '/projects/test-project/locations/us-central1' + '/queues/test-function-name/tasks/test-task-id' + ) + expected_response = json.dumps({'task': {'name': expected_task_name}}) + _, recorder = self._instrument_functions_service( + app, payload=expected_response, mounted_url=emulator_url) + + queue = functions.task_queue('test-function-name', app=app) + task_id = queue.enqueue(_DEFAULT_DATA) + + assert len(recorder) == 1 + assert recorder[0].method == 'POST' + assert 
recorder[0].url == request_url + assert recorder[0].headers['Content-Type'] == 'application/json' + + task = json.loads(recorder[0].body.decode())['task'] + assert task['httpRequest']['oidcToken'] == { + 'serviceAccountEmail': 'emulated-service-acct@email.com' + } + assert task_id == 'test-task-id' + + def test_task_enqueue_without_emulator_host_error(self, monkeypatch): + app = firebase_admin.initialize_app( + _utils.EmulatorAdminCredentials(), + {'projectId': 'test-project'}, name='no-emulator-app') + + _, recorder = self._instrument_functions_service(app) + monkeypatch.delenv('CLOUD_TASKS_EMULATOR_HOST', raising=False) + queue = functions.task_queue('test-function-name', app=app) + with pytest.raises(ValueError) as excinfo: + queue.enqueue(_DEFAULT_DATA) + assert "Failed to determine service account" in str(excinfo.value) + assert len(recorder) == 0 + + def test_get_emulator_url_invalid_format(self, monkeypatch): + monkeypatch.setenv('CLOUD_TASKS_EMULATOR_HOST', 'http://localhost:8124') + app = firebase_admin.initialize_app( + testutils.MockCredential(), {'projectId': 'test-project'}, name='invalid-host-app') + with pytest.raises(ValueError) as excinfo: + functions.task_queue('test-function-name', app=app) + assert 'Invalid CLOUD_TASKS_EMULATOR_HOST' in str(excinfo.value) + +class TestTaskQueueOptions: + + _DEFAULT_TASK_OPTS = {'schedule_delay_seconds': None, 'schedule_time': None, \ + 'dispatch_deadline_seconds': None, 'task_id': None, 'headers': None} + + non_alphanumeric_chars = [ + ',', '.', '?', '!', ':', ';', "'", '"', '(', ')', '[', ']', '{', '}', + '@', '&', '*', '+', '=', '$', '%', '#', '~', '\\', '/', '|', '^', + '\t', '\n', '\r', '\f', '\v', '\0', '\a', '\b', + 'é', 'ç', 'ö', '❤️', '€', '¥', '£', '←', '→', '↑', '↓', 'π', 'Ω', 'ß' + ] + + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': 'test-project'}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + 
def _instrument_functions_service(self, app=None, status=200, payload=_DEFAULT_RESPONSE): + if not app: + app = firebase_admin.get_app() + functions_service = functions._get_functions_service(app) + recorder = [] + functions_service._http_client.session.mount( + _CLOUD_TASKS_URL, + testutils.MockAdapter(payload, status, recorder)) + return functions_service, recorder + + def test_task_options_delay_seconds(self): + _, recorder = self._instrument_functions_service() + enqueue_time = datetime.now(timezone.utc) + expected_schedule_time = enqueue_time + timedelta(seconds=100) + task_opts_params = { + 'schedule_delay_seconds': 100, + 'schedule_time': None, + 'dispatch_deadline_seconds': 200, + 'task_id': 'test-task-id', + 'headers': {'x-test-header': 'test-header-value'}, + 'uri': 'https://google.com' + } + queue = functions.task_queue('test-function-name') + task_opts = functions.TaskOptions(**task_opts_params) + queue.enqueue(_DEFAULT_DATA, task_opts) + + assert len(recorder) == 1 + task = json.loads(recorder[0].body.decode())['task'] + + task_schedule_time = datetime.fromisoformat(task['scheduleTime'].replace('Z', '+00:00')) + delta = abs(task_schedule_time - expected_schedule_time) + assert delta <= timedelta(seconds=1) + + assert task['dispatchDeadline'] == '200s' + assert task['httpRequest']['headers']['x-test-header'] == 'test-header-value' + assert task['httpRequest']['url'] in ['http://google.com', 'https://google.com'] + assert task['name'] == _DEFAULT_TASK_PATH + + def test_task_options_utc_time(self): + _, recorder = self._instrument_functions_service() + enqueue_time = datetime.now(timezone.utc) + expected_schedule_time = enqueue_time + timedelta(seconds=100) + task_opts_params = { + 'schedule_delay_seconds': None, + 'schedule_time': expected_schedule_time, + 'dispatch_deadline_seconds': 200, + 'task_id': 'test-task-id', + 'headers': {'x-test-header': 'test-header-value'}, + 'uri': 'http://google.com' + } + queue = functions.task_queue('test-function-name') 
+ task_opts = functions.TaskOptions(**task_opts_params) + queue.enqueue(_DEFAULT_DATA, task_opts) + + assert len(recorder) == 1 + task = json.loads(recorder[0].body.decode())['task'] + + task_schedule_time = datetime.fromisoformat(task['scheduleTime'].replace('Z', '+00:00')) + assert task_schedule_time == expected_schedule_time + + assert task['dispatchDeadline'] == '200s' + assert task['httpRequest']['headers']['x-test-header'] == 'test-header-value' + assert task['httpRequest']['url'] in ['http://google.com', 'https://google.com'] + assert task['name'] == _DEFAULT_TASK_PATH + + def test_schedule_set_twice_error(self): + _, recorder = self._instrument_functions_service() + opts = functions.TaskOptions( + schedule_delay_seconds=100, schedule_time=datetime.now(timezone.utc)) + queue = functions.task_queue('test-function-name') + with pytest.raises(ValueError) as excinfo: + queue.enqueue(_DEFAULT_DATA, opts) + assert len(recorder) == 0 + assert str(excinfo.value) == \ + 'Both schedule_delay_seconds and schedule_time cannot be set at the same time.' + + + @pytest.mark.parametrize('schedule_time', [ + time.time(), + str(datetime.now(timezone.utc)), + datetime.now(timezone.utc).isoformat(), + datetime.now(timezone.utc).isoformat() + 'Z', + '', ' ' + ]) + def test_invalid_schedule_time_error(self, schedule_time): + _, recorder = self._instrument_functions_service() + opts = functions.TaskOptions(schedule_time=schedule_time) + queue = functions.task_queue('test-function-name') + with pytest.raises(ValueError) as excinfo: + queue.enqueue(_DEFAULT_DATA, opts) + assert len(recorder) == 0 + assert str(excinfo.value) == 'schedule_time should be UTC datetime.' 
+ + + @pytest.mark.parametrize('schedule_delay_seconds', [ + -1, '100', '-1', '', ' ', -1.23, 1.23 + ]) + def test_invalid_schedule_delay_seconds_error(self, schedule_delay_seconds): + _, recorder = self._instrument_functions_service() + opts = functions.TaskOptions(schedule_delay_seconds=schedule_delay_seconds) + queue = functions.task_queue('test-function-name') + with pytest.raises(ValueError) as excinfo: + queue.enqueue(_DEFAULT_DATA, opts) + assert len(recorder) == 0 + assert str(excinfo.value) == 'schedule_delay_seconds should be positive int.' + + + @pytest.mark.parametrize('dispatch_deadline_seconds', [ + 14, 1801, -15, -1800, 0, '100', '-1', '', ' ', -1.23, 1.23, + ]) + def test_invalid_dispatch_deadline_seconds_error(self, dispatch_deadline_seconds): + _, recorder = self._instrument_functions_service() + opts = functions.TaskOptions(dispatch_deadline_seconds=dispatch_deadline_seconds) + queue = functions.task_queue('test-function-name') + with pytest.raises(ValueError) as excinfo: + queue.enqueue(_DEFAULT_DATA, opts) + assert len(recorder) == 0 + assert str(excinfo.value) == \ + 'dispatch_deadline_seconds should be int in the range of 15s to 1800s (30 mins).' + + + @pytest.mark.parametrize('task_id', [ + '', ' ', 'task/1', 'task.1', 'a'*501, *non_alphanumeric_chars + ]) + def test_invalid_task_id_error(self, task_id): + _, recorder = self._instrument_functions_service() + opts = functions.TaskOptions(task_id=task_id) + queue = functions.task_queue('test-function-name') + with pytest.raises(ValueError) as excinfo: + queue.enqueue(_DEFAULT_DATA, opts) + assert len(recorder) == 0 + assert str(excinfo.value) == ( + 'task_id can contain only letters ([A-Za-z]), numbers ([0-9]), ' + 'hyphens (-), or underscores (_). The maximum length is 500 characters.' 
+ ) + + @pytest.mark.parametrize('uri', [ + '', ' ', 'a', 'foo', 'image.jpg', [], {}, True, 'google.com', 'www.google.com' + ]) + def test_invalid_uri_error(self, uri): + _, recorder = self._instrument_functions_service() + opts = functions.TaskOptions(uri=uri) + queue = functions.task_queue('test-function-name') + with pytest.raises(ValueError) as excinfo: + queue.enqueue(_DEFAULT_DATA, opts) + assert len(recorder) == 0 + assert str(excinfo.value) == \ + 'uri must be a valid RFC3986 URI string using the https or http schema.' diff --git a/tests/test_http_client.py b/tests/test_http_client.py new file mode 100644 index 000000000..f1e7f6a64 --- /dev/null +++ b/tests/test_http_client.py @@ -0,0 +1,670 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for firebase_admin._http_client.""" +from typing import Dict, Optional, Union +import pytest +import httpx +import respx +from pytest_localserver import http +from pytest_mock import MockerFixture +import requests + +from firebase_admin import _http_client, _utils +from firebase_admin._retry import HttpxRetry, HttpxRetryTransport +from firebase_admin._http_client import ( + HttpxAsyncClient, + GoogleAuthCredentialFlow, + DEFAULT_TIMEOUT_SECONDS +) +from tests import testutils + + +_TEST_URL = 'http://firebase.test.url/' + +@pytest.fixture +def default_retry_config() -> HttpxRetry: + """Provides a fresh copy of the default retry config instance.""" + return _http_client.DEFAULT_HTTPX_RETRY_CONFIG + +class TestHttpClient: + def test_http_client_default_session(self): + client = _http_client.HttpClient() + assert client.session is not None + assert client.base_url == '' + recorder = self._instrument(client, 'body') + resp = client.request('get', _TEST_URL) + assert resp.status_code == 200 + assert resp.text == 'body' + assert len(recorder) == 1 + assert recorder[0].method == 'GET' + assert recorder[0].url == _TEST_URL + + def test_http_client_custom_session(self): + session = requests.Session() + client = _http_client.HttpClient(session=session) + assert client.session is session + assert client.base_url == '' + recorder = self._instrument(client, 'body') + resp = client.request('get', _TEST_URL) + assert resp.status_code == 200 + assert resp.text == 'body' + assert len(recorder) == 1 + assert recorder[0].method == 'GET' + assert recorder[0].url == _TEST_URL + + def test_base_url(self): + client = _http_client.HttpClient(base_url=_TEST_URL) + assert client.session is not None + assert client.base_url == _TEST_URL + recorder = self._instrument(client, 'body') + resp = client.request('get', 'foo') + assert resp.status_code == 200 + assert resp.text == 'body' + assert len(recorder) == 1 + assert recorder[0].method == 'GET' + assert recorder[0].url == 
_TEST_URL + 'foo' + + def test_metrics_headers(self): + client = _http_client.HttpClient() + assert client.session is not None + recorder = self._instrument(client, 'body') + resp = client.request('get', _TEST_URL) + assert resp.status_code == 200 + assert resp.text == 'body' + assert len(recorder) == 1 + assert recorder[0].method == 'GET' + assert recorder[0].url == _TEST_URL + assert recorder[0].headers['x-goog-api-client'] == _utils.get_metrics_header() + + def test_metrics_headers_with_credentials(self): + client = _http_client.HttpClient( + credential=testutils.MockGoogleCredential()) + assert client.session is not None + recorder = self._instrument(client, 'body') + resp = client.request('get', _TEST_URL) + assert resp.status_code == 200 + assert resp.text == 'body' + assert len(recorder) == 1 + assert recorder[0].method == 'GET' + assert recorder[0].url == _TEST_URL + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert recorder[0].headers['x-goog-api-client'] == expected_metrics_header + + def test_credential(self): + client = _http_client.HttpClient( + credential=testutils.MockGoogleCredential()) + assert client.session is not None + recorder = self._instrument(client, 'body') + resp = client.request('get', _TEST_URL) + assert resp.status_code == 200 + assert resp.text == 'body' + assert len(recorder) == 1 + assert recorder[0].method == 'GET' + assert recorder[0].url == _TEST_URL + assert recorder[0].headers['Authorization'] == 'Bearer mock-token' + + @pytest.mark.parametrize('options, timeout', [ + ({}, _http_client.DEFAULT_TIMEOUT_SECONDS), + ({'timeout': 7}, 7), + ({'timeout': 0}, 0), + ({'timeout': None}, None), + ]) + def test_timeout(self, options, timeout): + client = _http_client.HttpClient(**options) + assert client.timeout == timeout + recorder = self._instrument(client, 'body') + client.request('get', _TEST_URL) + assert len(recorder) == 1 + if timeout is None: + assert recorder[0]._extra_kwargs['timeout'] is 
None + else: + assert recorder[0]._extra_kwargs['timeout'] == pytest.approx(timeout, 0.001) + + + def _instrument(self, client, payload, status=200): + recorder = [] + adapter = testutils.MockAdapter(payload, status, recorder) + client.session.mount(_TEST_URL, adapter) + return recorder + + +class TestHttpRetry: + """Unit tests for the default HTTP retry configuration.""" + + ENTITY_ENCLOSING_METHODS = ['post', 'put', 'patch'] + ALL_METHODS = ENTITY_ENCLOSING_METHODS + ['get', 'delete', 'head', 'options'] + + @classmethod + def setup_class(cls): + # Turn off exponential backoff for faster execution. + _http_client.DEFAULT_RETRY_CONFIG.backoff_factor = 0 + + # Start a test server instance scoped to the class. + server = http.ContentServer() + server.start() + cls.httpserver = server + + @classmethod + def teardown_class(cls): + cls.httpserver.stop() + + def setup_method(self): + # Clean up any state in the server before starting a new test case. + self.httpserver.requests = [] + + @pytest.mark.parametrize('method', ALL_METHODS) + def test_retry_on_503(self, method): + self.httpserver.serve_content({}, 503) + client = _http_client.JsonHttpClient( + credential=testutils.MockGoogleCredential(), base_url=self.httpserver.url) + body = None + if method in self.ENTITY_ENCLOSING_METHODS: + body = {'key': 'value'} + with pytest.raises(requests.exceptions.HTTPError) as excinfo: + client.request(method, '/', json=body) + assert excinfo.value.response.status_code == 503 + assert len(self.httpserver.requests) == 5 + + @pytest.mark.parametrize('method', ALL_METHODS) + def test_retry_on_500(self, method): + self.httpserver.serve_content({}, 500) + client = _http_client.JsonHttpClient( + credential=testutils.MockGoogleCredential(), base_url=self.httpserver.url) + body = None + if method in self.ENTITY_ENCLOSING_METHODS: + body = {'key': 'value'} + with pytest.raises(requests.exceptions.HTTPError) as excinfo: + client.request(method, '/', json=body) + assert 
excinfo.value.response.status_code == 500 + assert len(self.httpserver.requests) == 5 + + def test_no_retry_on_404(self): + self.httpserver.serve_content({}, 404) + client = _http_client.JsonHttpClient( + credential=testutils.MockGoogleCredential(), base_url=self.httpserver.url) + with pytest.raises(requests.exceptions.HTTPError) as excinfo: + client.request('get', '/') + assert excinfo.value.response.status_code == 404 + assert len(self.httpserver.requests) == 1 + +class TestHttpxAsyncClient: + def test_init_default(self, mocker: MockerFixture, default_retry_config: HttpxRetry): + """Test client initialization with default settings (no credentials).""" + + # Mock httpx.AsyncClient and HttpxRetryTransport init to check args passed to them + mock_async_client_init = mocker.patch('httpx.AsyncClient.__init__', return_value=None) + mock_transport_init = mocker.patch( + 'firebase_admin._retry.HttpxRetryTransport.__init__', return_value=None + ) + + client = HttpxAsyncClient() + + assert client.base_url == '' + assert client.timeout == DEFAULT_TIMEOUT_SECONDS + assert client._headers == _http_client.METRICS_HEADERS + assert client._retry_config == default_retry_config + + # Check httpx.AsyncClient call args + _, init_kwargs = mock_async_client_init.call_args + assert init_kwargs.get('http2') is True + assert init_kwargs.get('timeout') == DEFAULT_TIMEOUT_SECONDS + assert init_kwargs.get('headers') == _http_client.METRICS_HEADERS + assert init_kwargs.get('auth') is None + assert 'mounts' in init_kwargs + assert 'http://' in init_kwargs['mounts'] + assert 'https://' in init_kwargs['mounts'] + assert isinstance(init_kwargs['mounts']['http://'], HttpxRetryTransport) + assert isinstance(init_kwargs['mounts']['https://'], HttpxRetryTransport) + + # Check that HttpxRetryTransport was initialized with the default retry config + assert mock_transport_init.call_count >= 1 + _, transport_call_kwargs = mock_transport_init.call_args_list[0] + assert transport_call_kwargs.get('retry') 
== default_retry_config + assert transport_call_kwargs.get('http2') is True + + def test_init_with_credentials(self, mocker: MockerFixture, default_retry_config: HttpxRetry): + """Test client initialization with credentials.""" + + # Mock GoogleAuthCredentialFlow, httpx.AsyncClient and HttpxRetryTransport init to + # check args passed to them + mock_auth_flow_init = mocker.patch( + 'firebase_admin._http_client.GoogleAuthCredentialFlow.__init__', return_value=None + ) + mock_async_client_init = mocker.patch('httpx.AsyncClient.__init__', return_value=None) + mock_transport_init = mocker.patch( + 'firebase_admin._retry.HttpxRetryTransport.__init__', return_value=None + ) + + mock_credential = testutils.MockGoogleCredential() + client = HttpxAsyncClient(credential=mock_credential) + + assert client.base_url == '' + assert client.timeout == DEFAULT_TIMEOUT_SECONDS + assert client._headers == _http_client.METRICS_HEADERS + assert client._retry_config == default_retry_config + + # Verify GoogleAuthCredentialFlow was initialized with the credential + mock_auth_flow_init.assert_called_once_with(mock_credential) + + # Check httpx.AsyncClient call args + _, init_kwargs = mock_async_client_init.call_args + assert init_kwargs.get('http2') is True + assert init_kwargs.get('timeout') == DEFAULT_TIMEOUT_SECONDS + assert init_kwargs.get('headers') == _http_client.METRICS_HEADERS + assert isinstance(init_kwargs.get('auth'), GoogleAuthCredentialFlow) + assert 'mounts' in init_kwargs + assert 'http://' in init_kwargs['mounts'] + assert 'https://' in init_kwargs['mounts'] + assert isinstance(init_kwargs['mounts']['http://'], HttpxRetryTransport) + assert isinstance(init_kwargs['mounts']['https://'], HttpxRetryTransport) + + # Check that HttpxRetryTransport was initialized with the default retry config + assert mock_transport_init.call_count >= 1 + _, transport_call_kwargs = mock_transport_init.call_args_list[0] + assert transport_call_kwargs.get('retry') == default_retry_config + 
assert transport_call_kwargs.get('http2') is True + + def test_init_with_custom_settings(self, mocker: MockerFixture): + """Test client initialization with custom settings.""" + + # Mock httpx.AsyncClient and HttpxRetryTransport init to check args passed to them + mock_auth_flow_init = mocker.patch( + 'firebase_admin._http_client.GoogleAuthCredentialFlow.__init__', return_value=None + ) + mock_async_client_init = mocker.patch('httpx.AsyncClient.__init__', return_value=None) + mock_transport_init = mocker.patch( + 'firebase_admin._retry.HttpxRetryTransport.__init__', return_value=None + ) + + mock_credential = testutils.MockGoogleCredential() + headers = {'X-Custom': 'Test'} + custom_retry = HttpxRetry(max_retries=1, status_forcelist=[429], backoff_factor=0) + timeout = 60 + http2 = False + + expected_headers = {**headers, **_http_client.METRICS_HEADERS} + + client = HttpxAsyncClient( + credential=mock_credential, base_url=_TEST_URL, headers=headers, + retry_config=custom_retry, timeout=timeout, http2=http2) + + assert client.base_url == _TEST_URL + assert client._headers == expected_headers + assert client._retry_config == custom_retry + assert client.timeout == timeout + + # Verify GoogleAuthCredentialFlow was initialized with the credential + mock_auth_flow_init.assert_called_once_with(mock_credential) + # Verify original headers are not mutated + assert headers == {'X-Custom': 'Test'} + + # Check httpx.AsyncClient call args + _, init_kwargs = mock_async_client_init.call_args + assert init_kwargs.get('http2') is False + assert init_kwargs.get('timeout') == timeout + assert init_kwargs.get('headers') == expected_headers + assert isinstance(init_kwargs.get('auth'), GoogleAuthCredentialFlow) + assert 'mounts' in init_kwargs + assert 'http://' in init_kwargs['mounts'] + assert 'https://' in init_kwargs['mounts'] + assert isinstance(init_kwargs['mounts']['http://'], HttpxRetryTransport) + assert isinstance(init_kwargs['mounts']['https://'], HttpxRetryTransport) + + # 
Check that HttpxRetryTransport was initialized with the default retry config + assert mock_transport_init.call_count >= 1 + _, transport_call_kwargs = mock_transport_init.call_args_list[0] + assert transport_call_kwargs.get('retry') == custom_retry + assert transport_call_kwargs.get('http2') is False + + + @respx.mock + @pytest.mark.asyncio + async def test_request(self): + """Test client request.""" + + client = HttpxAsyncClient() + + responses = [ + respx.MockResponse(200, http_version='HTTP/2', content='body'), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + resp = await client.request('post', _TEST_URL) + assert resp.status_code == 200 + assert resp.text == 'body' + assert route.call_count == 1 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + self.check_headers(request.headers, has_auth=False) + + @respx.mock + @pytest.mark.asyncio + async def test_request_raise_for_status(self): + """Test client request raise for status error.""" + + client = HttpxAsyncClient() + + responses = [ + respx.MockResponse(404, http_version='HTTP/2', content='Status error'), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + with pytest.raises(httpx.HTTPStatusError) as exc_info: + resp = await client.request('post', _TEST_URL) + resp = exc_info.value.response + assert resp.status_code == 404 + assert resp.text == 'Status error' + assert route.call_count == 1 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + self.check_headers(request.headers, has_auth=False) + + + @respx.mock + @pytest.mark.asyncio + async def test_request_with_base_url(self): + """Test client request with base_url.""" + + client = HttpxAsyncClient(base_url=_TEST_URL) + + url_extension = 'post/123' + responses = [ + respx.MockResponse(200, http_version='HTTP/2', content='body'), + ] + route = respx.request('POST', _TEST_URL + 
url_extension).mock(side_effect=responses) + + resp = await client.request('POST', url_extension) + assert resp.status_code == 200 + assert resp.text == 'body' + assert route.call_count == 1 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + url_extension + self.check_headers(request.headers, has_auth=False) + + @respx.mock + @pytest.mark.asyncio + async def test_request_with_timeout(self): + """Test client request with timeout.""" + + timeout = 60 + client = HttpxAsyncClient(timeout=timeout) + responses = [ + respx.MockResponse(200, http_version='HTTP/2', content='body'), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + resp = await client.request('POST', _TEST_URL) + assert resp.status_code == 200 + assert resp.text == 'body' + assert route.call_count == 1 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + self.check_headers(request.headers, has_auth=False) + + @respx.mock + @pytest.mark.asyncio + async def test_request_with_credential(self): + """Test client request with credentials.""" + + mock_credential = testutils.MockGoogleCredential() + client = HttpxAsyncClient(credential=mock_credential) + + responses = [ + respx.MockResponse(200, http_version='HTTP/2', content='test'), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + resp = await client.request('post', _TEST_URL) + + assert resp.status_code == 200 + assert resp.text == 'test' + assert route.call_count == 1 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + self.check_headers(request.headers) + + @respx.mock + @pytest.mark.asyncio + async def test_request_with_headers(self): + """Test client request with credentials.""" + + mock_credential = testutils.MockGoogleCredential() + headers = httpx.Headers({'X-Custom': 'Test'}) + client = 
HttpxAsyncClient(credential=mock_credential, headers=headers) + + responses = [ + respx.MockResponse(200, http_version='HTTP/2', content='body'), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + resp = await client.request('post', _TEST_URL) + assert resp.status_code == 200 + assert resp.text == 'body' + assert route.call_count == 1 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + self.check_headers(request.headers, expected_headers=headers) + + + @respx.mock + @pytest.mark.asyncio + async def test_response_get_headers(self): + """Test the headers() helper method.""" + + client = HttpxAsyncClient() + expected_headers = {'X-Custom': 'Test'} + + responses = [ + respx.MockResponse(200, http_version='HTTP/2', headers=expected_headers), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + headers = await client.headers('post', _TEST_URL) + + self.check_headers( + headers, expected_headers=expected_headers, has_auth=False, has_metrics=False + ) + assert route.call_count == 1 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + self.check_headers(request.headers, has_auth=False) + + @respx.mock + @pytest.mark.asyncio + async def test_response_get_body_and_response(self): + """Test the body_and_response() helper method.""" + + client = HttpxAsyncClient() + expected_body = {'key': 'value'} + + responses = [ + respx.MockResponse(200, http_version='HTTP/2', json=expected_body), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + body, resp = await client.body_and_response('post', _TEST_URL) + + assert resp.status_code == 200 + assert body == expected_body + assert route.call_count == 1 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + self.check_headers(request.headers, has_auth=False) + + + @respx.mock + 
@pytest.mark.asyncio + async def test_response_get_body(self): + """Test the body() helper method.""" + + client = HttpxAsyncClient() + expected_body = {'key': 'value'} + + responses = [ + respx.MockResponse(200, http_version='HTTP/2', json=expected_body), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + body = await client.body('post', _TEST_URL) + + assert body == expected_body + assert route.call_count == 1 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + self.check_headers(request.headers, has_auth=False) + + @respx.mock + @pytest.mark.asyncio + async def test_response_get_headers_and_body(self): + """Test the headers_and_body() helper method.""" + + client = HttpxAsyncClient() + expected_headers = {'X-Custom': 'Test'} + expected_body = {'key': 'value'} + + responses = [ + respx.MockResponse( + 200, http_version='HTTP/2', json=expected_body, headers=expected_headers), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + headers, body = await client.headers_and_body('post', _TEST_URL) + + assert body == expected_body + self.check_headers( + headers, expected_headers=expected_headers, has_auth=False, has_metrics=False + ) + assert route.call_count == 1 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + self.check_headers(request.headers, has_auth=False) + + @pytest.mark.asyncio + async def test_aclose(self): + """Test that aclose calls the underlying client's aclose.""" + + client = HttpxAsyncClient() + assert client._async_client.is_closed is False + await client.aclose() + assert client._async_client.is_closed is True + + + def check_headers( + self, + headers: Union[httpx.Headers, Dict[str, str]], + expected_headers: Optional[Union[httpx.Headers, Dict[str, str]]] = None, + has_auth: bool = True, + has_metrics: bool = True + ): + if expected_headers: + for header_key in 
expected_headers.keys(): + assert header_key in headers + assert headers.get(header_key) == expected_headers.get(header_key) + + if has_auth: + assert 'Authorization' in headers + assert headers.get('Authorization') == 'Bearer mock-token' + + if has_metrics: + for header_key in _http_client.METRICS_HEADERS: + assert header_key in headers + expected_metrics_header = _http_client.METRICS_HEADERS.get(header_key, '') + if has_auth: + expected_metrics_header += ' mock-cred-metric-tag' + assert headers.get(header_key) == expected_metrics_header + + +class TestGoogleAuthCredentialFlow: + + @respx.mock + @pytest.mark.asyncio + async def test_auth_headers_retry(self): + """Test invalid credential retry.""" + + mock_credential = testutils.MockGoogleCredential() + client = HttpxAsyncClient(credential=mock_credential) + + responses = [ + respx.MockResponse(401, http_version='HTTP/2', content='Auth error'), + respx.MockResponse(401, http_version='HTTP/2', content='Auth error'), + respx.MockResponse(200, http_version='HTTP/2', content='body'), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + resp = await client.request('post', _TEST_URL) + assert resp.status_code == 200 + assert resp.text == 'body' + assert route.call_count == 3 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + headers = request.headers + assert 'Authorization' in headers + assert headers.get('Authorization') == 'Bearer mock-token' + + @respx.mock + @pytest.mark.asyncio + async def test_auth_headers_retry_exhausted(self, mocker: MockerFixture): + """Test invalid credential retry exhausted.""" + + mock_credential = testutils.MockGoogleCredential() + mock_credential_patch = mocker.spy(mock_credential, 'refresh') + client = HttpxAsyncClient(credential=mock_credential) + + responses = [ + respx.MockResponse(401, http_version='HTTP/2', content='Auth error'), + respx.MockResponse(401, http_version='HTTP/2', content='Auth error'), 
+ respx.MockResponse(401, http_version='HTTP/2', content='Auth error'), + # Should stop after previous response + respx.MockResponse(200, http_version='HTTP/2', content='body'), + ] + route = respx.request('POST', _TEST_URL).mock(side_effect=responses) + + with pytest.raises(httpx.HTTPStatusError) as exc_info: + resp = await client.request('post', _TEST_URL) + resp = exc_info.value.response + assert resp.status_code == 401 + assert resp.text == 'Auth error' + assert route.call_count == 3 + + assert mock_credential_patch.call_count == 3 + + request = route.calls.last.request + assert request.method == 'POST' + assert request.url == _TEST_URL + headers = request.headers + assert 'Authorization' in headers + assert headers.get('Authorization') == 'Bearer mock-token' diff --git a/tests/test_instance_id.py b/tests/test_instance_id.py new file mode 100644 index 000000000..2b0e21079 --- /dev/null +++ b/tests/test_instance_id.py @@ -0,0 +1,148 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for firebase_admin.instance_id.""" + +import pytest + +import firebase_admin +from firebase_admin import exceptions +from firebase_admin import instance_id +from firebase_admin import _http_client +from firebase_admin import _utils +from tests import testutils + + +http_errors = { + 400: ( + 'Instance ID "test_iid": Malformed instance ID argument.', + exceptions.InvalidArgumentError), + 401: ( + 'Instance ID "test_iid": Request not authorized.', + exceptions.UnauthenticatedError), + 403: ( + ('Instance ID "test_iid": Project does not match instance ID or the client does not have ' + 'sufficient privileges.'), + exceptions.PermissionDeniedError), + 404: ( + 'Instance ID "test_iid": Failed to find the instance ID.', + exceptions.NotFoundError), + 409: ( + 'Instance ID "test_iid": Already deleted.', + exceptions.ConflictError), + 429: ( + 'Instance ID "test_iid": Request throttled out by the backend server.', + exceptions.ResourceExhaustedError), + 500: ( + 'Instance ID "test_iid": Internal server error.', + exceptions.InternalError), + 503: ( + 'Instance ID "test_iid": Backend servers are over capacity. 
Try again later.', + exceptions.UnavailableError), +} + +class TestDeleteInstanceId: + + def teardown_method(self): + testutils.cleanup_apps() + + def _instrument_iid_service(self, app, status=200, payload='True'): + iid_service = instance_id._get_iid_service(app) + recorder = [] + iid_service._client.session.mount( + instance_id._IID_SERVICE_URL, + testutils.MockAdapter(payload, status, recorder)) + return iid_service, recorder + + def _assert_request(self, request, expected_method, expected_url): + assert request.method == expected_method + assert request.url == expected_url + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert request.headers['x-goog-api-client'] == expected_metrics_header + + def _get_url(self, project_id, iid): + return instance_id._IID_SERVICE_URL + f'project/{project_id}/instanceId/{iid}' + + def test_no_project_id(self): + def evaluate(): + firebase_admin.initialize_app(testutils.MockCredential()) + with pytest.raises(ValueError): + instance_id.delete_instance_id('test') + testutils.run_without_project_id(evaluate) + + def test_default_timeout(self): + cred = testutils.MockCredential() + app = firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + iid_service = instance_id._get_iid_service(app) + assert iid_service._client.timeout == _http_client.DEFAULT_TIMEOUT_SECONDS + + def test_delete_instance_id(self): + cred = testutils.MockCredential() + app = firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + _, recorder = self._instrument_iid_service(app) + instance_id.delete_instance_id('test_iid') + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'DELETE', self._get_url('explicit-project-id', 'test_iid')) + + def test_delete_instance_id_with_explicit_app(self): + cred = testutils.MockCredential() + app = firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + _, recorder = self._instrument_iid_service(app) + 
instance_id.delete_instance_id('test_iid', app) + assert len(recorder) == 1 + self._assert_request( + recorder[0], 'DELETE', self._get_url('explicit-project-id', 'test_iid')) + + @pytest.mark.parametrize('status', http_errors.keys()) + def test_delete_instance_id_error(self, status): + cred = testutils.MockCredential() + app = firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + _, recorder = self._instrument_iid_service(app, status, 'some error') + msg, exc = http_errors.get(status) + with pytest.raises(exc) as excinfo: + instance_id.delete_instance_id('test_iid') + assert str(excinfo.value) == msg + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + if status != 401: + assert len(recorder) == 1 + else: + # 401 responses are automatically retried by google-auth + assert len(recorder) == 3 + self._assert_request( + recorder[0], 'DELETE', self._get_url('explicit-project-id', 'test_iid')) + + def test_delete_instance_id_unexpected_error(self): + cred = testutils.MockCredential() + app = firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + _, recorder = self._instrument_iid_service(app, 501, 'some error') + with pytest.raises(exceptions.UnknownError) as excinfo: + instance_id.delete_instance_id('test_iid') + url = self._get_url('explicit-project-id', 'test_iid') + message = f'Instance ID "test_iid": 501 Server Error: None for url: {url}' + assert str(excinfo.value) == message + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + self._assert_request(recorder[0], 'DELETE', url) + + @pytest.mark.parametrize('iid', [None, '', 0, 1, True, False, [], {}, tuple()]) + def test_invalid_instance_id(self, iid): + cred = testutils.MockCredential() + app = firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + _, recorder = self._instrument_iid_service(app) + with pytest.raises(ValueError): + 
instance_id.delete_instance_id(iid) + assert len(recorder) == 0 diff --git a/tests/test_messaging.py b/tests/test_messaging.py new file mode 100644 index 000000000..b30790f14 --- /dev/null +++ b/tests/test_messaging.py @@ -0,0 +1,2441 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test cases for the firebase_admin.messaging module.""" +import datetime +from itertools import chain, repeat +import json +import numbers +import httpx +import respx + +import pytest + +import firebase_admin +from firebase_admin import exceptions +from firebase_admin import messaging +from firebase_admin import _http_client +from firebase_admin import _utils +from tests import testutils + + +NON_STRING_ARGS = [[], tuple(), {}, True, False, 1, 0] +NON_DICT_ARGS = ['', [], tuple(), True, False, 1, 0, {1: 'foo'}, {'foo': 1}] +NON_OBJECT_ARGS = [[], tuple(), {}, 'foo', 0, 1, True, False] +NON_LIST_ARGS = ['', tuple(), {}, True, False, 1, 0, [1], ['foo', 1]] +NON_UINT_ARGS = ['1.23s', [], tuple(), {}, -1.23] +NON_BOOL_ARGS = ['', [], tuple(), {}, 1, 0, [1], ['foo', 1], {1: 'foo'}, {'foo': 1}] +HTTP_ERROR_CODES = { + 400: exceptions.InvalidArgumentError, + 403: exceptions.PermissionDeniedError, + 404: exceptions.NotFoundError, + 500: exceptions.InternalError, + 503: exceptions.UnavailableError, +} +FCM_ERROR_CODES = { + 'APNS_AUTH_ERROR': messaging.ThirdPartyAuthError, + 'QUOTA_EXCEEDED': messaging.QuotaExceededError, + 'SENDER_ID_MISMATCH': 
messaging.SenderIdMismatchError, + 'THIRD_PARTY_AUTH_ERROR': messaging.ThirdPartyAuthError, + 'UNREGISTERED': messaging.UnregisteredError, +} + + +def check_encoding(msg, expected=None): + encoded = messaging._MessagingService.encode_message(msg) + if expected: + assert encoded == expected + +def check_exception(exception, message, status): + assert isinstance(exception, exceptions.FirebaseError) + assert str(exception) == message + assert exception.cause is not None + assert exception.http_response is not None + assert exception.http_response.status_code == status + + +class TestMessageStr: + + @pytest.mark.parametrize('msg', [ + messaging.Message(), + messaging.Message(topic='topic', token='token'), + messaging.Message(topic='topic', condition='condition'), + messaging.Message(condition='condition', token='token'), + messaging.Message(topic='topic', token='token', condition='condition'), + ]) + def test_invalid_target_message(self, msg): + with pytest.raises(ValueError) as excinfo: + str(msg) + assert str( + excinfo.value) == 'Exactly one of token, topic or condition must be specified.' + + def test_empty_message(self): + assert str(messaging.Message(token='value')) == '{"token": "value"}' + assert str(messaging.Message(topic='value')) == '{"topic": "value"}' + assert str(messaging.Message(condition='value') + ) == '{"condition": "value"}' + + def test_data_message(self): + assert str(messaging.Message(topic='topic', data={}) + ) == '{"topic": "topic"}' + assert str(messaging.Message(topic='topic', data={ + 'k1': 'v1', 'k2': 'v2'})) == '{"data": {"k1": "v1", "k2": "v2"}, "topic": "topic"}' + + +class TestMulticastMessage: + + @pytest.mark.parametrize('tokens', NON_LIST_ARGS) + def test_invalid_tokens_type(self, tokens): + with pytest.raises(ValueError) as excinfo: + messaging.MulticastMessage(tokens=tokens) + if isinstance(tokens, list): + expected = 'MulticastMessage.tokens must not contain non-string values.' 
+ assert str(excinfo.value) == expected + else: + expected = 'MulticastMessage.tokens must be a list of strings.' + assert str(excinfo.value) == expected + + def test_tokens_over_500(self): + with pytest.raises(ValueError) as excinfo: + messaging.MulticastMessage(tokens=['token' for _ in range(0, 501)]) + expected = 'MulticastMessage.tokens must not contain more than 500 tokens.' + assert str(excinfo.value) == expected + + def test_tokens_type(self): + message = messaging.MulticastMessage(tokens=['token']) + assert len(message.tokens) == 1 + + message = messaging.MulticastMessage(tokens=['token' for _ in range(0, 500)]) + assert len(message.tokens) == 500 + + +class TestMessageEncoder: + + @pytest.mark.parametrize('msg', [ + messaging.Message(), + messaging.Message(topic='topic', token='token'), + messaging.Message(topic='topic', condition='condition'), + messaging.Message(condition='condition', token='token'), + messaging.Message(topic='topic', token='token', condition='condition'), + ]) + def test_invalid_target_message(self, msg): + with pytest.raises(ValueError) as excinfo: + check_encoding(msg) + assert str(excinfo.value) == 'Exactly one of token, topic or condition must be specified.' + + @pytest.mark.parametrize('target', NON_STRING_ARGS + ['']) + def test_invalid_token(self, target): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message(token=target)) + assert str(excinfo.value) == 'Message.token must be a non-empty string.' + + @pytest.mark.parametrize('target', NON_STRING_ARGS + ['']) + def test_invalid_topic(self, target): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message(topic=target)) + assert str(excinfo.value) == 'Message.topic must be a non-empty string.' 
+ + @pytest.mark.parametrize('target', NON_STRING_ARGS + ['']) + def test_invalid_condition(self, target): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message(condition=target)) + assert str(excinfo.value) == 'Message.condition must be a non-empty string.' + + @pytest.mark.parametrize('topic', ['/topics/', '/foo/bar', 'foo bar']) + def test_malformed_topic_name(self, topic): + with pytest.raises(ValueError): + check_encoding(messaging.Message(topic=topic)) + + def test_empty_message(self): + check_encoding(messaging.Message(token='value'), {'token': 'value'}) + check_encoding(messaging.Message(topic='value'), {'topic': 'value'}) + check_encoding(messaging.Message(condition='value'), {'condition': 'value'}) + + @pytest.mark.parametrize('data', NON_DICT_ARGS) + def test_invalid_data_message(self, data): + with pytest.raises(ValueError): + check_encoding(messaging.Message(topic='topic', data=data)) + + def test_data_message(self): + check_encoding(messaging.Message(topic='topic', data={}), {'topic': 'topic'}) + check_encoding( + messaging.Message(topic='topic', data={'k1': 'v1', 'k2': 'v2'}), + {'topic': 'topic', 'data': {'k1': 'v1', 'k2': 'v2'}}) + + def test_prefixed_topic(self): + check_encoding(messaging.Message(topic='/topics/topic'), {'topic': 'topic'}) + + def test_fcm_options(self): + check_encoding( + messaging.Message( + topic='topic', fcm_options=messaging.FCMOptions('analytics_label_v1')), + {'topic': 'topic', 'fcm_options': {'analytics_label': 'analytics_label_v1'}}) + check_encoding( + messaging.Message(topic='topic', fcm_options=messaging.FCMOptions()), + {'topic': 'topic'}) + + +class TestNotificationEncoder: + + @pytest.mark.parametrize('data', NON_OBJECT_ARGS) + def test_invalid_notification(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', notification=data)) + expected = 'Message.notification must be an instance of Notification class.' 
+ assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_title(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', notification=messaging.Notification(title=data))) + assert str(excinfo.value) == 'Notification.title must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_body(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', notification=messaging.Notification(body=data))) + assert str(excinfo.value) == 'Notification.body must be a string.' + + def test_notification_message(self): + check_encoding( + messaging.Message(topic='topic', notification=messaging.Notification()), + {'topic': 'topic'}) + check_encoding( + messaging.Message(topic='topic', notification=messaging.Notification('t', 'b')), + {'topic': 'topic', 'notification': {'title': 't', 'body': 'b'}}) + check_encoding( + messaging.Message(topic='topic', notification=messaging.Notification('t')), + {'topic': 'topic', 'notification': {'title': 't'}}) + + +class TestFcmOptionEncoder: + + @pytest.mark.parametrize('label', [ + '!', + 'THIS_IS_LONGER_THAN_50_CHARACTERS_WHICH_IS_NOT_ALLOWED', + '', + ]) + def test_invalid_fcm_options(self, label): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', + fcm_options=messaging.FCMOptions(label) + )) + expected = 'Malformed FCMOptions.analytics_label.' 
+ assert str(excinfo.value) == expected + + def test_fcm_options(self): + check_encoding( + messaging.Message( + topic='topic', + fcm_options=messaging.FCMOptions(), + android=messaging.AndroidConfig(fcm_options=messaging.AndroidFCMOptions()), + apns=messaging.APNSConfig(fcm_options=messaging.APNSFCMOptions()) + ), + {'topic': 'topic'}) + check_encoding( + messaging.Message( + topic='topic', + fcm_options=messaging.FCMOptions('message-label'), + android=messaging.AndroidConfig( + fcm_options=messaging.AndroidFCMOptions('android-label'), + direct_boot_ok=False), + apns=messaging.APNSConfig(fcm_options= + messaging.APNSFCMOptions( + analytics_label='apns-label', + image='https://images.unsplash.com/photo-14944386399' + '46-1ebd1d20bf85?fit=crop&w=900&q=60')) + ), + { + 'topic': 'topic', + 'fcm_options': {'analytics_label': 'message-label'}, + 'android': {'fcm_options': {'analytics_label': 'android-label'}, + 'direct_boot_ok': False}, + 'apns': {'fcm_options': {'analytics_label': 'apns-label', + 'image': 'https://images.unsplash.com/photo-14944386399' + '46-1ebd1d20bf85?fit=crop&w=900&q=60'}}, + }) + + +class TestAndroidConfigEncoder: + + @pytest.mark.parametrize('data', NON_OBJECT_ARGS) + def test_invalid_android(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', android=data)) + expected = 'Message.android must be an instance of AndroidConfig class.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_collapse_key(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig(collapse_key=data))) + assert str(excinfo.value) == 'AndroidConfig.collapse_key must be a string.' 
+ + @pytest.mark.parametrize('data', NON_STRING_ARGS + ['foo']) + def test_invalid_priority(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig(priority=data))) + if isinstance(data, str): + assert str(excinfo.value) == 'AndroidConfig.priority must be "high" or "normal".' + else: + assert str(excinfo.value) == 'AndroidConfig.priority must be a non-empty string.' + + @pytest.mark.parametrize('data', NON_UINT_ARGS) + def test_invalid_ttl(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig(ttl=data))) + if isinstance(data, numbers.Number): + assert str(excinfo.value) == ('AndroidConfig.ttl must not be negative.') + else: + assert str(excinfo.value) == ('AndroidConfig.ttl must be a duration in seconds or ' + 'an instance of datetime.timedelta.') + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_package_name(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig(restricted_package_name=data))) + assert str(excinfo.value) == 'AndroidConfig.restricted_package_name must be a string.' 
+ + @pytest.mark.parametrize('data', NON_DICT_ARGS) + def test_invalid_data(self, data): + with pytest.raises(ValueError): + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig(data=data))) + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_analytics_label(self, data): + with pytest.raises(ValueError): + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig( + fcm_options=messaging.AndroidFCMOptions(analytics_label=data)))) + + @pytest.mark.parametrize('data', NON_BOOL_ARGS) + def test_invalid_direct_boot_ok(self, data): + with pytest.raises(ValueError): + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig(direct_boot_ok=data))) + + @pytest.mark.parametrize('data', NON_BOOL_ARGS) + def test_invalid_bandwidth_constrained_ok(self, data): + with pytest.raises(ValueError): + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig(bandwidth_constrained_ok=data))) + + @pytest.mark.parametrize('data', NON_BOOL_ARGS) + def test_invalid_restricted_satellite_ok(self, data): + with pytest.raises(ValueError): + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig(restricted_satellite_ok=data))) + + + def test_android_config(self): + msg = messaging.Message( + topic='topic', + android=messaging.AndroidConfig( + collapse_key='key', + restricted_package_name='package', + priority='high', + ttl=123, + data={'k1': 'v1', 'k2': 'v2'}, + fcm_options=messaging.AndroidFCMOptions('analytics_label_v1'), + direct_boot_ok=True, + bandwidth_constrained_ok=True, + restricted_satellite_ok=True, + ) + ) + expected = { + 'topic': 'topic', + 'android': { + 'collapse_key': 'key', + 'restricted_package_name': 'package', + 'priority': 'high', + 'ttl': '123s', + 'data': { + 'k1': 'v1', + 'k2': 'v2', + }, + 'fcm_options': { + 'analytics_label': 'analytics_label_v1', + }, + 'direct_boot_ok': True, + 'bandwidth_constrained_ok': True, + 
'restricted_satellite_ok': True, + }, + } + check_encoding(msg, expected) + + @pytest.mark.parametrize('ttl', [ + (0.5, '0.500000000s'), + (123, '123s'), + (123.45, '123.450000000s'), + (datetime.timedelta(days=1, seconds=100), '86500s'), + ]) + def test_android_ttl(self, ttl): + msg = messaging.Message( + topic='topic', + android=messaging.AndroidConfig(ttl=ttl[0]) + ) + expected = { + 'topic': 'topic', + 'android': { + 'ttl': ttl[1], + }, + } + check_encoding(msg, expected) + + +class TestAndroidNotificationEncoder: + + def _check_notification(self, notification): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig(notification=notification))) + return excinfo + + @pytest.mark.parametrize('data', NON_OBJECT_ARGS) + def test_invalid_android_notification(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig(notification=data))) + expected = 'AndroidConfig.notification must be an instance of AndroidNotification class.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_title(self, data): + notification = messaging.AndroidNotification(title=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'AndroidNotification.title must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_body(self, data): + notification = messaging.AndroidNotification(body=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'AndroidNotification.body must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_icon(self, data): + notification = messaging.AndroidNotification(icon=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'AndroidNotification.icon must be a string.' 
+ + @pytest.mark.parametrize('data', NON_STRING_ARGS + ['foo', '#xxyyzz', '112233', '#11223']) + def test_invalid_color(self, data): + notification = messaging.AndroidNotification(color=data) + excinfo = self._check_notification(notification) + if isinstance(data, str): + assert str(excinfo.value) == 'AndroidNotification.color must be in the form #RRGGBB.' + else: + assert str(excinfo.value) == 'AndroidNotification.color must be a non-empty string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_sound(self, data): + notification = messaging.AndroidNotification(sound=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'AndroidNotification.sound must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_tag(self, data): + notification = messaging.AndroidNotification(tag=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'AndroidNotification.tag must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_click_action(self, data): + notification = messaging.AndroidNotification(click_action=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'AndroidNotification.click_action must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_title_loc_key(self, data): + notification = messaging.AndroidNotification(title_loc_key=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'AndroidNotification.title_loc_key must be a string.' + + @pytest.mark.parametrize('data', NON_LIST_ARGS) + def test_invalid_title_loc_args(self, data): + notification = messaging.AndroidNotification(title_loc_key='foo', title_loc_args=data) + excinfo = self._check_notification(notification) + if isinstance(data, list): + expected = 'AndroidNotification.title_loc_args must not contain non-string values.' 
+ assert str(excinfo.value) == expected + else: + expected = 'AndroidNotification.title_loc_args must be a list of strings.' + assert str(excinfo.value) == expected + + def test_no_title_loc_key(self): + notification = messaging.AndroidNotification(title_loc_args=['foo']) + excinfo = self._check_notification(notification) + expected = 'AndroidNotification.title_loc_key is required when specifying title_loc_args.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_body_loc_key(self, data): + notification = messaging.AndroidNotification(body_loc_key=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'AndroidNotification.body_loc_key must be a string.' + + @pytest.mark.parametrize('data', NON_LIST_ARGS) + def test_invalid_body_loc_args(self, data): + notification = messaging.AndroidNotification(body_loc_key='foo', body_loc_args=data) + excinfo = self._check_notification(notification) + if isinstance(data, list): + expected = 'AndroidNotification.body_loc_args must not contain non-string values.' + assert str(excinfo.value) == expected + else: + expected = 'AndroidNotification.body_loc_args must be a list of strings.' + assert str(excinfo.value) == expected + + def test_no_body_loc_key(self): + notification = messaging.AndroidNotification(body_loc_args=['foo']) + excinfo = self._check_notification(notification) + expected = 'AndroidNotification.body_loc_key is required when specifying body_loc_args.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_channel_id(self, data): + notification = messaging.AndroidNotification(channel_id=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'AndroidNotification.channel_id must be a string.' 
+ + @pytest.mark.parametrize('timestamp', [100, '', 'foo', {}, []]) + def test_invalid_event_timestamp(self, timestamp): + notification = messaging.AndroidNotification(event_timestamp=timestamp) + excinfo = self._check_notification(notification) + expected = 'AndroidNotification.event_timestamp must be a datetime.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('priority', NON_STRING_ARGS + ['foo']) + def test_invalid_priority(self, priority): + notification = messaging.AndroidNotification(priority=priority) + excinfo = self._check_notification(notification) + if isinstance(priority, str): + if not priority: + expected = 'AndroidNotification.priority must be a non-empty string.' + else: + expected = ('AndroidNotification.priority must be "default", "min", "low", "high" ' + 'or "max".') + else: + expected = 'AndroidNotification.priority must be a non-empty string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('visibility', NON_STRING_ARGS + ['foo']) + def test_invalid_visibility(self, visibility): + notification = messaging.AndroidNotification(visibility=visibility) + excinfo = self._check_notification(notification) + if isinstance(visibility, str): + if not visibility: + expected = 'AndroidNotification.visibility must be a non-empty string.' + else: + expected = ('AndroidNotification.visibility must be "private", "public" or' + ' "secret".') + else: + expected = 'AndroidNotification.visibility must be a non-empty string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('proxy', NON_STRING_ARGS + ['foo']) + def test_invalid_proxy(self, proxy): + notification = messaging.AndroidNotification(proxy=proxy) + excinfo = self._check_notification(notification) + if isinstance(proxy, str): + if not proxy: + expected = 'AndroidNotification.proxy must be a non-empty string.' 
+ else: + expected = ('AndroidNotification.proxy must be "allow", "deny" or' + ' "if_priority_lowered".') + else: + expected = 'AndroidNotification.proxy must be a non-empty string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('vibrate_timings', ['', 1, True, 'msec', ['500', 500], [0, 'abc']]) + def test_invalid_vibrate_timings_millis(self, vibrate_timings): + notification = messaging.AndroidNotification(vibrate_timings_millis=vibrate_timings) + excinfo = self._check_notification(notification) + if isinstance(vibrate_timings, list): + expected = ('AndroidNotification.vibrate_timings_millis must not contain non-number ' + 'values.') + else: + expected = 'AndroidNotification.vibrate_timings_millis must be a list of numbers.' + assert str(excinfo.value) == expected + + def test_negative_vibrate_timings_millis(self): + notification = messaging.AndroidNotification( + vibrate_timings_millis=[100, -20, 15]) + excinfo = self._check_notification(notification) + expected = 'AndroidNotification.vibrate_timings_millis must not be negative.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('notification_count', ['', 'foo', [], tuple(), {}]) + def test_invalid_notification_count(self, notification_count): + notification = messaging.AndroidNotification(notification_count=notification_count) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'AndroidNotification.notification_count must be a number.' 
+ + def test_android_notification(self): + msg = messaging.Message( + topic='topic', + android=messaging.AndroidConfig( + notification=messaging.AndroidNotification( + title='t', body='b', icon='i', color='#112233', sound='s', tag='t', + click_action='ca', title_loc_key='tlk', body_loc_key='blk', + title_loc_args=['t1', 't2'], body_loc_args=['b1', 'b2'], channel_id='c', + ticker='ticker', sticky=True, + event_timestamp=datetime.datetime( + 2019, 10, 20, 15, 12, 23, 123, + tzinfo=datetime.timezone(datetime.timedelta(hours=-5)) + ), + local_only=False, + priority='high', vibrate_timings_millis=[100, 50, 250], + default_vibrate_timings=False, default_sound=True, + light_settings=messaging.LightSettings( + color='#AABBCCDD', light_on_duration_millis=200, + light_off_duration_millis=300, + ), + default_light_settings=False, visibility='public', notification_count=1, + proxy='if_priority_lowered', + ) + ) + ) + expected = { + 'topic': 'topic', + 'android': { + 'notification': { + 'title': 't', + 'body': 'b', + 'icon': 'i', + 'color': '#112233', + 'sound': 's', + 'tag': 't', + 'click_action': 'ca', + 'title_loc_key': 'tlk', + 'body_loc_key': 'blk', + 'title_loc_args': ['t1', 't2'], + 'body_loc_args': ['b1', 'b2'], + 'channel_id': 'c', + 'ticker': 'ticker', + 'sticky': True, + 'event_time': '2019-10-20T20:12:23.000123Z', + 'local_only': False, + 'notification_priority': 'PRIORITY_HIGH', + 'vibrate_timings': ['0.100000000s', '0.050000000s', '0.250000000s'], + 'default_vibrate_timings': False, + 'default_sound': 1, + 'light_settings': { + 'color': { + 'red': 0.6666666666666666, + 'green': 0.7333333333333333, + 'blue': 0.8, + 'alpha': 0.8666666666666667, + }, + 'light_on_duration': '0.200000000s', + 'light_off_duration': '0.300000000s', + }, + 'default_light_settings': False, + 'visibility': 'PUBLIC', + 'notification_count': 1, + 'proxy': 'IF_PRIORITY_LOWERED' + }, + }, + } + check_encoding(msg, expected) + + def test_android_notification_naive_event_timestamp(self): + 
event_time = datetime.datetime.now() + msg = messaging.Message( + topic='topic', + android=messaging.AndroidConfig( + notification=messaging.AndroidNotification( + title='t', + event_timestamp=event_time, + ) + ) + ) + expected = { + 'topic': 'topic', + 'android': { + 'notification': { + 'title': 't', + 'event_time': event_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + }, + }, + } + check_encoding(msg, expected) + + +class TestLightSettingsEncoder: + + def _check_light_settings(self, light_settings): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig( + notification=messaging.AndroidNotification( + light_settings=light_settings + )))) + return excinfo + + @pytest.mark.parametrize('data', NON_OBJECT_ARGS) + def test_invalid_light_settings(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', android=messaging.AndroidConfig( + notification=messaging.AndroidNotification( + light_settings=data + )))) + expected = 'AndroidNotification.light_settings must be an instance of LightSettings class.' + assert str(excinfo.value) == expected + + def test_no_color(self): + light_settings = messaging.LightSettings(color=None, light_on_duration_millis=200, + light_off_duration_millis=200) + excinfo = self._check_light_settings(light_settings) + expected = 'LightSettings.color is required.' + assert str(excinfo.value) == expected + + def test_no_light_on_duration_millis(self): + light_settings = messaging.LightSettings(color='#aabbcc', light_on_duration_millis=None, + light_off_duration_millis=200) + excinfo = self._check_light_settings(light_settings) + expected = 'LightSettings.light_on_duration_millis is required.' 
+ assert str(excinfo.value) == expected + + def test_no_light_off_duration_millis(self): + light_settings = messaging.LightSettings(color='#aabbcc', light_on_duration_millis=200, + light_off_duration_millis=None) + excinfo = self._check_light_settings(light_settings) + expected = 'LightSettings.light_off_duration_millis is required.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_UINT_ARGS) + def test_invalid_light_off_duration_millis(self, data): + light_settings = messaging.LightSettings(color='#aabbcc', + light_on_duration_millis=200, + light_off_duration_millis=data) + excinfo = self._check_light_settings(light_settings) + if isinstance(data, numbers.Number): + assert str(excinfo.value) == ('LightSettings.light_off_duration_millis must not be ' + 'negative.') + else: + assert str(excinfo.value) == ('LightSettings.light_off_duration_millis must be a ' + 'duration in milliseconds or ' + 'an instance of datetime.timedelta.') + + @pytest.mark.parametrize('data', NON_UINT_ARGS) + def test_invalid_light_on_duration_millis(self, data): + light_settings = messaging.LightSettings(color='#aabbcc', + light_on_duration_millis=data, + light_off_duration_millis=200) + excinfo = self._check_light_settings(light_settings) + if isinstance(data, numbers.Number): + assert str(excinfo.value) == ('LightSettings.light_on_duration_millis must not be ' + 'negative.') + else: + assert str(excinfo.value) == ('LightSettings.light_on_duration_millis must be a ' + 'duration in milliseconds or ' + 'an instance of datetime.timedelta.') + + @pytest.mark.parametrize('data', NON_STRING_ARGS + ['foo', '#xxyyzz', '112233', '#11223']) + def test_invalid_color(self, data): + notification = messaging.LightSettings(color=data, light_on_duration_millis=300, + light_off_duration_millis=200) + excinfo = self._check_light_settings(notification) + if isinstance(data, str): + assert str(excinfo.value) == ('LightSettings.color must be in the form #RRGGBB or ' + 
'#RRGGBBAA.') + else: + assert str( + excinfo.value) == 'LightSettings.color must be a non-empty string.' + + def test_light_settings(self): + msg = messaging.Message( + topic='topic', android=messaging.AndroidConfig( + notification=messaging.AndroidNotification( + light_settings=messaging.LightSettings( + color="#aabbcc", + light_on_duration_millis=200, + light_off_duration_millis=300, + ) + )) + ) + expected = { + 'topic': 'topic', + 'android': { + 'notification': { + 'light_settings': { + 'color': { + 'red': 0.6666666666666666, + 'green': 0.7333333333333333, + 'blue': 0.8, + 'alpha': 1, + }, + 'light_on_duration': '0.200000000s', + 'light_off_duration': '0.300000000s', + } + }, + }, + } + check_encoding(msg, expected) + + +class TestWebpushConfigEncoder: + + @pytest.mark.parametrize('data', NON_OBJECT_ARGS) + def test_invalid_webpush(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', webpush=data)) + expected = 'Message.webpush must be an instance of WebpushConfig class.' 
+ assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_DICT_ARGS) + def test_invalid_headers(self, data): + with pytest.raises(ValueError): + check_encoding(messaging.Message( + topic='topic', webpush=messaging.WebpushConfig(headers=data))) + + @pytest.mark.parametrize('data', NON_DICT_ARGS) + def test_invalid_data(self, data): + with pytest.raises(ValueError): + check_encoding(messaging.Message( + topic='topic', webpush=messaging.WebpushConfig(data=data))) + + def test_webpush_config(self): + msg = messaging.Message( + topic='topic', + webpush=messaging.WebpushConfig( + headers={'h1': 'v1', 'h2': 'v2'}, + data={'k1': 'v1', 'k2': 'v2'}, + ) + ) + expected = { + 'topic': 'topic', + 'webpush': { + 'headers': { + 'h1': 'v1', + 'h2': 'v2', + }, + 'data': { + 'k1': 'v1', + 'k2': 'v2', + }, + }, + } + check_encoding(msg, expected) + + +class TestWebpushFCMOptionsEncoder: + + @pytest.mark.parametrize('data', NON_OBJECT_ARGS) + def test_invalid_webpush_fcm_options(self, data): + with pytest.raises(AttributeError): + check_encoding(messaging.Message( + topic='topic', webpush=messaging.WebpushConfig(fcm_options=data))) + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_link_type(self, data): + options = messaging.WebpushFCMOptions(link=data) + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', webpush=messaging.WebpushConfig(fcm_options=options))) + expected = 'WebpushConfig.fcm_options.link must be a string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', ['', 'foo', 'http://example']) + def test_invalid_link_format(self, data): + options = messaging.WebpushFCMOptions(link=data) + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', webpush=messaging.WebpushConfig(fcm_options=options))) + expected = 'WebpushFCMOptions.link must be a HTTPS URL.' 
+ assert str(excinfo.value) == expected + + def test_webpush_options(self): + msg = messaging.Message( + topic='topic', + webpush=messaging.WebpushConfig( + fcm_options=messaging.WebpushFCMOptions( + link='https://example', + ), + ) + ) + expected = { + 'topic': 'topic', + 'webpush': { + 'fcm_options': { + 'link': 'https://example', + }, + }, + } + check_encoding(msg, expected) + + +class TestWebpushNotificationEncoder: + + def _check_notification(self, notification): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', webpush=messaging.WebpushConfig(notification=notification))) + return excinfo + + @pytest.mark.parametrize('data', NON_OBJECT_ARGS) + def test_invalid_webpush_notification(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', webpush=messaging.WebpushConfig(notification=data))) + expected = 'WebpushConfig.notification must be an instance of WebpushNotification class.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_title(self, data): + notification = messaging.WebpushNotification(title=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotification.title must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_body(self, data): + notification = messaging.WebpushNotification(body=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotification.body must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_icon(self, data): + notification = messaging.WebpushNotification(icon=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotification.icon must be a string.' 
+ + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_badge(self, data): + notification = messaging.WebpushNotification(badge=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotification.badge must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS + ['foo']) + def test_invalid_direction(self, data): + notification = messaging.WebpushNotification(direction=data) + excinfo = self._check_notification(notification) + if isinstance(data, str): + assert str(excinfo.value) == ('WebpushNotification.direction must be "auto", ' + '"ltr" or "rtl".') + else: + assert str(excinfo.value) == 'WebpushNotification.direction must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_image(self, data): + notification = messaging.WebpushNotification(image=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotification.image must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_language(self, data): + notification = messaging.WebpushNotification(language=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotification.language must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_tag(self, data): + notification = messaging.WebpushNotification(tag=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotification.tag must be a string.' + + @pytest.mark.parametrize('data', ['', 'foo', [], tuple(), {}]) + def test_invalid_timestamp(self, data): + notification = messaging.WebpushNotification(timestamp_millis=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotification.timestamp_millis must be a number.' 
+ + @pytest.mark.parametrize('data', ['', [], tuple(), True, False, 1, 0]) + def test_invalid_custom_data(self, data): + notification = messaging.WebpushNotification(custom_data=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotification.custom_data must be a dict.' + + @pytest.mark.parametrize('data', ['', {}, tuple(), True, False, 1, 0, [1, 2]]) + def test_invalid_actions(self, data): + notification = messaging.WebpushNotification(actions=data) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == ('WebpushConfig.notification.actions must be a list of ' + 'WebpushNotificationAction instances.') + + def test_webpush_notification(self): + msg = messaging.Message( + topic='topic', + webpush=messaging.WebpushConfig( + notification=messaging.WebpushNotification( + badge='badge', + body='body', + data={'foo': 'bar'}, + icon='icon', + image='image', + language='language', + renotify=True, + require_interaction=True, + silent=True, + tag='tag', + timestamp_millis=100, + title='title', + vibrate=[100, 200, 100], + custom_data={'k1': 'v1', 'k2': 'v2'}, + ), + ) + ) + expected = { + 'topic': 'topic', + 'webpush': { + 'notification': { + 'badge': 'badge', + 'body': 'body', + 'data': {'foo': 'bar'}, + 'icon': 'icon', + 'image': 'image', + 'lang': 'language', + 'renotify': True, + 'requireInteraction': True, + 'silent': True, + 'tag': 'tag', + 'timestamp': 100, + 'vibrate': [100, 200, 100], + 'title': 'title', + 'k1': 'v1', + 'k2': 'v2', + }, + }, + } + check_encoding(msg, expected) + + def test_multiple_field_specifications(self): + notification = messaging.WebpushNotification( + badge='badge', + custom_data={'badge': 'other badge'}, + ) + excinfo = self._check_notification(notification) + expected = 'Multiple specifications for badge in WebpushNotification.' 
+ assert str(excinfo.value) == expected + + def test_webpush_notification_action(self): + msg = messaging.Message( + topic='topic', + webpush=messaging.WebpushConfig( + notification=messaging.WebpushNotification( + actions=[ + messaging.WebpushNotificationAction( + action='a1', + title='t1', + ), + messaging.WebpushNotificationAction( + action='a2', + title='t2', + icon='i2', + ), + ], + ), + ) + ) + expected = { + 'topic': 'topic', + 'webpush': { + 'notification': { + 'actions': [ + { + 'action': 'a1', + 'title': 't1', + }, + { + 'action': 'a2', + 'title': 't2', + 'icon': 'i2', + }, + ], + }, + }, + } + check_encoding(msg, expected) + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_action_name(self, data): + action = messaging.WebpushNotificationAction(action=data, title='title') + notification = messaging.WebpushNotification(actions=[action]) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotificationAction.action must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_action_title(self, data): + action = messaging.WebpushNotificationAction(action='action', title=data) + notification = messaging.WebpushNotification(actions=[action]) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotificationAction.title must be a string.' + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_action_icon(self, data): + action = messaging.WebpushNotificationAction(action='action', title='title', icon=data) + notification = messaging.WebpushNotification(actions=[action]) + excinfo = self._check_notification(notification) + assert str(excinfo.value) == 'WebpushNotificationAction.icon must be a string.' 
+ + +class TestAPNSConfigEncoder: + + @pytest.mark.parametrize('data', NON_OBJECT_ARGS) + def test_invalid_apns(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', apns=data)) + expected = 'Message.apns must be an instance of APNSConfig class.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_DICT_ARGS) + def test_invalid_headers(self, data): + with pytest.raises(ValueError): + check_encoding(messaging.Message( + topic='topic', apns=messaging.APNSConfig(headers=data))) + + def test_apns_config(self): + msg = messaging.Message( + topic='topic', + apns=messaging.APNSConfig( + headers={'h1': 'v1', 'h2': 'v2'}, + fcm_options=messaging.APNSFCMOptions('analytics_label_v1'), + live_activity_token='test_token_string' + ), + ) + expected = { + 'topic': 'topic', + 'apns': { + 'headers': { + 'h1': 'v1', + 'h2': 'v2', + }, + 'fcm_options': { + 'analytics_label': 'analytics_label_v1', + }, + 'live_activity_token': 'test_token_string', + }, + } + check_encoding(msg, expected) + + +class TestAPNSPayloadEncoder: + + @pytest.mark.parametrize('data', NON_OBJECT_ARGS) + def test_invalid_payload(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', apns=messaging.APNSConfig(payload=data))) + expected = 'APNSConfig.payload must be an instance of APNSPayload class.' 
+ assert str(excinfo.value) == expected + + def test_apns_payload(self): + msg = messaging.Message( + topic='topic', + apns=messaging.APNSConfig(payload=messaging.APNSPayload( + aps=messaging.Aps(alert='alert text'), + k1='v1', + k2=True + )) + ) + expected = { + 'topic': 'topic', + 'apns': { + 'payload': { + 'aps': { + 'alert': 'alert text', + }, + 'k1': 'v1', + 'k2': True, + }, + }, + } + check_encoding(msg, expected) + + +class TestApsEncoder: + + def _encode_aps(self, aps): + return check_encoding(messaging.Message( + topic='topic', apns=messaging.APNSConfig(payload=messaging.APNSPayload(aps=aps)))) + + @pytest.mark.parametrize('data', NON_OBJECT_ARGS) + def test_invalid_aps(self, data): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', + apns=messaging.APNSConfig(payload=messaging.APNSPayload(aps=data)))) + expected = 'APNSPayload.aps must be an instance of Aps class.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_alert(self, data): + aps = messaging.Aps(alert=data) + with pytest.raises(ValueError) as excinfo: + self._encode_aps(aps) + expected = 'Aps.alert must be a string or an instance of ApsAlert class.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', [[], tuple(), {}, 'foo']) + def test_invalid_badge(self, data): + aps = messaging.Aps(badge=data) + with pytest.raises(ValueError) as excinfo: + self._encode_aps(aps) + expected = 'Aps.badge must be a number.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS + ['']) + def test_invalid_sound(self, data): + aps = messaging.Aps(sound=data) + with pytest.raises(ValueError) as excinfo: + self._encode_aps(aps) + expected = 'Aps.sound must be a non-empty string or an instance of CriticalSound class.' 
+ assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_category(self, data): + aps = messaging.Aps(category=data) + with pytest.raises(ValueError) as excinfo: + self._encode_aps(aps) + expected = 'Aps.category must be a string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_thread_id(self, data): + aps = messaging.Aps(thread_id=data) + with pytest.raises(ValueError) as excinfo: + self._encode_aps(aps) + expected = 'Aps.thread_id must be a string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', ['', [], tuple(), True, False, 1, 0, ]) + def test_invalid_custom_data_dict(self, data): + if isinstance(data, dict): + return + aps = messaging.Aps(custom_data=data) + with pytest.raises(ValueError) as excinfo: + self._encode_aps(aps) + expected = 'Aps.custom_data must be a dict.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', [True, False, 1, 0]) + def test_invalid_custom_field_name(self, data): + aps = messaging.Aps(custom_data={data: 'foo'}) + with pytest.raises(ValueError) as excinfo: + self._encode_aps(aps) + expected = 'Aps.custom_data key must be a string.' + assert str(excinfo.value) == expected + + def test_multiple_field_specifications(self): + aps = messaging.Aps(thread_id='foo', custom_data={'thread-id': 'foo'}) + with pytest.raises(ValueError) as excinfo: + self._encode_aps(aps) + expected = 'Multiple specifications for thread-id in Aps.' 
+ assert str(excinfo.value) == expected + + def test_aps(self): + msg = messaging.Message( + topic='topic', + apns=messaging.APNSConfig( + payload=messaging.APNSPayload( + aps=messaging.Aps( + alert='alert text', + badge=42, + sound='s', + content_available=True, + mutable_content=True, + category='c', + thread_id='t' + ), + ) + ) + ) + expected = { + 'topic': 'topic', + 'apns': { + 'payload': { + 'aps': { + 'alert': 'alert text', + 'badge': 42, + 'sound': 's', + 'content-available': 1, + 'mutable-content': 1, + 'category': 'c', + 'thread-id': 't', + }, + } + }, + } + check_encoding(msg, expected) + + def test_aps_custom_data(self): + msg = messaging.Message( + topic='topic', + apns=messaging.APNSConfig( + payload=messaging.APNSPayload( + aps=messaging.Aps( + alert='alert text', + custom_data={'k1': 'v1', 'k2': 1}, + ), + ) + ) + ) + expected = { + 'topic': 'topic', + 'apns': { + 'payload': { + 'aps': { + 'alert': 'alert text', + 'k1': 'v1', + 'k2': 1, + }, + } + }, + } + check_encoding(msg, expected) + + +class TestApsSoundEncoder: + + def _check_sound(self, sound): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', apns=messaging.APNSConfig( + payload=messaging.APNSPayload(aps=messaging.Aps(sound=sound)) + ) + )) + return excinfo + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_name(self, data): + sound = messaging.CriticalSound(name=data) + excinfo = self._check_sound(sound) + expected = 'CriticalSound.name must be a non-empty string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', [[], tuple(), {}, 'foo']) + def test_invalid_volume(self, data): + sound = messaging.CriticalSound(name='default', volume=data) + excinfo = self._check_sound(sound) + expected = 'CriticalSound.volume must be a number.' 
+ assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', [-0.1, 1.1]) + def test_volume_out_of_range(self, data): + sound = messaging.CriticalSound(name='default', volume=data) + excinfo = self._check_sound(sound) + expected = 'CriticalSound.volume must be in the interval [0,1].' + assert str(excinfo.value) == expected + + def test_sound_string(self): + msg = messaging.Message( + topic='topic', + apns=messaging.APNSConfig( + payload=messaging.APNSPayload(aps=messaging.Aps(sound='default')) + ) + ) + expected = { + 'topic': 'topic', + 'apns': { + 'payload': { + 'aps': { + 'sound': 'default', + }, + } + }, + } + check_encoding(msg, expected) + + def test_critical_sound(self): + msg = messaging.Message( + topic='topic', + apns=messaging.APNSConfig( + payload=messaging.APNSPayload( + aps=messaging.Aps( + sound=messaging.CriticalSound( + name='default', + critical=True, + volume=0.5 + ) + ), + ) + ) + ) + expected = { + 'topic': 'topic', + 'apns': { + 'payload': { + 'aps': { + 'sound': { + 'name': 'default', + 'critical': 1, + 'volume': 0.5, + }, + }, + } + }, + } + check_encoding(msg, expected) + + def test_critical_sound_name_only(self): + msg = messaging.Message( + topic='topic', + apns=messaging.APNSConfig( + payload=messaging.APNSPayload( + aps=messaging.Aps( + sound=messaging.CriticalSound(name='default') + ), + ) + ) + ) + expected = { + 'topic': 'topic', + 'apns': { + 'payload': { + 'aps': { + 'sound': { + 'name': 'default', + }, + }, + } + }, + } + check_encoding(msg, expected) + + +class TestApsAlertEncoder: + + def _check_alert(self, alert): + with pytest.raises(ValueError) as excinfo: + check_encoding(messaging.Message( + topic='topic', apns=messaging.APNSConfig( + payload=messaging.APNSPayload(aps=messaging.Aps(alert=alert)) + ) + )) + return excinfo + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_title(self, data): + alert = messaging.ApsAlert(title=data) + excinfo = self._check_alert(alert) + expected = 
'ApsAlert.title must be a string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_subtitle(self, data): + alert = messaging.ApsAlert(subtitle=data) + excinfo = self._check_alert(alert) + expected = 'ApsAlert.subtitle must be a string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_body(self, data): + alert = messaging.ApsAlert(body=data) + excinfo = self._check_alert(alert) + expected = 'ApsAlert.body must be a string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_title_loc_key(self, data): + alert = messaging.ApsAlert(title_loc_key=data) + excinfo = self._check_alert(alert) + expected = 'ApsAlert.title_loc_key must be a string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_loc_key(self, data): + alert = messaging.ApsAlert(loc_key=data) + excinfo = self._check_alert(alert) + expected = 'ApsAlert.loc_key must be a string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_action_loc_key(self, data): + alert = messaging.ApsAlert(action_loc_key=data) + excinfo = self._check_alert(alert) + expected = 'ApsAlert.action_loc_key must be a string.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_STRING_ARGS) + def test_invalid_launch_image(self, data): + alert = messaging.ApsAlert(launch_image=data) + excinfo = self._check_alert(alert) + expected = 'ApsAlert.launch_image must be a string.' 
+ assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_LIST_ARGS) + def test_invalid_title_loc_args(self, data): + alert = messaging.ApsAlert(title_loc_key='foo', title_loc_args=data) + excinfo = self._check_alert(alert) + if isinstance(data, list): + expected = 'ApsAlert.title_loc_args must not contain non-string values.' + assert str(excinfo.value) == expected + else: + expected = 'ApsAlert.title_loc_args must be a list of strings.' + assert str(excinfo.value) == expected + + def test_no_title_loc_key(self): + alert = messaging.ApsAlert(title_loc_args=['foo']) + excinfo = self._check_alert(alert) + expected = 'ApsAlert.title_loc_key is required when specifying title_loc_args.' + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('data', NON_LIST_ARGS) + def test_invalid_loc_args(self, data): + alert = messaging.ApsAlert(loc_key='foo', loc_args=data) + excinfo = self._check_alert(alert) + if isinstance(data, list): + expected = 'ApsAlert.loc_args must not contain non-string values.' + assert str(excinfo.value) == expected + else: + expected = 'ApsAlert.loc_args must be a list of strings.' + assert str(excinfo.value) == expected + + def test_no_loc_key(self): + alert = messaging.ApsAlert(loc_args=['foo']) + excinfo = self._check_alert(alert) + expected = 'ApsAlert.loc_key is required when specifying loc_args.' 
+ assert str(excinfo.value) == expected + + def test_aps_alert(self): + msg = messaging.Message( + topic='topic', + apns=messaging.APNSConfig( + payload=messaging.APNSPayload( + aps=messaging.Aps( + alert=messaging.ApsAlert( + title='t', + subtitle='st', + body='b', + title_loc_key='tlk', + title_loc_args=['t1', 't2'], + loc_key='lk', + loc_args=['l1', 'l2'], + action_loc_key='alk', + launch_image='li' + ) + ), + ) + ) + ) + expected = { + 'topic': 'topic', + 'apns': { + 'payload': { + 'aps': { + 'alert': { + 'title': 't', + 'subtitle': 'st', + 'body': 'b', + 'title-loc-key': 'tlk', + 'title-loc-args': ['t1', 't2'], + 'loc-key': 'lk', + 'loc-args': ['l1', 'l2'], + 'action-loc-key': 'alk', + 'launch-image': 'li', + }, + }, + } + }, + } + check_encoding(msg, expected) + + def test_aps_alert_custom_data_merge(self): + msg = messaging.Message( + topic='topic', + apns=messaging.APNSConfig( + payload=messaging.APNSPayload( + aps=messaging.Aps( + alert=messaging.ApsAlert( + title='t', + subtitle='st', + custom_data={'k1': 'v1', 'k2': 'v2'} + ) + ), + ) + ) + ) + expected = { + 'topic': 'topic', + 'apns': { + 'payload': { + 'aps': { + 'alert': { + 'title': 't', + 'subtitle': 'st', + 'k1': 'v1', + 'k2': 'v2' + }, + }, + } + }, + } + check_encoding(msg, expected) + + def test_aps_alert_custom_data_override(self): + msg = messaging.Message( + topic='topic', + apns=messaging.APNSConfig( + payload=messaging.APNSPayload( + aps=messaging.Aps( + alert=messaging.ApsAlert( + title='t', + subtitle='st', + launch_image='li', + custom_data={'launch-image': ['li1', 'li2']} + ) + ), + ) + ) + ) + expected = { + 'topic': 'topic', + 'apns': { + 'payload': { + 'aps': { + 'alert': { + 'title': 't', + 'subtitle': 'st', + 'launch-image': [ + 'li1', + 'li2' + ] + }, + }, + } + }, + } + check_encoding(msg, expected) + + +class TestTimeout: + + def teardown_method(self): + testutils.cleanup_apps() + + def _instrument_service(self, url, response): + app = firebase_admin.get_app() + fcm_service = 
messaging._get_messaging_service(app) + recorder = [] + fcm_service._client.session.mount( + url, testutils.MockAdapter(json.dumps(response), 200, recorder)) + return recorder + + def _check_timeout(self, recorder, timeout): + assert len(recorder) == 1 + if timeout is None: + assert recorder[0]._extra_kwargs['timeout'] is None + else: + assert recorder[0]._extra_kwargs['timeout'] == pytest.approx(timeout, 0.001) + + @pytest.mark.parametrize('options, timeout', [ + ({'httpTimeout': 4}, 4), + ({'httpTimeout': None}, None), + ({}, _http_client.DEFAULT_TIMEOUT_SECONDS), + ]) + def test_send(self, options, timeout): + cred = testutils.MockCredential() + all_options = {'projectId': 'explicit-project-id'} + all_options.update(options) + firebase_admin.initialize_app(cred, all_options) + recorder = self._instrument_service( + 'https://fcm.googleapis.com', {'name': 'message-id'}) + msg = messaging.Message(topic='foo') + messaging.send(msg) + self._check_timeout(recorder, timeout) + + @pytest.mark.parametrize('options, timeout', [ + ({'httpTimeout': 4}, 4), + ({'httpTimeout': None}, None), + ({}, _http_client.DEFAULT_TIMEOUT_SECONDS), + ]) + def test_topic_management_custom_timeout(self, options, timeout): + cred = testutils.MockCredential() + all_options = {'projectId': 'explicit-project-id'} + all_options.update(options) + firebase_admin.initialize_app(cred, all_options) + recorder = self._instrument_service( + 'https://iid.googleapis.com', {'results': [{}, {'error': 'error_reason'}]}) + messaging.subscribe_to_topic(['1'], 'a') + self._check_timeout(recorder, timeout) + + +class TestSend: + + _DEFAULT_RESPONSE = json.dumps({'name': 'message-id'}) + _CLIENT_VERSION = f'fire-admin-python/{firebase_admin.__version__}' + + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + def 
_instrument_messaging_service(self, app=None, status=200, payload=_DEFAULT_RESPONSE): + if not app: + app = firebase_admin.get_app() + fcm_service = messaging._get_messaging_service(app) + recorder = [] + fcm_service._client.session.mount( + 'https://fcm.googleapis.com', + testutils.MockAdapter(payload, status, recorder)) + return fcm_service, recorder + + + def _assert_request(self, request, expected_method, expected_url, expected_body=None): + assert request.method == expected_method + assert request.url == expected_url + assert request.headers['X-GOOG-API-FORMAT-VERSION'] == '2' + assert request.headers['X-FIREBASE-CLIENT'] == self._CLIENT_VERSION + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert request.headers['x-goog-api-client'] == expected_metrics_header + if expected_body is None: + assert request.body is None + else: + assert json.loads(request.body.decode()) == expected_body + + def _get_url(self, project_id): + return messaging._MessagingService.FCM_URL.format(project_id) + + def test_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id') + with pytest.raises(ValueError): + messaging.send(messaging.Message(topic='foo'), app=app) + testutils.run_without_project_id(evaluate) + + @pytest.mark.parametrize('msg', NON_OBJECT_ARGS + [None]) + def test_invalid_send(self, msg): + with pytest.raises(ValueError) as excinfo: + messaging.send(msg) + assert str(excinfo.value) == 'Message must be an instance of messaging.Message class.' 
+ + def test_send_dry_run(self): + _, recorder = self._instrument_messaging_service() + msg = messaging.Message(topic='foo') + msg_id = messaging.send(msg, dry_run=True) + assert msg_id == 'message-id' + assert len(recorder) == 1 + body = { + 'message': messaging._MessagingService.encode_message(msg), + 'validate_only': True, + } + self._assert_request(recorder[0], 'POST', self._get_url('explicit-project-id'), body) + + def test_send(self): + _, recorder = self._instrument_messaging_service() + msg = messaging.Message(topic='foo') + msg_id = messaging.send(msg) + assert msg_id == 'message-id' + assert len(recorder) == 1 + body = {'message': messaging._MessagingService.encode_message(msg)} + self._assert_request(recorder[0], 'POST', self._get_url('explicit-project-id'), body) + + @pytest.mark.parametrize('status,exc_type', HTTP_ERROR_CODES.items()) + def test_send_error(self, status, exc_type): + _, recorder = self._instrument_messaging_service(status=status, payload='{}') + msg = messaging.Message(topic='foo') + with pytest.raises(exc_type) as excinfo: + messaging.send(msg) + expected = f'Unexpected HTTP response with status: {status}; body: {{}}' + check_exception(excinfo.value, expected, status) + assert len(recorder) == 1 + body = {'message': messaging._MessagingService.JSON_ENCODER.default(msg)} + self._assert_request(recorder[0], 'POST', self._get_url('explicit-project-id'), body) + + @pytest.mark.parametrize('status', HTTP_ERROR_CODES) + def test_send_detailed_error(self, status): + payload = json.dumps({ + 'error': { + 'status': 'INVALID_ARGUMENT', + 'message': 'test error' + } + }) + _, recorder = self._instrument_messaging_service(status=status, payload=payload) + msg = messaging.Message(topic='foo') + with pytest.raises(exceptions.InvalidArgumentError) as excinfo: + messaging.send(msg) + check_exception(excinfo.value, 'test error', status) + assert len(recorder) == 1 + body = {'message': messaging._MessagingService.JSON_ENCODER.default(msg)} + 
self._assert_request(recorder[0], 'POST', self._get_url('explicit-project-id'), body) + + @pytest.mark.parametrize('status', HTTP_ERROR_CODES) + def test_send_canonical_error_code(self, status): + payload = json.dumps({ + 'error': { + 'status': 'NOT_FOUND', + 'message': 'test error' + } + }) + _, recorder = self._instrument_messaging_service(status=status, payload=payload) + msg = messaging.Message(topic='foo') + with pytest.raises(exceptions.NotFoundError) as excinfo: + messaging.send(msg) + check_exception(excinfo.value, 'test error', status) + assert len(recorder) == 1 + body = {'message': messaging._MessagingService.JSON_ENCODER.default(msg)} + self._assert_request(recorder[0], 'POST', self._get_url('explicit-project-id'), body) + + @pytest.mark.parametrize('status', HTTP_ERROR_CODES) + @pytest.mark.parametrize('fcm_error_code, exc_type', FCM_ERROR_CODES.items()) + def test_send_fcm_error_code(self, status, fcm_error_code, exc_type): + payload = json.dumps({ + 'error': { + 'status': 'INVALID_ARGUMENT', + 'message': 'test error', + 'details': [ + { + '@type': 'type.googleapis.com/google.firebase.fcm.v1.FcmError', + 'errorCode': fcm_error_code, + }, + ], + } + }) + _, recorder = self._instrument_messaging_service(status=status, payload=payload) + msg = messaging.Message(topic='foo') + with pytest.raises(exc_type) as excinfo: + messaging.send(msg) + check_exception(excinfo.value, 'test error', status) + assert len(recorder) == 1 + body = {'message': messaging._MessagingService.JSON_ENCODER.default(msg)} + self._assert_request(recorder[0], 'POST', self._get_url('explicit-project-id'), body) + + @pytest.mark.parametrize('status', HTTP_ERROR_CODES) + def test_send_unknown_fcm_error_code(self, status): + payload = json.dumps({ + 'error': { + 'status': 'INVALID_ARGUMENT', + 'message': 'test error', + 'details': [ + { + '@type': 'type.googleapis.com/google.firebase.fcm.v1.FcmError', + 'errorCode': 'SOME_UNKNOWN_CODE', + }, + ], + } + }) + _, recorder = 
self._instrument_messaging_service(status=status, payload=payload) + msg = messaging.Message(topic='foo') + with pytest.raises(exceptions.InvalidArgumentError) as excinfo: + messaging.send(msg) + check_exception(excinfo.value, 'test error', status) + assert len(recorder) == 1 + body = {'message': messaging._MessagingService.JSON_ENCODER.default(msg)} + self._assert_request(recorder[0], 'POST', self._get_url('explicit-project-id'), body) + + +class TestSendEach(): + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + def _instrument_messaging_service(self, response_dict, app=None): + if not app: + app = firebase_admin.get_app() + fcm_service = messaging._get_messaging_service(app) + recorder = [] + fcm_service._client.session.mount( + 'https://fcm.googleapis.com', + testutils.MockRequestBasedMultiRequestAdapter(response_dict, recorder)) + return fcm_service, recorder + + def test_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id') + with pytest.raises(ValueError): + messaging.send_each([messaging.Message(topic='foo')], app=app) + testutils.run_without_project_id(evaluate) + + @pytest.mark.parametrize('msg', NON_LIST_ARGS) + def test_invalid_send_each(self, msg): + with pytest.raises(ValueError) as excinfo: + messaging.send_each(msg) + if isinstance(msg, list): + expected = 'Message must be an instance of messaging.Message class.' + assert str(excinfo.value) == expected + else: + expected = 'messages must be a list of messaging.Message instances.' + assert str(excinfo.value) == expected + + def test_invalid_over_500(self): + msg = messaging.Message(topic='foo') + with pytest.raises(ValueError) as excinfo: + messaging.send_each([msg for _ in range(0, 501)]) + expected = 'messages must not contain more than 500 elements.' 
+ assert str(excinfo.value) == expected + + def test_send_each(self): + payload1 = json.dumps({'name': 'message-id1'}) + payload2 = json.dumps({'name': 'message-id2'}) + _ = self._instrument_messaging_service( + response_dict={'foo1': [200, payload1], 'foo2': [200, payload2]}) + msg1 = messaging.Message(topic='foo1') + msg2 = messaging.Message(topic='foo2') + batch_response = messaging.send_each([msg1, msg2], dry_run=True) + assert batch_response.success_count == 2 + assert batch_response.failure_count == 0 + assert len(batch_response.responses) == 2 + assert [r.message_id for r in batch_response.responses] == ['message-id1', 'message-id2'] + assert all(r.success for r in batch_response.responses) + assert not any(r.exception for r in batch_response.responses) + + @respx.mock + @pytest.mark.asyncio + async def test_send_each_async(self): + responses = [ + respx.MockResponse(200, http_version='HTTP/2', json={'name': 'message-id1'}), + respx.MockResponse(200, http_version='HTTP/2', json={'name': 'message-id2'}), + respx.MockResponse(200, http_version='HTTP/2', json={'name': 'message-id3'}), + ] + msg1 = messaging.Message(topic='foo1') + msg2 = messaging.Message(topic='foo2') + msg3 = messaging.Message(topic='foo3') + route = respx.request( + 'POST', + 'https://fcm.googleapis.com/v1/projects/explicit-project-id/messages:send' + ).mock(side_effect=responses) + + batch_response = await messaging.send_each_async([msg1, msg2, msg3], dry_run=True) + + assert batch_response.success_count == 3 + assert batch_response.failure_count == 0 + assert len(batch_response.responses) == 3 + assert [r.message_id for r in batch_response.responses] \ + == ['message-id1', 'message-id2', 'message-id3'] + assert all(r.success for r in batch_response.responses) + assert not any(r.exception for r in batch_response.responses) + + assert route.call_count == 3 + + @respx.mock + @pytest.mark.asyncio + async def test_send_each_async_error_401_fail_auth_retry(self): + payload = json.dumps({ + 
'error': { + 'status': 'UNAUTHENTICATED', + 'message': 'test unauthenticated error', + 'details': [ + { + '@type': 'type.googleapis.com/google.firebase.fcm.v1.FcmError', + 'errorCode': 'SOME_UNKNOWN_CODE', + }, + ], + } + }) + + responses = repeat(respx.MockResponse(401, http_version='HTTP/2', content=payload)) + + msg1 = messaging.Message(topic='foo1') + route = respx.request( + 'POST', + 'https://fcm.googleapis.com/v1/projects/explicit-project-id/messages:send' + ).mock(side_effect=responses) + batch_response = await messaging.send_each_async([msg1], dry_run=True) + + assert route.call_count == 3 + assert batch_response.success_count == 0 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 1 + exception = batch_response.responses[0].exception + assert isinstance(exception, exceptions.UnauthenticatedError) + + @respx.mock + @pytest.mark.asyncio + async def test_send_each_async_error_401_pass_on_auth_retry(self): + payload = json.dumps({ + 'error': { + 'status': 'UNAUTHENTICATED', + 'message': 'test unauthenticated error', + 'details': [ + { + '@type': 'type.googleapis.com/google.firebase.fcm.v1.FcmError', + 'errorCode': 'SOME_UNKNOWN_CODE', + }, + ], + } + }) + responses = [ + respx.MockResponse(401, http_version='HTTP/2', content=payload), + respx.MockResponse(200, http_version='HTTP/2', json={'name': 'message-id1'}), + ] + + msg1 = messaging.Message(topic='foo1') + route = respx.request( + 'POST', + 'https://fcm.googleapis.com/v1/projects/explicit-project-id/messages:send' + ).mock(side_effect=responses) + batch_response = await messaging.send_each_async([msg1], dry_run=True) + + assert route.call_count == 2 + assert batch_response.success_count == 1 + assert batch_response.failure_count == 0 + assert len(batch_response.responses) == 1 + assert [r.message_id for r in batch_response.responses] == ['message-id1'] + assert all(r.success for r in batch_response.responses) + assert not any(r.exception for r in 
batch_response.responses) + + @respx.mock + @pytest.mark.asyncio + async def test_send_each_async_error_500_fail_retry_config(self): + payload = json.dumps({ + 'error': { + 'status': 'INTERNAL', + 'message': 'test INTERNAL error', + 'details': [ + { + '@type': 'type.googleapis.com/google.firebase.fcm.v1.FcmError', + 'errorCode': 'SOME_UNKNOWN_CODE', + }, + ], + } + }) + + responses = repeat(respx.MockResponse(500, http_version='HTTP/2', content=payload)) + + msg1 = messaging.Message(topic='foo1') + route = respx.request( + 'POST', + 'https://fcm.googleapis.com/v1/projects/explicit-project-id/messages:send' + ).mock(side_effect=responses) + batch_response = await messaging.send_each_async([msg1], dry_run=True) + + assert route.call_count == 5 + assert batch_response.success_count == 0 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 1 + exception = batch_response.responses[0].exception + assert isinstance(exception, exceptions.InternalError) + + + @respx.mock + @pytest.mark.asyncio + async def test_send_each_async_error_500_pass_on_retry_config(self): + payload = json.dumps({ + 'error': { + 'status': 'INTERNAL', + 'message': 'test INTERNAL error', + 'details': [ + { + '@type': 'type.googleapis.com/google.firebase.fcm.v1.FcmError', + 'errorCode': 'SOME_UNKNOWN_CODE', + }, + ], + } + }) + responses = chain( + [ + respx.MockResponse(500, http_version='HTTP/2', content=payload), + respx.MockResponse(500, http_version='HTTP/2', content=payload), + respx.MockResponse(500, http_version='HTTP/2', content=payload), + respx.MockResponse(500, http_version='HTTP/2', content=payload), + respx.MockResponse(200, http_version='HTTP/2', json={'name': 'message-id1'}), + ], + ) + + msg1 = messaging.Message(topic='foo1') + route = respx.request( + 'POST', + 'https://fcm.googleapis.com/v1/projects/explicit-project-id/messages:send' + ).mock(side_effect=responses) + batch_response = await messaging.send_each_async([msg1], dry_run=True) + + assert 
route.call_count == 5 + assert batch_response.success_count == 1 + assert batch_response.failure_count == 0 + assert len(batch_response.responses) == 1 + assert [r.message_id for r in batch_response.responses] == ['message-id1'] + assert all(r.success for r in batch_response.responses) + assert not any(r.exception for r in batch_response.responses) + + + @pytest.mark.asyncio + @respx.mock + async def test_send_each_async_request_error(self): + responses = httpx.ConnectError("Test request error", request=httpx.Request( + 'POST', + 'https://fcm.googleapis.com/v1/projects/explicit-project-id/messages:send')) + + msg1 = messaging.Message(topic='foo1') + route = respx.request( + 'POST', + 'https://fcm.googleapis.com/v1/projects/explicit-project-id/messages:send' + ).mock(side_effect=responses) + batch_response = await messaging.send_each_async([msg1], dry_run=True) + + assert route.call_count == 1 + assert batch_response.success_count == 0 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 1 + exception = batch_response.responses[0].exception + assert isinstance(exception, exceptions.UnavailableError) + + @pytest.mark.parametrize('status', HTTP_ERROR_CODES) + def test_send_each_detailed_error(self, status): + success_payload = json.dumps({'name': 'message-id'}) + error_payload = json.dumps({ + 'error': { + 'status': 'INVALID_ARGUMENT', + 'message': 'test error' + } + }) + _ = self._instrument_messaging_service( + response_dict={'foo1': [200, success_payload], 'foo2': [status, error_payload]}) + msg1 = messaging.Message(topic='foo1') + msg2 = messaging.Message(topic='foo2') + batch_response = messaging.send_each([msg1, msg2]) + assert batch_response.success_count == 1 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 2 + success_response = batch_response.responses[0] + assert success_response.message_id == 'message-id' + assert success_response.success is True + assert success_response.exception is None 
+ error_response = batch_response.responses[1] + assert error_response.message_id is None + assert error_response.success is False + exception = error_response.exception + assert isinstance(exception, exceptions.InvalidArgumentError) + check_exception(exception, 'test error', status) + + @pytest.mark.parametrize('status', HTTP_ERROR_CODES) + def test_send_each_canonical_error_code(self, status): + success_payload = json.dumps({'name': 'message-id'}) + error_payload = json.dumps({ + 'error': { + 'status': 'NOT_FOUND', + 'message': 'test error' + } + }) + _ = self._instrument_messaging_service( + response_dict={'foo1': [200, success_payload], 'foo2': [status, error_payload]}) + msg1 = messaging.Message(topic='foo1') + msg2 = messaging.Message(topic='foo2') + batch_response = messaging.send_each([msg1, msg2]) + assert batch_response.success_count == 1 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 2 + success_response = batch_response.responses[0] + assert success_response.message_id == 'message-id' + assert success_response.success is True + assert success_response.exception is None + error_response = batch_response.responses[1] + assert error_response.message_id is None + assert error_response.success is False + exception = error_response.exception + assert isinstance(exception, exceptions.NotFoundError) + check_exception(exception, 'test error', status) + + @pytest.mark.parametrize('status', HTTP_ERROR_CODES) + @pytest.mark.parametrize('fcm_error_code, exc_type', FCM_ERROR_CODES.items()) + def test_send_each_fcm_error_code(self, status, fcm_error_code, exc_type): + success_payload = json.dumps({'name': 'message-id'}) + error_payload = json.dumps({ + 'error': { + 'status': 'INVALID_ARGUMENT', + 'message': 'test error', + 'details': [ + { + '@type': 'type.googleapis.com/google.firebase.fcm.v1.FcmError', + 'errorCode': fcm_error_code, + }, + ], + } + }) + _ = self._instrument_messaging_service( + response_dict={'foo1': [200, 
success_payload], 'foo2': [status, error_payload]}) + msg1 = messaging.Message(topic='foo1') + msg2 = messaging.Message(topic='foo2') + batch_response = messaging.send_each([msg1, msg2]) + assert batch_response.success_count == 1 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 2 + success_response = batch_response.responses[0] + assert success_response.message_id == 'message-id' + assert success_response.success is True + assert success_response.exception is None + error_response = batch_response.responses[1] + assert error_response.message_id is None + assert error_response.success is False + exception = error_response.exception + assert isinstance(exception, exc_type) + check_exception(exception, 'test error', status) + + +class TestSendEachForMulticast(TestSendEach): + + def test_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id') + with pytest.raises(ValueError): + messaging.send_each([messaging.Message(topic='foo')], app=app) + testutils.run_without_project_id(evaluate) + + @pytest.mark.parametrize('msg', NON_LIST_ARGS) + def test_invalid_send_each_for_multicast(self, msg): + with pytest.raises(ValueError) as excinfo: + messaging.send_each_for_multicast(msg) + expected = 'Message must be an instance of messaging.MulticastMessage class.' 
+ assert str(excinfo.value) == expected + + def test_send_each_for_multicast(self): + payload1 = json.dumps({'name': 'message-id1'}) + payload2 = json.dumps({'name': 'message-id2'}) + _ = self._instrument_messaging_service( + response_dict={'foo1': [200, payload1], 'foo2': [200, payload2]}) + msg = messaging.MulticastMessage(tokens=['foo1', 'foo2']) + batch_response = messaging.send_each_for_multicast(msg, dry_run=True) + assert batch_response.success_count == 2 + assert batch_response.failure_count == 0 + assert len(batch_response.responses) == 2 + assert [r.message_id for r in batch_response.responses] == ['message-id1', 'message-id2'] + assert all(r.success for r in batch_response.responses) + assert not any(r.exception for r in batch_response.responses) + + @pytest.mark.parametrize('status', HTTP_ERROR_CODES) + def test_send_each_for_multicast_detailed_error(self, status): + success_payload = json.dumps({'name': 'message-id'}) + error_payload = json.dumps({ + 'error': { + 'status': 'INVALID_ARGUMENT', + 'message': 'test error' + } + }) + _ = self._instrument_messaging_service( + response_dict={'foo1': [200, success_payload], 'foo2': [status, error_payload]}) + msg = messaging.MulticastMessage(tokens=['foo1', 'foo2']) + batch_response = messaging.send_each_for_multicast(msg) + assert batch_response.success_count == 1 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 2 + success_response = batch_response.responses[0] + assert success_response.message_id == 'message-id' + assert success_response.success is True + assert success_response.exception is None + error_response = batch_response.responses[1] + assert error_response.message_id is None + assert error_response.success is False + assert error_response.exception is not None + exception = error_response.exception + assert isinstance(exception, exceptions.InvalidArgumentError) + check_exception(exception, 'test error', status) + + @pytest.mark.parametrize('status', 
HTTP_ERROR_CODES) + def test_send_each_for_multicast_canonical_error_code(self, status): + success_payload = json.dumps({'name': 'message-id'}) + error_payload = json.dumps({ + 'error': { + 'status': 'NOT_FOUND', + 'message': 'test error' + } + }) + _ = self._instrument_messaging_service( + response_dict={'foo1': [200, success_payload], 'foo2': [status, error_payload]}) + msg = messaging.MulticastMessage(tokens=['foo1', 'foo2']) + batch_response = messaging.send_each_for_multicast(msg) + assert batch_response.success_count == 1 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 2 + success_response = batch_response.responses[0] + assert success_response.message_id == 'message-id' + assert success_response.success is True + assert success_response.exception is None + error_response = batch_response.responses[1] + assert error_response.message_id is None + assert error_response.success is False + assert error_response.exception is not None + exception = error_response.exception + assert isinstance(exception, exceptions.NotFoundError) + check_exception(exception, 'test error', status) + + @pytest.mark.parametrize('status', HTTP_ERROR_CODES) + def test_send_each_for_multicast_fcm_error_code(self, status): + success_payload = json.dumps({'name': 'message-id'}) + error_payload = json.dumps({ + 'error': { + 'status': 'INVALID_ARGUMENT', + 'message': 'test error', + 'details': [ + { + '@type': 'type.googleapis.com/google.firebase.fcm.v1.FcmError', + 'errorCode': 'UNREGISTERED', + }, + ], + } + }) + _ = self._instrument_messaging_service( + response_dict={'foo1': [200, success_payload], 'foo2': [status, error_payload]}) + msg = messaging.MulticastMessage(tokens=['foo1', 'foo2']) + batch_response = messaging.send_each_for_multicast(msg) + assert batch_response.success_count == 1 + assert batch_response.failure_count == 1 + assert len(batch_response.responses) == 2 + success_response = batch_response.responses[0] + assert 
success_response.message_id == 'message-id' + assert success_response.success is True + assert success_response.exception is None + error_response = batch_response.responses[1] + assert error_response.message_id is None + assert error_response.success is False + assert error_response.exception is not None + exception = error_response.exception + assert isinstance(exception, messaging.UnregisteredError) + check_exception(exception, 'test error', status) + + +class TestTopicManagement: + + _DEFAULT_RESPONSE = json.dumps({'results': [{}, {'error': 'error_reason'}]}) + _DEFAULT_ERROR_RESPONSE = json.dumps({'error': 'error_reason'}) + _VALID_ARGS = [ + # (tokens, topic, expected) + ( + ['foo', 'bar'], + 'test-topic', + {'to': '/topics/test-topic', 'registration_tokens': ['foo', 'bar']} + ), + ( + 'foo', + '/topics/test-topic', + {'to': '/topics/test-topic', 'registration_tokens': ['foo']} + ), + ] + + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': 'explicit-project-id'}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + def _instrument_iid_service(self, app=None, status=200, payload=_DEFAULT_RESPONSE): + if not app: + app = firebase_admin.get_app() + fcm_service = messaging._get_messaging_service(app) + recorder = [] + fcm_service._client.session.mount( + 'https://iid.googleapis.com', + testutils.MockAdapter(payload, status, recorder)) + return fcm_service, recorder + + def _assert_request(self, request, expected_method, expected_url): + assert request.method == expected_method + assert request.url == expected_url + assert request.headers['access_token_auth'] == 'true' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert request.headers['x-goog-api-client'] == expected_metrics_header + + def _get_url(self, path): + return f'{messaging._MessagingService.IID_URL}/{path}' + + @pytest.mark.parametrize('tokens', [None, '', [], {}, 
tuple()]) + def test_invalid_tokens(self, tokens): + expected = 'Tokens must be a string or a non-empty list of strings.' + if isinstance(tokens, str): + expected = 'Tokens must be non-empty strings.' + + with pytest.raises(ValueError) as excinfo: + messaging.subscribe_to_topic(tokens, 'test-topic') + assert str(excinfo.value) == expected + + with pytest.raises(ValueError) as excinfo: + messaging.unsubscribe_from_topic(tokens, 'test-topic') + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('topic', NON_STRING_ARGS + [None, '']) + def test_invalid_topic(self, topic): + expected = 'Topic must be a non-empty string.' + with pytest.raises(ValueError) as excinfo: + messaging.subscribe_to_topic('test-token', topic) + assert str(excinfo.value) == expected + + with pytest.raises(ValueError) as excinfo: + messaging.unsubscribe_from_topic('test-tokens', topic) + assert str(excinfo.value) == expected + + @pytest.mark.parametrize('args', _VALID_ARGS) + def test_subscribe_to_topic(self, args): + _, recorder = self._instrument_iid_service() + resp = messaging.subscribe_to_topic(args[0], args[1]) + self._check_response(resp) + assert len(recorder) == 1 + self._assert_request(recorder[0], 'POST', self._get_url('iid/v1:batchAdd')) + assert json.loads(recorder[0].body.decode()) == args[2] + + @pytest.mark.parametrize('status, exc_type', HTTP_ERROR_CODES.items()) + def test_subscribe_to_topic_error(self, status, exc_type): + _, recorder = self._instrument_iid_service( + status=status, payload=self._DEFAULT_ERROR_RESPONSE) + with pytest.raises(exc_type) as excinfo: + messaging.subscribe_to_topic('foo', 'test-topic') + assert str(excinfo.value) == 'Error while calling the IID service: error_reason' + assert len(recorder) == 1 + self._assert_request(recorder[0], 'POST', self._get_url('iid/v1:batchAdd')) + + @pytest.mark.parametrize('status, exc_type', HTTP_ERROR_CODES.items()) + def test_subscribe_to_topic_non_json_error(self, status, exc_type): + _, recorder = 
self._instrument_iid_service(status=status, payload='not json') + with pytest.raises(exc_type) as excinfo: + messaging.subscribe_to_topic('foo', 'test-topic') + reason = f'Unexpected HTTP response with status: {status}; body: not json' + assert str(excinfo.value) == reason + assert len(recorder) == 1 + self._assert_request(recorder[0], 'POST', self._get_url('iid/v1:batchAdd')) + + @pytest.mark.parametrize('args', _VALID_ARGS) + def test_unsubscribe_from_topic(self, args): + _, recorder = self._instrument_iid_service() + resp = messaging.unsubscribe_from_topic(args[0], args[1]) + self._check_response(resp) + assert len(recorder) == 1 + self._assert_request(recorder[0], 'POST', self._get_url('iid/v1:batchRemove')) + assert json.loads(recorder[0].body.decode()) == args[2] + + @pytest.mark.parametrize('status, exc_type', HTTP_ERROR_CODES.items()) + def test_unsubscribe_from_topic_error(self, status, exc_type): + _, recorder = self._instrument_iid_service( + status=status, payload=self._DEFAULT_ERROR_RESPONSE) + with pytest.raises(exc_type) as excinfo: + messaging.unsubscribe_from_topic('foo', 'test-topic') + assert str(excinfo.value) == 'Error while calling the IID service: error_reason' + assert len(recorder) == 1 + self._assert_request(recorder[0], 'POST', self._get_url('iid/v1:batchRemove')) + + @pytest.mark.parametrize('status, exc_type', HTTP_ERROR_CODES.items()) + def test_unsubscribe_from_topic_non_json_error(self, status, exc_type): + _, recorder = self._instrument_iid_service(status=status, payload='not json') + with pytest.raises(exc_type) as excinfo: + messaging.unsubscribe_from_topic('foo', 'test-topic') + reason = f'Unexpected HTTP response with status: {status}; body: not json' + assert str(excinfo.value) == reason + assert len(recorder) == 1 + self._assert_request(recorder[0], 'POST', self._get_url('iid/v1:batchRemove')) + + def _check_response(self, resp): + assert resp.success_count == 1 + assert resp.failure_count == 1 + assert len(resp.errors) == 1 + 
assert resp.errors[0].index == 1 + assert resp.errors[0].reason == 'error_reason' diff --git a/tests/test_ml.py b/tests/test_ml.py new file mode 100644 index 000000000..bcc93fd05 --- /dev/null +++ b/tests/test_ml.py @@ -0,0 +1,1088 @@ +# Copyright 2019 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test cases for the firebase_admin.ml module.""" + +import json + +import pytest + +import firebase_admin +from firebase_admin import exceptions +from firebase_admin import ml +from firebase_admin import _utils +from tests import testutils + + +BASE_URL = 'https://firebaseml.googleapis.com/v1beta2/' +PROJECT_ID = 'my-project-1' + +PAGE_TOKEN = 'pageToken' +NEXT_PAGE_TOKEN = 'nextPageToken' + +CREATE_TIME = '2020-01-21T20:44:27.392932Z' +CREATE_TIME_MILLIS = 1579639467392 + +UPDATE_TIME = '2020-01-21T22:45:29.392932Z' +UPDATE_TIME_MILLIS = 1579646729392 + +CREATE_TIME_2 = '2020-01-21T21:44:27.392932Z' +UPDATE_TIME_2 = '2020-01-21T23:45:29.392932Z' + +ETAG = '33a64df551425fcc55e4d42a148795d9f25f89d4' +MODEL_HASH = '987987a98b98798d098098e09809fc0893897' +TAG_1 = 'Tag1' +TAG_2 = 'Tag2' +TAG_3 = 'Tag3' +TAGS = [TAG_1, TAG_2] +TAGS_2 = [TAG_1, TAG_3] + +MODEL_ID_1 = 'modelId1' +MODEL_NAME_1 = f'projects/{PROJECT_ID}/models/{MODEL_ID_1}' +DISPLAY_NAME_1 = 'displayName1' +MODEL_JSON_1 = { + 'name': MODEL_NAME_1, + 'displayName': DISPLAY_NAME_1 +} +MODEL_1 = ml.Model.from_dict(MODEL_JSON_1) + +MODEL_ID_2 = 'modelId2' +MODEL_NAME_2 = 
f'projects/{PROJECT_ID}/models/{MODEL_ID_2}' +DISPLAY_NAME_2 = 'displayName2' +MODEL_JSON_2 = { + 'name': MODEL_NAME_2, + 'displayName': DISPLAY_NAME_2 +} +MODEL_2 = ml.Model.from_dict(MODEL_JSON_2) + +MODEL_ID_3 = 'modelId3' +MODEL_NAME_3 = f'projects/{PROJECT_ID}/models/{MODEL_ID_3}' +DISPLAY_NAME_3 = 'displayName3' +MODEL_JSON_3 = { + 'name': MODEL_NAME_3, + 'displayName': DISPLAY_NAME_3 +} +MODEL_3 = ml.Model.from_dict(MODEL_JSON_3) + +MODEL_STATE_PUBLISHED_JSON = { + 'published': True +} +VALIDATION_ERROR_CODE = 400 +VALIDATION_ERROR_MSG = f'No model format found for {MODEL_ID_1}.' +MODEL_STATE_ERROR_JSON = { + 'validationError': { + 'code': VALIDATION_ERROR_CODE, + 'message': VALIDATION_ERROR_MSG, + } +} + +OPERATION_NAME_1 = f'projects/{PROJECT_ID}/operations/123' +OPERATION_NOT_DONE_JSON_1 = { + 'name': OPERATION_NAME_1, + 'metadata': { + '@type': 'type.googleapis.com/google.firebase.ml.v1beta2.ModelOperationMetadata', + 'name': f'projects/{PROJECT_ID}/models/{MODEL_ID_1}', + 'basic_operation_status': 'BASIC_OPERATION_STATUS_UPLOADING' + } +} + +GCS_BUCKET_NAME = 'my_bucket' +GCS_BLOB_NAME = 'mymodel.tflite' +GCS_TFLITE_URI = f'gs://{GCS_BUCKET_NAME}/{GCS_BLOB_NAME}' +GCS_TFLITE_URI_JSON = {'gcsTfliteUri': GCS_TFLITE_URI} +GCS_TFLITE_MODEL_SOURCE = ml.TFLiteGCSModelSource(GCS_TFLITE_URI) +TFLITE_FORMAT_JSON = { + 'gcsTfliteUri': GCS_TFLITE_URI, + 'sizeBytes': '1234567' +} +TFLITE_FORMAT = ml.TFLiteFormat.from_dict(TFLITE_FORMAT_JSON) + +GCS_TFLITE_SIGNED_URI_PATTERN = ( + 'https://storage.googleapis.com/{0}/{1}?X-Goog-Algorithm=GOOG4-RSA-SHA256&foo') +GCS_TFLITE_SIGNED_URI = GCS_TFLITE_SIGNED_URI_PATTERN.format(GCS_BUCKET_NAME, GCS_BLOB_NAME) + +GCS_TFLITE_URI_2 = 'gs://my_bucket/mymodel2.tflite' +GCS_TFLITE_URI_JSON_2 = {'gcsTfliteUri': GCS_TFLITE_URI_2} +GCS_TFLITE_MODEL_SOURCE_2 = ml.TFLiteGCSModelSource(GCS_TFLITE_URI_2) +TFLITE_FORMAT_JSON_2 = { + 'gcsTfliteUri': GCS_TFLITE_URI_2, + 'sizeBytes': '2345678' +} +TFLITE_FORMAT_2 = 
ml.TFLiteFormat.from_dict(TFLITE_FORMAT_JSON_2) + +CREATED_UPDATED_MODEL_JSON_1 = { + 'name': MODEL_NAME_1, + 'displayName': DISPLAY_NAME_1, + 'createTime': CREATE_TIME, + 'updateTime': UPDATE_TIME, + 'state': MODEL_STATE_ERROR_JSON, + 'etag': ETAG, + 'modelHash': MODEL_HASH, + 'tags': TAGS, +} +CREATED_UPDATED_MODEL_1 = ml.Model.from_dict(CREATED_UPDATED_MODEL_JSON_1) + +LOCKED_MODEL_JSON_1 = { + 'name': MODEL_NAME_1, + 'displayName': DISPLAY_NAME_1, + 'createTime': CREATE_TIME, + 'updateTime': UPDATE_TIME, + 'tags': TAGS, + 'activeOperations': [OPERATION_NOT_DONE_JSON_1] +} + +LOCKED_MODEL_JSON_2 = { + 'name': MODEL_NAME_1, + 'displayName': DISPLAY_NAME_2, + 'createTime': CREATE_TIME_2, + 'updateTime': UPDATE_TIME_2, + 'tags': TAGS_2, + 'activeOperations': [OPERATION_NOT_DONE_JSON_1] +} + +OPERATION_DONE_MODEL_JSON_1 = { + 'done': True, + 'response': CREATED_UPDATED_MODEL_JSON_1 +} +OPERATION_MALFORMED_JSON_1 = { + 'done': True, + # if done is true then either response or error should be populated +} +OPERATION_MISSING_NAME = { + # Name is required if the operation is not done. 
+ 'done': False +} +OPERATION_ERROR_CODE = 3 +OPERATION_ERROR_MSG = "Invalid argument" +OPERATION_ERROR_EXPECTED_STATUS = 'INVALID_ARGUMENT' +OPERATION_ERROR_JSON_1 = { + 'done': True, + 'error': { + 'code': OPERATION_ERROR_CODE, + 'message': OPERATION_ERROR_MSG, + } +} + +FULL_MODEL_ERR_STATE_LRO_JSON = { + 'name': MODEL_NAME_1, + 'displayName': DISPLAY_NAME_1, + 'createTime': CREATE_TIME, + 'updateTime': UPDATE_TIME, + 'state': MODEL_STATE_ERROR_JSON, + 'etag': ETAG, + 'modelHash': MODEL_HASH, + 'tags': TAGS, + 'activeOperations': [OPERATION_NOT_DONE_JSON_1], +} +FULL_MODEL_PUBLISHED_JSON = { + 'name': MODEL_NAME_1, + 'displayName': DISPLAY_NAME_1, + 'createTime': CREATE_TIME, + 'updateTime': UPDATE_TIME, + 'state': MODEL_STATE_PUBLISHED_JSON, + 'etag': ETAG, + 'modelHash': MODEL_HASH, + 'tags': TAGS, + 'tfliteModel': TFLITE_FORMAT_JSON +} +FULL_MODEL_PUBLISHED = ml.Model.from_dict(FULL_MODEL_PUBLISHED_JSON) +OPERATION_DONE_FULL_MODEL_PUBLISHED_JSON = { + 'name': OPERATION_NAME_1, + 'done': True, + 'response': FULL_MODEL_PUBLISHED_JSON +} + +EMPTY_RESPONSE = json.dumps({}) +OPERATION_NOT_DONE_RESPONSE = json.dumps(OPERATION_NOT_DONE_JSON_1) +OPERATION_DONE_RESPONSE = json.dumps(OPERATION_DONE_MODEL_JSON_1) +OPERATION_DONE_PUBLISHED_RESPONSE = json.dumps(OPERATION_DONE_FULL_MODEL_PUBLISHED_JSON) +OPERATION_ERROR_RESPONSE = json.dumps(OPERATION_ERROR_JSON_1) +OPERATION_MALFORMED_RESPONSE = json.dumps(OPERATION_MALFORMED_JSON_1) +OPERATION_MISSING_NAME_RESPONSE = json.dumps(OPERATION_MISSING_NAME) +DEFAULT_GET_RESPONSE = json.dumps(MODEL_JSON_1) +LOCKED_MODEL_2_RESPONSE = json.dumps(LOCKED_MODEL_JSON_2) +NO_MODELS_LIST_RESPONSE = json.dumps({}) +DEFAULT_LIST_RESPONSE = json.dumps({ + 'models': [MODEL_JSON_1, MODEL_JSON_2], + 'nextPageToken': NEXT_PAGE_TOKEN +}) +LAST_PAGE_LIST_RESPONSE = json.dumps({ + 'models': [MODEL_JSON_3] +}) +ONE_PAGE_LIST_RESPONSE = json.dumps({ + 'models': [MODEL_JSON_1, MODEL_JSON_2, MODEL_JSON_3], +}) + +ERROR_CODE_NOT_FOUND = 404 
+ERROR_MSG_NOT_FOUND = 'The resource was not found' +ERROR_STATUS_NOT_FOUND = 'NOT_FOUND' +ERROR_JSON_NOT_FOUND = { + 'error': { + 'code': ERROR_CODE_NOT_FOUND, + 'message': ERROR_MSG_NOT_FOUND, + 'status': ERROR_STATUS_NOT_FOUND + } +} +ERROR_RESPONSE_NOT_FOUND = json.dumps(ERROR_JSON_NOT_FOUND) + +ERROR_CODE_BAD_REQUEST = 400 +ERROR_MSG_BAD_REQUEST = 'Invalid Argument' +ERROR_STATUS_BAD_REQUEST = 'INVALID_ARGUMENT' +ERROR_JSON_BAD_REQUEST = { + 'error': { + 'code': ERROR_CODE_BAD_REQUEST, + 'message': ERROR_MSG_BAD_REQUEST, + 'status': ERROR_STATUS_BAD_REQUEST + } +} +ERROR_RESPONSE_BAD_REQUEST = json.dumps(ERROR_JSON_BAD_REQUEST) + +INVALID_MODEL_ID_ARGS = [ + ('', ValueError), + ('&_*#@:/?', ValueError), + (None, TypeError), + (12345, TypeError), +] +INVALID_MODEL_ARGS = [ + 'abc', + 4.2, + [], + {}, + True, + -1, + 0, + None +] +INVALID_OP_NAME_ARGS = [ + 'abc', + '123', + 'operations/project/1234/model/abc/operation/123', + 'projects/operations/123', + 'projects/$#@/operations/123', + 'projects/1234/operations/123/extrathing', +] +PAGE_SIZE_VALUE_ERROR_MSG = ( + f'Page size must be a positive integer between 1 and {ml._MAX_PAGE_SIZE}' +) +INVALID_STRING_OR_NONE_ARGS = [0, -1, 4.2, 0x10, False, [], {}] + + +# For validation type errors +def check_error(excinfo, err_type, msg=None): + err = excinfo.value + assert isinstance(err, err_type) + if msg: + assert str(err) == msg + + +# For errors that are returned in an operation +def check_operation_error(excinfo, code, msg): + err = excinfo.value + assert isinstance(err, exceptions.FirebaseError) + assert err.code == code + assert str(err) == msg + + +# For rpc errors +def check_firebase_error(excinfo, code, status, msg): + err = excinfo.value + assert isinstance(err, exceptions.FirebaseError) + assert err.code == code + assert err.http_response is not None + assert err.http_response.status_code == status + assert str(err) == msg + + +def instrument_ml_service(status=200, payload=None, operations=False, app=None): 
+ if not app: + app = firebase_admin.get_app() + ml_service = ml._get_ml_service(app) + recorder = [] + session_url = 'https://firebaseml.googleapis.com/v1beta2/' + + if isinstance(status, list): + adapter = testutils.MockMultiRequestAdapter + else: + adapter = testutils.MockAdapter + + if operations: + ml_service._operation_client.session.mount( + session_url, adapter(payload, status, recorder)) + else: + ml_service._client.session.mount( + session_url, adapter(payload, status, recorder)) + return recorder + +def _assert_request(request, expected_method, expected_url): + assert request.method == expected_method + assert request.url == expected_url + assert request.headers['X-FIREBASE-CLIENT'] == f'fire-admin-python/{firebase_admin.__version__}' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert request.headers['x-goog-api-client'] == expected_metrics_header + +class _TestStorageClient: + @staticmethod + def upload(bucket_name, model_file_name, app): + del app # unused variable + blob_name = ml._CloudStorageClient.BLOB_NAME.format(model_file_name) + return ml._CloudStorageClient.GCS_URI.format(bucket_name, blob_name) + + @staticmethod + def sign_uri(gcs_tflite_uri, app): + del app # unused variable + bucket_name, blob_name = ml._CloudStorageClient._parse_gcs_tflite_uri(gcs_tflite_uri) + return GCS_TFLITE_SIGNED_URI_PATTERN.format(bucket_name, blob_name) + +class TestModel: + """Tests ml.Model class.""" + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': PROJECT_ID}) + ml._MLService.POLL_BASE_WAIT_TIME_SECONDS = 0.1 # shorter for test + ml.TFLiteGCSModelSource._STORAGE_CLIENT = _TestStorageClient() + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + @staticmethod + def _op_url(project_id): + return BASE_URL + f'projects/{project_id}/operations/123' + + def test_model_success_err_state_lro(self): + model = 
ml.Model.from_dict(FULL_MODEL_ERR_STATE_LRO_JSON)
        assert model.model_id == MODEL_ID_1
        assert model.display_name == DISPLAY_NAME_1
        assert model.create_time == CREATE_TIME_MILLIS
        assert model.update_time == UPDATE_TIME_MILLIS
        assert model.validation_error == VALIDATION_ERROR_MSG
        assert model.published is False
        assert model.etag == ETAG
        assert model.model_hash == MODEL_HASH
        assert model.tags == TAGS
        # The fixture's operation has not completed, so the model stays locked
        # and carries no resolved model format.
        assert model.locked is True
        assert model.model_format is None
        # Round trip: serializing back must reproduce the source dict exactly.
        assert model.as_dict() == FULL_MODEL_ERR_STATE_LRO_JSON

    def test_model_success_published(self):
        """A published model deserializes fully: unlocked, no validation error."""
        model = ml.Model.from_dict(FULL_MODEL_PUBLISHED_JSON)
        assert model.model_id == MODEL_ID_1
        assert model.display_name == DISPLAY_NAME_1
        assert model.create_time == CREATE_TIME_MILLIS
        assert model.update_time == UPDATE_TIME_MILLIS
        assert model.validation_error is None
        assert model.published is True
        assert model.etag == ETAG
        assert model.model_hash == MODEL_HASH
        assert model.tags == TAGS
        assert model.locked is False
        assert model.model_format == TFLITE_FORMAT
        # Round trip back to the original dict.
        assert model.as_dict() == FULL_MODEL_PUBLISHED_JSON

    def test_model_keyword_based_creation_and_setters(self):
        """Keyword construction and the property setters both drive as_dict()."""
        model = ml.Model(display_name=DISPLAY_NAME_1, tags=TAGS, model_format=TFLITE_FORMAT)
        assert model.display_name == DISPLAY_NAME_1
        assert model.tags == TAGS
        assert model.model_format == TFLITE_FORMAT
        assert model.as_dict() == {
            'displayName': DISPLAY_NAME_1,
            'tags': TAGS,
            'tfliteModel': TFLITE_FORMAT_JSON
        }

        # Re-assigning every settable field must be reflected in as_dict().
        model.display_name = DISPLAY_NAME_2
        model.tags = TAGS_2
        model.model_format = TFLITE_FORMAT_2
        assert model.as_dict() == {
            'displayName': DISPLAY_NAME_2,
            'tags': TAGS_2,
            'tfliteModel': TFLITE_FORMAT_JSON_2
        }

    def test_gcs_tflite_model_format_source_creation(self):
        """A GCS-backed TFLite source nests its URI under 'tfliteModel'."""
        model_source = ml.TFLiteGCSModelSource(gcs_tflite_uri=GCS_TFLITE_URI)
        model_format = ml.TFLiteFormat(model_source=model_source)
        model = ml.Model(display_name=DISPLAY_NAME_1, model_format=model_format)
+ assert model.as_dict() == { + 'displayName': DISPLAY_NAME_1, + 'tfliteModel': { + 'gcsTfliteUri': GCS_TFLITE_URI + } + } + + def test_source_creation_from_tflite_file(self): + model_source = ml.TFLiteGCSModelSource.from_tflite_model_file( + "my_model.tflite", "my_bucket") + assert model_source.as_dict() == { + 'gcsTfliteUri': 'gs://my_bucket/Firebase/ML/Models/my_model.tflite' + } + + def test_gcs_tflite_model_source_setters(self): + model_source = ml.TFLiteGCSModelSource(GCS_TFLITE_URI) + model_source.gcs_tflite_uri = GCS_TFLITE_URI_2 + assert model_source.gcs_tflite_uri == GCS_TFLITE_URI_2 + assert model_source.as_dict() == GCS_TFLITE_URI_JSON_2 + + def test_model_format_setters(self): + model_format = ml.TFLiteFormat(model_source=GCS_TFLITE_MODEL_SOURCE) + model_format.model_source = GCS_TFLITE_MODEL_SOURCE_2 + assert model_format.model_source == GCS_TFLITE_MODEL_SOURCE_2 + assert model_format.as_dict() == { + 'tfliteModel': { + 'gcsTfliteUri': GCS_TFLITE_URI_2 + } + } + + def test_model_as_dict_for_upload(self): + model_source = ml.TFLiteGCSModelSource(gcs_tflite_uri=GCS_TFLITE_URI) + model_format = ml.TFLiteFormat(model_source=model_source) + model = ml.Model(display_name=DISPLAY_NAME_1, model_format=model_format) + assert model.as_dict(for_upload=True) == { + 'displayName': DISPLAY_NAME_1, + 'tfliteModel': { + 'gcsTfliteUri': GCS_TFLITE_SIGNED_URI + } + } + + @pytest.mark.parametrize('helper_func', [ + ml.TFLiteGCSModelSource.from_keras_model, + ml.TFLiteGCSModelSource.from_saved_model + ]) + def test_tf_not_enabled(self, helper_func): + ml._TF_ENABLED = False # for reliability + with pytest.raises(ImportError) as excinfo: + helper_func(None) + check_error(excinfo, ImportError) + + @pytest.mark.parametrize('display_name, exc_type', [ + ('', ValueError), + ('&_*#@:/?', ValueError), + (12345, TypeError) + ]) + def test_model_display_name_validation_errors(self, display_name, exc_type): + with pytest.raises(exc_type) as excinfo: + 
ml.Model(display_name=display_name) + check_error(excinfo, exc_type) + + @pytest.mark.parametrize('tags, exc_type, error_message', [ + ('tag1', TypeError, 'Tags must be a list of strings.'), + (123, TypeError, 'Tags must be a list of strings.'), + (['tag1', 123, 'tag2'], TypeError, 'Tags must be a list of strings.'), + (['tag1', '@#$%^&'], ValueError, 'Tag format is invalid.'), + (['', 'tag2'], ValueError, 'Tag format is invalid.'), + (['sixty-one_characters_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx', + 'tag2'], ValueError, 'Tag format is invalid.') + ]) + def test_model_tags_validation_errors(self, tags, exc_type, error_message): + with pytest.raises(exc_type) as excinfo: + ml.Model(tags=tags) + check_error(excinfo, exc_type, error_message) + + @pytest.mark.parametrize('model_format', [ + 123, + "abc", + {}, + [], + True + ]) + def test_model_format_validation_errors(self, model_format): + with pytest.raises(TypeError) as excinfo: + ml.Model(model_format=model_format) + check_error(excinfo, TypeError, 'Model format must be a ModelFormat object.') + + @pytest.mark.parametrize('model_source', [ + 123, + "abc", + {}, + [], + True + ]) + def test_model_source_validation_errors(self, model_source): + with pytest.raises(TypeError) as excinfo: + ml.TFLiteFormat(model_source=model_source) + check_error(excinfo, TypeError, 'Model source must be a TFLiteModelSource object.') + + @pytest.mark.parametrize('uri, exc_type', [ + (123, TypeError), + ('abc', ValueError), + ('gs://NO_CAPITALS', ValueError), + ('gs://abc/', ValueError), + ('gs://aa/model.tflite', ValueError), + ('gs://@#$%/model.tflite', ValueError), + ('gs://invalid space/model.tflite', ValueError), + ('gs://sixty-four-characters_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx/model.tflite', + ValueError) + ]) + def test_gcs_tflite_source_validation_errors(self, uri, exc_type): + with pytest.raises(exc_type) as excinfo: + ml.TFLiteGCSModelSource(gcs_tflite_uri=uri) + check_error(excinfo, exc_type) + + def 
test_wait_for_unlocked_not_locked(self): + model = ml.Model(display_name="not_locked") + model.wait_for_unlocked() + + def test_wait_for_unlocked(self): + recorder = instrument_ml_service(status=200, + operations=True, + payload=OPERATION_DONE_PUBLISHED_RESPONSE) + model = ml.Model.from_dict(LOCKED_MODEL_JSON_1) + model.wait_for_unlocked() + assert model == FULL_MODEL_PUBLISHED + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', TestModel._op_url(PROJECT_ID)) + + def test_wait_for_unlocked_timeout(self): + recorder = instrument_ml_service( + status=200, operations=True, payload=OPERATION_NOT_DONE_RESPONSE) + ml._MLService.POLL_BASE_WAIT_TIME_SECONDS = 3 # longer so timeout applies immediately + model = ml.Model.from_dict(LOCKED_MODEL_JSON_1) + with pytest.raises(Exception) as excinfo: + model.wait_for_unlocked(max_time_seconds=0.1) + check_error(excinfo, exceptions.DeadlineExceededError, 'Polling max time exceeded.') + assert len(recorder) == 1 + + +class TestCreateModel: + """Tests ml.create_model.""" + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': PROJECT_ID}) + ml._MLService.POLL_BASE_WAIT_TIME_SECONDS = 0.1 # shorter for test + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + @staticmethod + def _url(project_id): + return BASE_URL + f'projects/{project_id}/models' + + @staticmethod + def _op_url(project_id): + return BASE_URL + f'projects/{project_id}/operations/123' + + @staticmethod + def _get_url(project_id, model_id): + return BASE_URL + f'projects/{project_id}/models/{model_id}' + + def test_immediate_done(self): + instrument_ml_service(status=200, payload=OPERATION_DONE_RESPONSE) + model = ml.create_model(MODEL_1) + assert model == CREATED_UPDATED_MODEL_1 + + def test_returns_locked(self): + recorder = instrument_ml_service( + status=[200, 200], + payload=[OPERATION_NOT_DONE_RESPONSE, LOCKED_MODEL_2_RESPONSE]) + expected_model = 
ml.Model.from_dict(LOCKED_MODEL_JSON_2) + model = ml.create_model(MODEL_1) + + assert model == expected_model + assert len(recorder) == 2 + _assert_request(recorder[0], 'POST', TestCreateModel._url(PROJECT_ID)) + _assert_request(recorder[1], 'GET', TestCreateModel._get_url(PROJECT_ID, MODEL_ID_1)) + + def test_operation_error(self): + instrument_ml_service(status=200, payload=OPERATION_ERROR_RESPONSE) + with pytest.raises(Exception) as excinfo: + ml.create_model(MODEL_1) + # The http request succeeded, the operation returned contains a create failure + check_operation_error(excinfo, OPERATION_ERROR_EXPECTED_STATUS, OPERATION_ERROR_MSG) + + def test_malformed_operation(self): + instrument_ml_service(status=200, payload=OPERATION_MALFORMED_RESPONSE) + with pytest.raises(Exception) as excinfo: + ml.create_model(MODEL_1) + check_error(excinfo, exceptions.UnknownError, 'Internal Error: Malformed Operation.') + + def test_rpc_error_create(self): + create_recorder = instrument_ml_service( + status=400, payload=ERROR_RESPONSE_BAD_REQUEST) + with pytest.raises(Exception) as excinfo: + ml.create_model(MODEL_1) + check_firebase_error( + excinfo, + ERROR_STATUS_BAD_REQUEST, + ERROR_CODE_BAD_REQUEST, + ERROR_MSG_BAD_REQUEST + ) + assert len(create_recorder) == 1 + + @pytest.mark.parametrize('model', INVALID_MODEL_ARGS) + def test_not_model(self, model): + with pytest.raises(Exception) as excinfo: + ml.create_model(model) + check_error(excinfo, TypeError, 'Model must be an ml.Model.') + + def test_missing_display_name(self): + with pytest.raises(Exception) as excinfo: + ml.create_model(ml.Model.from_dict({})) + check_error(excinfo, ValueError, 'Model must have a display name.') + + def test_missing_op_name(self): + instrument_ml_service(status=200, payload=OPERATION_MISSING_NAME_RESPONSE) + with pytest.raises(Exception) as excinfo: + ml.create_model(MODEL_1) + check_error(excinfo, TypeError) + + @pytest.mark.parametrize('op_name', INVALID_OP_NAME_ARGS) + def 
test_invalid_op_name(self, op_name): + payload = json.dumps({'name': op_name}) + instrument_ml_service(status=200, payload=payload) + with pytest.raises(Exception) as excinfo: + ml.create_model(MODEL_1) + check_error(excinfo, ValueError, 'Operation name format is invalid.') + + +class TestUpdateModel: + """Tests ml.update_model.""" + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': PROJECT_ID}) + ml._MLService.POLL_BASE_WAIT_TIME_SECONDS = 0.1 # shorter for test + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + @staticmethod + def _url(project_id, model_id): + return BASE_URL + f'projects/{project_id}/models/{model_id}' + + @staticmethod + def _op_url(project_id): + return BASE_URL + f'projects/{project_id}/operations/123' + + def test_immediate_done(self): + instrument_ml_service(status=200, payload=OPERATION_DONE_RESPONSE) + model = ml.update_model(MODEL_1) + assert model == CREATED_UPDATED_MODEL_1 + + def test_returns_locked(self): + recorder = instrument_ml_service( + status=[200, 200], + payload=[OPERATION_NOT_DONE_RESPONSE, LOCKED_MODEL_2_RESPONSE]) + expected_model = ml.Model.from_dict(LOCKED_MODEL_JSON_2) + model = ml.update_model(MODEL_1) + + assert model == expected_model + assert len(recorder) == 2 + _assert_request(recorder[0], 'PATCH', TestUpdateModel._url(PROJECT_ID, MODEL_ID_1)) + _assert_request(recorder[1], 'GET', TestUpdateModel._url(PROJECT_ID, MODEL_ID_1)) + + def test_operation_error(self): + instrument_ml_service(status=200, payload=OPERATION_ERROR_RESPONSE) + with pytest.raises(Exception) as excinfo: + ml.update_model(MODEL_1) + # The http request succeeded, the operation returned contains an update failure + check_operation_error(excinfo, OPERATION_ERROR_EXPECTED_STATUS, OPERATION_ERROR_MSG) + + def test_malformed_operation(self): + instrument_ml_service(status=200, payload=OPERATION_MALFORMED_RESPONSE) + with pytest.raises(Exception) as 
excinfo: + ml.update_model(MODEL_1) + check_error(excinfo, exceptions.UnknownError, 'Internal Error: Malformed Operation.') + + def test_rpc_error(self): + create_recorder = instrument_ml_service( + status=400, payload=ERROR_RESPONSE_BAD_REQUEST) + with pytest.raises(Exception) as excinfo: + ml.update_model(MODEL_1) + check_firebase_error( + excinfo, + ERROR_STATUS_BAD_REQUEST, + ERROR_CODE_BAD_REQUEST, + ERROR_MSG_BAD_REQUEST + ) + assert len(create_recorder) == 1 + + @pytest.mark.parametrize('model', INVALID_MODEL_ARGS) + def test_not_model(self, model): + with pytest.raises(Exception) as excinfo: + ml.update_model(model) + check_error(excinfo, TypeError, 'Model must be an ml.Model.') + + def test_missing_display_name(self): + with pytest.raises(Exception) as excinfo: + ml.update_model(ml.Model.from_dict({})) + check_error(excinfo, ValueError, 'Model must have a display name.') + + def test_missing_op_name(self): + instrument_ml_service(status=200, payload=OPERATION_MISSING_NAME_RESPONSE) + with pytest.raises(Exception) as excinfo: + ml.update_model(MODEL_1) + check_error(excinfo, TypeError) + + @pytest.mark.parametrize('op_name', INVALID_OP_NAME_ARGS) + def test_invalid_op_name(self, op_name): + payload = json.dumps({'name': op_name}) + instrument_ml_service(status=200, payload=payload) + with pytest.raises(Exception) as excinfo: + ml.update_model(MODEL_1) + check_error(excinfo, ValueError, 'Operation name format is invalid.') + + +class TestPublishUnpublish: + """Tests ml.publish_model and ml.unpublish_model.""" + + PUBLISH_UNPUBLISH_WITH_ARGS = [ + (ml.publish_model, True), + (ml.unpublish_model, False) + ] + PUBLISH_UNPUBLISH_FUNCS = [item[0] for item in PUBLISH_UNPUBLISH_WITH_ARGS] + + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': PROJECT_ID}) + ml._MLService.POLL_BASE_WAIT_TIME_SECONDS = 0.1 # shorter for test + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() 
+ + @staticmethod + def _update_url(project_id, model_id): + update_url = f'projects/{project_id}/models/{model_id}?updateMask=state.published' + return BASE_URL + update_url + + @staticmethod + def _get_url(project_id, model_id): + return BASE_URL + f'projects/{project_id}/models/{model_id}' + + @staticmethod + def _op_url(project_id): + return BASE_URL + f'projects/{project_id}/operations/123' + + @pytest.mark.parametrize('publish_function, published', PUBLISH_UNPUBLISH_WITH_ARGS) + def test_immediate_done(self, publish_function, published): + recorder = instrument_ml_service(status=200, payload=OPERATION_DONE_RESPONSE) + model = publish_function(MODEL_ID_1) + assert model == CREATED_UPDATED_MODEL_1 + assert len(recorder) == 1 + _assert_request( + recorder[0], 'PATCH', TestPublishUnpublish._update_url(PROJECT_ID, MODEL_ID_1)) + body = json.loads(recorder[0].body.decode()) + assert body.get('state', {}).get('published', None) is published + + @pytest.mark.parametrize('publish_function', PUBLISH_UNPUBLISH_FUNCS) + def test_returns_locked(self, publish_function): + recorder = instrument_ml_service( + status=[200, 200], + payload=[OPERATION_NOT_DONE_RESPONSE, LOCKED_MODEL_2_RESPONSE]) + expected_model = ml.Model.from_dict(LOCKED_MODEL_JSON_2) + model = publish_function(MODEL_ID_1) + + assert model == expected_model + assert len(recorder) == 2 + _assert_request( + recorder[0], 'PATCH', TestPublishUnpublish._update_url(PROJECT_ID, MODEL_ID_1)) + _assert_request( + recorder[1], 'GET', TestPublishUnpublish._get_url(PROJECT_ID, MODEL_ID_1)) + + @pytest.mark.parametrize('publish_function', PUBLISH_UNPUBLISH_FUNCS) + def test_operation_error(self, publish_function): + instrument_ml_service(status=200, payload=OPERATION_ERROR_RESPONSE) + with pytest.raises(Exception) as excinfo: + publish_function(MODEL_ID_1) + # The http request succeeded, the operation returned contains an update failure + check_operation_error(excinfo, OPERATION_ERROR_EXPECTED_STATUS, OPERATION_ERROR_MSG) 
+ + @pytest.mark.parametrize('publish_function', PUBLISH_UNPUBLISH_FUNCS) + def test_malformed_operation(self, publish_function): + instrument_ml_service(status=200, payload=OPERATION_MALFORMED_RESPONSE) + with pytest.raises(Exception) as excinfo: + publish_function(MODEL_ID_1) + check_error(excinfo, exceptions.UnknownError, 'Internal Error: Malformed Operation.') + + @pytest.mark.parametrize('publish_function', PUBLISH_UNPUBLISH_FUNCS) + def test_rpc_error(self, publish_function): + create_recorder = instrument_ml_service( + status=400, payload=ERROR_RESPONSE_BAD_REQUEST) + with pytest.raises(Exception) as excinfo: + publish_function(MODEL_ID_1) + check_firebase_error( + excinfo, + ERROR_STATUS_BAD_REQUEST, + ERROR_CODE_BAD_REQUEST, + ERROR_MSG_BAD_REQUEST + ) + assert len(create_recorder) == 1 + + +class TestGetModel: + """Tests ml.get_model.""" + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': PROJECT_ID}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + @staticmethod + def _url(project_id, model_id): + return BASE_URL + f'projects/{project_id}/models/{model_id}' + + def test_get_model(self): + recorder = instrument_ml_service(status=200, payload=DEFAULT_GET_RESPONSE) + model = ml.get_model(MODEL_ID_1) + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', TestGetModel._url(PROJECT_ID, MODEL_ID_1)) + assert model == MODEL_1 + assert model.model_id == MODEL_ID_1 + assert model.display_name == DISPLAY_NAME_1 + + @pytest.mark.parametrize('model_id, exc_type', INVALID_MODEL_ID_ARGS) + def test_get_model_validation_errors(self, model_id, exc_type): + with pytest.raises(exc_type) as excinfo: + ml.get_model(model_id) + check_error(excinfo, exc_type) + + def test_get_model_error(self): + recorder = instrument_ml_service(status=404, payload=ERROR_RESPONSE_NOT_FOUND) + with pytest.raises(exceptions.NotFoundError) as excinfo: + ml.get_model(MODEL_ID_1) + 
check_firebase_error( + excinfo, + ERROR_STATUS_NOT_FOUND, + ERROR_CODE_NOT_FOUND, + ERROR_MSG_NOT_FOUND + ) + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', TestGetModel._url(PROJECT_ID, MODEL_ID_1)) + + def test_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id') + with pytest.raises(ValueError): + ml.get_model(MODEL_ID_1, app) + testutils.run_without_project_id(evaluate) + + +class TestDeleteModel: + """Tests ml.delete_model.""" + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': PROJECT_ID}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + @staticmethod + def _url(project_id, model_id): + return BASE_URL + f'projects/{project_id}/models/{model_id}' + + def test_delete_model(self): + recorder = instrument_ml_service(status=200, payload=EMPTY_RESPONSE) + ml.delete_model(MODEL_ID_1) # no response for delete + assert len(recorder) == 1 + _assert_request(recorder[0], 'DELETE', TestDeleteModel._url(PROJECT_ID, MODEL_ID_1)) + + @pytest.mark.parametrize('model_id, exc_type', INVALID_MODEL_ID_ARGS) + def test_delete_model_validation_errors(self, model_id, exc_type): + with pytest.raises(exc_type) as excinfo: + ml.delete_model(model_id) + check_error(excinfo, exc_type) + + def test_delete_model_error(self): + recorder = instrument_ml_service(status=404, payload=ERROR_RESPONSE_NOT_FOUND) + with pytest.raises(exceptions.NotFoundError) as excinfo: + ml.delete_model(MODEL_ID_1) + check_firebase_error( + excinfo, + ERROR_STATUS_NOT_FOUND, + ERROR_CODE_NOT_FOUND, + ERROR_MSG_NOT_FOUND + ) + assert len(recorder) == 1 + _assert_request(recorder[0], 'DELETE', self._url(PROJECT_ID, MODEL_ID_1)) + + def test_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id') + with pytest.raises(ValueError): + 
ml.delete_model(MODEL_ID_1, app) + testutils.run_without_project_id(evaluate) + + +class TestListModels: + """Tests ml.list_models.""" + @classmethod + def setup_class(cls): + cred = testutils.MockCredential() + firebase_admin.initialize_app(cred, {'projectId': PROJECT_ID}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + + @staticmethod + def _url(project_id): + return BASE_URL + f'projects/{project_id}/models' + + @staticmethod + def _check_page(page, model_count): + assert isinstance(page, ml.ListModelsPage) + assert len(page.models) == model_count + for model in page.models: + assert isinstance(model, ml.Model) + + def test_list_models_no_args(self): + recorder = instrument_ml_service(status=200, payload=DEFAULT_LIST_RESPONSE) + models_page = ml.list_models() + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', TestListModels._url(PROJECT_ID)) + TestListModels._check_page(models_page, 2) + assert models_page.has_next_page + assert models_page.next_page_token == NEXT_PAGE_TOKEN + assert models_page.models[0] == MODEL_1 + assert models_page.models[1] == MODEL_2 + + def test_list_models_with_all_args(self): + recorder = instrument_ml_service(status=200, payload=LAST_PAGE_LIST_RESPONSE) + models_page = ml.list_models( + 'display_name=displayName3', + page_size=10, + page_token=PAGE_TOKEN) + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', ( + TestListModels._url(PROJECT_ID) + + f'?filter=display_name%3DdisplayName3&page_size=10&page_token={PAGE_TOKEN}' + )) + assert isinstance(models_page, ml.ListModelsPage) + assert len(models_page.models) == 1 + assert models_page.models[0] == MODEL_3 + assert not models_page.has_next_page + + @pytest.mark.parametrize('list_filter', INVALID_STRING_OR_NONE_ARGS) + def test_list_models_list_filter_validation(self, list_filter): + with pytest.raises(TypeError) as excinfo: + ml.list_models(list_filter=list_filter) + check_error(excinfo, TypeError, 'List filter must be a string or 
None.') + + @pytest.mark.parametrize('page_size, exc_type, error_message', [ + ('abc', TypeError, 'Page size must be a number or None.'), + (4.2, TypeError, 'Page size must be a number or None.'), + ([], TypeError, 'Page size must be a number or None.'), + ({}, TypeError, 'Page size must be a number or None.'), + (True, TypeError, 'Page size must be a number or None.'), + (-1, ValueError, PAGE_SIZE_VALUE_ERROR_MSG), + (0, ValueError, PAGE_SIZE_VALUE_ERROR_MSG), + (ml._MAX_PAGE_SIZE + 1, ValueError, PAGE_SIZE_VALUE_ERROR_MSG) + ]) + def test_list_models_page_size_validation(self, page_size, exc_type, error_message): + with pytest.raises(exc_type) as excinfo: + ml.list_models(page_size=page_size) + check_error(excinfo, exc_type, error_message) + + @pytest.mark.parametrize('page_token', INVALID_STRING_OR_NONE_ARGS) + def test_list_models_page_token_validation(self, page_token): + with pytest.raises(TypeError) as excinfo: + ml.list_models(page_token=page_token) + check_error(excinfo, TypeError, 'Page token must be a string or None.') + + def test_list_models_error(self): + recorder = instrument_ml_service(status=400, payload=ERROR_RESPONSE_BAD_REQUEST) + with pytest.raises(exceptions.InvalidArgumentError) as excinfo: + ml.list_models() + check_firebase_error( + excinfo, + ERROR_STATUS_BAD_REQUEST, + ERROR_CODE_BAD_REQUEST, + ERROR_MSG_BAD_REQUEST + ) + assert len(recorder) == 1 + _assert_request(recorder[0], 'GET', TestListModels._url(PROJECT_ID)) + + def test_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id') + with pytest.raises(ValueError): + ml.list_models(app=app) + testutils.run_without_project_id(evaluate) + + def test_list_single_page(self): + recorder = instrument_ml_service(status=200, payload=LAST_PAGE_LIST_RESPONSE) + models_page = ml.list_models() + assert len(recorder) == 1 + assert models_page.next_page_token == '' + assert models_page.has_next_page is False + assert 
models_page.get_next_page() is None + models = list(models_page.iterate_all()) + assert len(models) == 1 + + def test_list_multiple_pages(self): + # Page 1 + recorder = instrument_ml_service(status=200, payload=DEFAULT_LIST_RESPONSE) + page = ml.list_models() + assert len(recorder) == 1 + assert len(page.models) == 2 + assert page.next_page_token == NEXT_PAGE_TOKEN + assert page.has_next_page is True + + # Page 2 + recorder = instrument_ml_service(status=200, payload=LAST_PAGE_LIST_RESPONSE) + page_2 = page.get_next_page() + assert len(recorder) == 1 + assert len(page_2.models) == 1 + assert page_2.next_page_token == '' + assert page_2.has_next_page is False + assert page_2.get_next_page() is None + + def test_list_models_paged_iteration(self): + # Page 1 + recorder = instrument_ml_service(status=200, payload=DEFAULT_LIST_RESPONSE) + page = ml.list_models() + assert page.next_page_token == NEXT_PAGE_TOKEN + assert page.has_next_page is True + iterator = page.iterate_all() + for index in range(2): + model = next(iterator) + assert model.display_name == f'displayName{index+1}' + assert len(recorder) == 1 + + # Page 2 + recorder = instrument_ml_service(status=200, payload=LAST_PAGE_LIST_RESPONSE) + model = next(iterator) + assert model.display_name == DISPLAY_NAME_3 + with pytest.raises(StopIteration): + next(iterator) + + def test_list_models_stop_iteration(self): + recorder = instrument_ml_service(status=200, payload=ONE_PAGE_LIST_RESPONSE) + page = ml.list_models() + assert len(recorder) == 1 + assert len(page.models) == 3 + iterator = page.iterate_all() + models = list(iterator) + assert len(page.models) == 3 + with pytest.raises(StopIteration): + next(iterator) + assert len(models) == 3 + + def test_list_models_no_models(self): + recorder = instrument_ml_service(status=200, payload=NO_MODELS_LIST_RESPONSE) + page = ml.list_models() + assert len(recorder) == 1 + assert len(page.models) == 0 + models = list(page.iterate_all()) + assert len(models) == 0 diff --git 
a/tests/test_project_management.py b/tests/test_project_management.py new file mode 100644 index 000000000..89e48c2e5 --- /dev/null +++ b/tests/test_project_management.py @@ -0,0 +1,1252 @@ +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for firebase_admin.project_management.""" + +import base64 +import json + +import pytest + +import firebase_admin +from firebase_admin import exceptions +from firebase_admin import project_management +from firebase_admin import _http_client +from firebase_admin import _utils +from tests import testutils + +OPERATION_IN_PROGRESS_RESPONSE = json.dumps({ + 'name': 'operations/abcdefg', + 'done': False +}) +OPERATION_FAILED_RESPONSE = json.dumps({ + 'name': 'operations/abcdefg', + 'done': True, + 'error': 'some error', +}) +ANDROID_APP_OPERATION_SUCCESSFUL_RESPONSE = json.dumps({ + 'name': 'operations/abcdefg', + 'done': True, + 'response': { + 'name': 'projects/test-project-id/androidApps/1:12345678:android:deadbeef', + 'appId': '1:12345678:android:deadbeef', + 'displayName': 'My Android App', + 'projectId': 'test-project-id', + 'packageName': 'com.hello.world.android', + }, +}) +ANDROID_APP_NO_DISPLAY_NAME_OPERATION_SUCCESSFUL_RESPONSE = json.dumps({ + 'name': 'operations/abcdefg', + 'done': True, + 'response': { + 'name': 'projects/test-project-id/androidApps/1:12345678:android:deadbeef', + 'appId': '1:12345678:android:deadbeef', + 'projectId': 'test-project-id', + 'packageName': 
'com.hello.world.android', + }, +}) +ANDROID_APP_METADATA_RESPONSE = json.dumps({ + 'name': 'projects/test-project-id/androidApps/1:12345678:android:deadbeef', + 'appId': '1:12345678:android:deadbeef', + 'displayName': 'My Android App', + 'projectId': 'test-project-id', + 'packageName': 'com.hello.world.android', +}) +ANDROID_APP_NO_DISPLAY_NAME_METADATA_RESPONSE = json.dumps({ + 'name': 'projects/test-project-id/androidApps/1:12345678:android:deadbeef', + 'appId': '1:12345678:android:deadbeef', + 'projectId': 'test-project-id', + 'packageName': 'com.hello.world.android', +}) +IOS_APP_OPERATION_SUCCESSFUL_RESPONSE = json.dumps({ + 'name': 'operations/abcdefg', + 'done': True, + 'response': { + 'name': 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + 'appId': '1:12345678:ios:ca5cade5', + 'displayName': 'My iOS App', + 'projectId': 'test-project-id', + 'bundleId': 'com.hello.world.ios', + }, +}) +IOS_APP_NO_DISPLAY_NAME_OPERATION_SUCCESSFUL_RESPONSE = json.dumps({ + 'name': 'operations/abcdefg', + 'done': True, + 'response': { + 'name': 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + 'appId': '1:12345678:ios:ca5cade5', + 'projectId': 'test-project-id', + 'bundleId': 'com.hello.world.ios', + }, +}) +IOS_APP_METADATA_RESPONSE = json.dumps({ + 'name': 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + 'appId': '1:12345678:ios:ca5cade5', + 'displayName': 'My iOS App', + 'projectId': 'test-project-id', + 'bundleId': 'com.hello.world.ios', +}) +IOS_APP_NO_DISPLAY_NAME_METADATA_RESPONSE = json.dumps({ + 'name': 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + 'appId': '1:12345678:ios:ca5cade5', + 'projectId': 'test-project-id', + 'bundleId': 'com.hello.world.ios', +}) + +LIST_ANDROID_APPS_RESPONSE = json.dumps({'apps': [ + { + 'name': 'projects/test-project-id/androidApps/1:12345678:android:deadbeef', + 'appId': '1:12345678:android:deadbeef', + 'displayName': 'My Android App', + 'projectId': 'test-project-id', + 
'packageName': 'com.hello.world.android', + }, + { + 'name': 'projects/test-project-id/androidApps/1:12345678:android:deadbeefcafe', + 'appId': '1:12345678:android:deadbeefcafe', + 'projectId': 'test-project-id', + 'packageName': 'com.hello.world.android2', + }]}) +LIST_ANDROID_APPS_PAGE_1_RESPONSE = json.dumps({ + 'apps': [{ + 'name': 'projects/test-project-id/androidApps/1:12345678:android:deadbeef', + 'appId': '1:12345678:android:deadbeef', + 'displayName': 'My Android App', + 'projectId': 'test-project-id', + 'packageName': 'com.hello.world.android', + }], + 'nextPageToken': 'nextpagetoken', +}) +LIST_ANDROID_APPS_PAGE_2_RESPONSE = json.dumps({ + 'apps': [{ + 'name': 'projects/test-project-id/androidApps/1:12345678:android:deadbeefcafe', + 'appId': '1:12345678:android:deadbeefcafe', + 'projectId': 'test-project-id', + 'packageName': 'com.hello.world.android2', + }]}) +LIST_IOS_APPS_RESPONSE = json.dumps({'apps': [ + { + 'name': 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + 'appId': '1:12345678:ios:ca5cade5', + 'displayName': 'My iOS App', + 'projectId': 'test-project-id', + 'bundleId': 'com.hello.world.ios', + }, + { + 'name': 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5cafe', + 'appId': '1:12345678:ios:ca5cade5cafe', + 'projectId': 'test-project-id', + 'bundleId': 'com.hello.world.ios2', + }]}) +LIST_IOS_APPS_PAGE_1_RESPONSE = json.dumps({ + 'apps': [{ + 'name': 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + 'appId': '1:12345678:ios:ca5cade5', + 'displayName': 'My iOS App', + 'projectId': 'test-project-id', + 'bundleId': 'com.hello.world.ios', + }], + 'nextPageToken': 'nextpagetoken', +}) +LIST_IOS_APPS_PAGE_2_RESPONSE = json.dumps({ + 'apps': [{ + 'name': 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5cafe', + 'appId': '1:12345678:ios:ca5cade5cafe', + 'projectId': 'test-project-id', + 'bundleId': 'com.hello.world.ios2', + }]}) + +# In Python 2.7, the base64 module works with strings, while in Python 3, it 
works with bytes +# objects. This line works in both versions. +TEST_APP_ENCODED_CONFIG = base64.standard_b64encode('hello world'.encode('utf-8')).decode('utf-8') +TEST_APP_CONFIG_RESPONSE = json.dumps({ + 'configFilename': 'hello', + 'configFileContents': TEST_APP_ENCODED_CONFIG, +}) + +SHA_1_CERTIFICATE = project_management.SHACertificate( + '123456789a123456789a123456789a123456789a', + 'projects/-/androidApps/1:12345678:android:deadbeef/sha/name1') +SHA_256_CERTIFICATE = project_management.SHACertificate( + '123456789a123456789a123456789a123456789a123456789a123456789a1234', + 'projects/-/androidApps/1:12345678:android:deadbeef/sha/name256') +GET_SHA_CERTIFICATES_RESPONSE = json.dumps({'certificates': [ + {'name': cert.name, 'shaHash': cert.sha_hash, 'certType': cert.cert_type} + for cert in [SHA_1_CERTIFICATE, SHA_256_CERTIFICATE] +]}) + +ANDROID_APP_METADATA = project_management.AndroidAppMetadata( + package_name='com.hello.world.android', + name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + app_id='1:12345678:android:deadbeef', + display_name='My Android App', + project_id='test-project-id') +IOS_APP_METADATA = project_management.IOSAppMetadata( + bundle_id='com.hello.world.ios', + name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + app_id='1:12345678:android:deadbeef', + display_name='My iOS App', + project_id='test-project-id') + +ALREADY_EXISTS_RESPONSE = ('{"error": {"status": "ALREADY_EXISTS", ' + '"message": "The resource already exists"}}') +NOT_FOUND_RESPONSE = '{"error": {"message": "Failed to find the resource"}}' +UNAVAILABLE_RESPONSE = '{"error": {"message": "Backend servers are over capacity"}}' + +class TestAndroidAppMetadata: + + def test_create_android_app_metadata_errors(self): + # package_name must be a non-empty string. 
+ with pytest.raises(ValueError): + project_management.AndroidAppMetadata( + package_name='', + name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + app_id='1:12345678:android:deadbeef', + display_name='My Android App', + project_id='test-project-id') + # name must be a non-empty string. + with pytest.raises(ValueError): + project_management.AndroidAppMetadata( + package_name='com.hello.world.android', + name='', + app_id='1:12345678:android:deadbeef', + display_name='My Android App', + project_id='test-project-id') + # app_id must be a non-empty string. + with pytest.raises(ValueError): + project_management.AndroidAppMetadata( + package_name='com.hello.world.android', + name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + app_id='', + display_name='My Android App', + project_id='test-project-id') + # display_name must be a string or None. + with pytest.raises(ValueError): + project_management.AndroidAppMetadata( + package_name='com.hello.world.android', + name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + app_id='1:12345678:android:deadbeef', + display_name=0, + project_id='test-project-id') + # project_id must be a nonempty string. 
+ with pytest.raises(ValueError): + project_management.AndroidAppMetadata( + package_name='com.hello.world.android', + name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + app_id='1:12345678:android:deadbeef', + display_name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + project_id='') + + def test_android_app_metadata_eq_and_hash(self): + metadata_1 = ANDROID_APP_METADATA + metadata_2 = project_management.AndroidAppMetadata( + package_name='different', + name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + app_id='1:12345678:android:deadbeef', + display_name='My Android App', + project_id='test-project-id') + metadata_3 = project_management.AndroidAppMetadata( + package_name='com.hello.world.android', + name='different', + app_id='1:12345678:android:deadbeef', + display_name='My Android App', + project_id='test-project-id') + metadata_4 = project_management.AndroidAppMetadata( + package_name='com.hello.world.android', + name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + app_id='different', + display_name='My Android App', + project_id='test-project-id') + metadata_5 = project_management.AndroidAppMetadata( + package_name='com.hello.world.android', + name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + app_id='1:12345678:android:deadbeef', + display_name=None, + project_id='test-project-id') + metadata_6 = project_management.AndroidAppMetadata( + package_name='com.hello.world.android', + name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + app_id='1:12345678:android:deadbeef', + display_name='My Android App', + project_id='different') + metadata_7 = project_management.AndroidAppMetadata( + package_name='com.hello.world.android', + name='projects/test-project-id/androidApps/1:12345678:android:deadbeef', + app_id='1:12345678:android:deadbeef', + display_name='My Android App', + project_id='test-project-id') + ios_metadata = 
IOS_APP_METADATA + + # Don't trigger __ne__. + assert not metadata_1 == ios_metadata # pylint: disable=unneeded-not + assert metadata_1 != ios_metadata + assert metadata_1 != metadata_2 + assert metadata_1 != metadata_3 + assert metadata_1 != metadata_4 + assert metadata_1 != metadata_5 + assert metadata_1 != metadata_6 + assert metadata_1 == metadata_7 + assert set([metadata_1, metadata_2, metadata_7]) == set([metadata_1, metadata_2]) + + def test_android_app_metadata_package_name(self): + assert ANDROID_APP_METADATA.package_name == 'com.hello.world.android' + + def test_android_app_metadata_name(self): + assert (ANDROID_APP_METADATA._name == + 'projects/test-project-id/androidApps/1:12345678:android:deadbeef') + + def test_android_app_metadata_app_id(self): + assert ANDROID_APP_METADATA.app_id == '1:12345678:android:deadbeef' + + def test_android_app_metadata_display_name(self): + assert ANDROID_APP_METADATA.display_name == 'My Android App' + + def test_android_app_metadata_project_id(self): + assert ANDROID_APP_METADATA.project_id == 'test-project-id' + + +class TestIOSAppMetadata: + + def test_create_ios_app_metadata_errors(self): + # bundle_id must be a non-empty string. + with pytest.raises(ValueError): + project_management.IOSAppMetadata( + bundle_id='', + name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + app_id='1:12345678:android:deadbeef', + display_name='My iOS App', + project_id='test-project-id') + # name must be a non-empty string. + with pytest.raises(ValueError): + project_management.IOSAppMetadata( + bundle_id='com.hello.world.ios', + name='', + app_id='1:12345678:android:deadbeef', + display_name='My iOS App', + project_id='test-project-id') + # app_id must be a non-empty string. 
+ with pytest.raises(ValueError): + project_management.IOSAppMetadata( + bundle_id='com.hello.world.ios', + name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + app_id='', + display_name='My iOS App', + project_id='test-project-id') + # display_name must be a string or None. + with pytest.raises(ValueError): + project_management.IOSAppMetadata( + bundle_id='com.hello.world.ios', + name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + app_id='1:12345678:android:deadbeef', + display_name=0, + project_id='test-project-id') + # project_id must be a nonempty string. + with pytest.raises(ValueError): + project_management.IOSAppMetadata( + bundle_id='com.hello.world.ios', + name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + app_id='1:12345678:android:deadbeef', + display_name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + project_id='') + + def test_ios_app_metadata_eq_and_hash(self): + metadata_1 = IOS_APP_METADATA + metadata_2 = project_management.IOSAppMetadata( + bundle_id='different', + name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + app_id='1:12345678:android:deadbeef', + display_name='My iOS App', + project_id='test-project-id') + metadata_3 = project_management.IOSAppMetadata( + bundle_id='com.hello.world.ios', + name='different', + app_id='1:12345678:android:deadbeef', + display_name='My iOS App', + project_id='test-project-id') + metadata_4 = project_management.IOSAppMetadata( + bundle_id='com.hello.world.ios', + name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + app_id='different', + display_name='My iOS App', + project_id='test-project-id') + metadata_5 = project_management.IOSAppMetadata( + bundle_id='com.hello.world.ios', + name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + app_id='1:12345678:android:deadbeef', + display_name='different', + project_id='test-project-id') + metadata_6 = project_management.IOSAppMetadata( + 
bundle_id='com.hello.world.ios', + name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + app_id='1:12345678:android:deadbeef', + display_name='My iOS App', + project_id='different') + metadata_7 = project_management.IOSAppMetadata( + bundle_id='com.hello.world.ios', + name='projects/test-project-id/iosApps/1:12345678:ios:ca5cade5', + app_id='1:12345678:android:deadbeef', + display_name='My iOS App', + project_id='test-project-id') + android_metadata = ANDROID_APP_METADATA + + # Don't trigger __ne__. + assert not metadata_1 == android_metadata # pylint: disable=unneeded-not + assert metadata_1 != android_metadata + assert metadata_1 != metadata_2 + assert metadata_1 != metadata_3 + assert metadata_1 != metadata_4 + assert metadata_1 != metadata_5 + assert metadata_1 != metadata_6 + assert metadata_1 == metadata_7 + assert set([metadata_1, metadata_2, metadata_7]) == set([metadata_1, metadata_2]) + + def test_ios_app_metadata_bundle_id(self): + assert IOS_APP_METADATA.bundle_id == 'com.hello.world.ios' + + def test_ios_app_metadata_name(self): + assert IOS_APP_METADATA._name == 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5' + + def test_ios_app_metadata_app_id(self): + assert IOS_APP_METADATA.app_id == '1:12345678:android:deadbeef' + + def test_ios_app_metadata_display_name(self): + assert IOS_APP_METADATA.display_name == 'My iOS App' + + def test_ios_app_metadata_project_id(self): + assert IOS_APP_METADATA.project_id == 'test-project-id' + + +class TestSHACertificate: + def test_create_sha_certificate_errors(self): + # sha_hash cannot be None. + with pytest.raises(ValueError): + project_management.SHACertificate(sha_hash=None) + # sha_hash must be a string. + with pytest.raises(ValueError): + project_management.SHACertificate(sha_hash=0x123456789a123456789a123456789a123456789a) + # sha_hash must be a valid SHA-1 or SHA-256 hash. 
+ with pytest.raises(ValueError): + project_management.SHACertificate(sha_hash='123456789a123456789') + with pytest.raises(ValueError): + project_management.SHACertificate(sha_hash='123456789a123456789a123456789a123456oops') + + def test_sha_certificate_eq(self): + sha_cert_1 = project_management.SHACertificate( + '123456789a123456789a123456789a123456789a', + 'projects/-/androidApps/1:12345678:android:deadbeef/sha/name1') + # sha_hash is different from sha_cert_1, but name is the same. + sha_cert_2 = project_management.SHACertificate( + '0000000000000000000000000000000000000000', + 'projects/-/androidApps/1:12345678:android:deadbeef/sha/name1') + # name is different from sha_cert_1, but sha_hash is the same. + sha_cert_3 = project_management.SHACertificate( + '123456789a123456789a123456789a123456789a', None) + # name is different from sha_cert_1, but sha_hash is the same. + sha_cert_4 = project_management.SHACertificate( + '123456789a123456789a123456789a123456789a', 'projects/-/androidApps/{0}/sha/notname1') + # sha_hash and cert_type are different from sha_cert_1, but name is the same. + sha_cert_5 = project_management.SHACertificate( + '123456789a123456789a123456789a123456789a123456789a123456789a1234', + 'projects/-/androidApps/{0}/sha/name1') + # Exactly the same as sha_cert_1. + sha_cert_6 = project_management.SHACertificate( + '123456789a123456789a123456789a123456789a', + 'projects/-/androidApps/1:12345678:android:deadbeef/sha/name1') + not_a_sha_cert = { + 'name': 'projects/-/androidApps/1:12345678:android:deadbeef/sha/name1', + 'sha_hash': '123456789a123456789a123456789a123456789a', + 'cert_type': 'SHA_1', + } + + assert sha_cert_1 != sha_cert_2 + assert sha_cert_1 != sha_cert_3 + assert sha_cert_1 != sha_cert_4 + assert sha_cert_1 != sha_cert_5 + assert sha_cert_1 == sha_cert_6 + # Don't trigger __ne__. 
+ assert not sha_cert_1 == not_a_sha_cert # pylint: disable=unneeded-not + assert sha_cert_1 != not_a_sha_cert + + def test_sha_certificate_name(self): + assert (SHA_1_CERTIFICATE.name == + 'projects/-/androidApps/1:12345678:android:deadbeef/sha/name1') + assert (SHA_256_CERTIFICATE.name == + 'projects/-/androidApps/1:12345678:android:deadbeef/sha/name256') + + def test_sha_certificate_sha_hash(self): + assert (SHA_1_CERTIFICATE.sha_hash == + '123456789a123456789a123456789a123456789a') + assert (SHA_256_CERTIFICATE.sha_hash == + '123456789a123456789a123456789a123456789a123456789a123456789a1234') + + def test_sha_certificate_cert_type(self): + assert SHA_1_CERTIFICATE.cert_type == 'SHA_1' + assert SHA_256_CERTIFICATE.cert_type == 'SHA_256' + + +class BaseProjectManagementTest: + @classmethod + def setup_class(cls): + project_management._ProjectManagementService.POLL_BASE_WAIT_TIME_SECONDS = 0.01 + project_management._ProjectManagementService.MAXIMUM_POLLING_ATTEMPTS = 3 + firebase_admin.initialize_app( + testutils.MockCredential(), {'projectId': 'test-project-id'}) + + @classmethod + def teardown_class(cls): + testutils.cleanup_apps() + project_management._ProjectManagementService.POLL_BASE_WAIT_TIME_SECONDS = 0.5 + + def _instrument_service(self, statuses, responses, app=None): + if not app: + app = firebase_admin.get_app() + project_management_service = project_management._get_project_management_service(app) + recorder = [] + project_management_service._client.session.mount( + 'https://firebase.googleapis.com', + testutils.MockMultiRequestAdapter(responses, statuses, recorder)) + return recorder + + def _assert_request_is_correct( + self, request, expected_method, expected_url, expected_body=None): + assert request.method == expected_method + assert request.url == expected_url + assert request.headers['X-Client-Version'] == f'Python/Admin/{firebase_admin.__version__}' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert 
request.headers['x-goog-api-client'] == expected_metrics_header + if expected_body is None: + assert request.body is None + else: + assert json.loads(request.body.decode()) == expected_body + + +class TestTimeout(BaseProjectManagementTest): + + def test_default_timeout(self): + app = firebase_admin.get_app() + project_management_service = project_management._get_project_management_service(app) + assert project_management_service._client.timeout == _http_client.DEFAULT_TIMEOUT_SECONDS + + @pytest.mark.parametrize('timeout', [4, None]) + def test_custom_timeout(self, timeout): + options = { + 'httpTimeout': timeout, + 'projectId': 'test-project-id' + } + app = firebase_admin.initialize_app( + testutils.MockCredential(), options, f'timeout-{timeout}') + project_management_service = project_management._get_project_management_service(app) + assert project_management_service._client.timeout == timeout + + +class TestCreateAndroidApp(BaseProjectManagementTest): + _CREATION_URL = 'https://firebase.googleapis.com/v1beta1/projects/test-project-id/androidApps' + + def test_create_android_app_without_display_name(self): + recorder = self._instrument_service( + statuses=[200, 200, 200], + responses=[ + OPERATION_IN_PROGRESS_RESPONSE, # Request to create Android app asynchronously. + OPERATION_IN_PROGRESS_RESPONSE, # Creation operation is still not done. + ANDROID_APP_NO_DISPLAY_NAME_OPERATION_SUCCESSFUL_RESPONSE, # Operation completed. 
+ ]) + + android_app = project_management.create_android_app( + package_name='com.hello.world.android') + + assert android_app.app_id == '1:12345678:android:deadbeef' + assert len(recorder) == 3 + body = {'packageName': 'com.hello.world.android'} + self._assert_request_is_correct( + recorder[0], 'POST', TestCreateAndroidApp._CREATION_URL, body) + self._assert_request_is_correct( + recorder[1], 'GET', 'https://firebase.googleapis.com/v1/operations/abcdefg') + self._assert_request_is_correct( + recorder[2], 'GET', 'https://firebase.googleapis.com/v1/operations/abcdefg') + + def test_create_android_app(self): + recorder = self._instrument_service( + statuses=[200, 200, 200], + responses=[ + OPERATION_IN_PROGRESS_RESPONSE, # Request to create Android app asynchronously. + OPERATION_IN_PROGRESS_RESPONSE, # Creation operation is still not done. + ANDROID_APP_OPERATION_SUCCESSFUL_RESPONSE, # Creation operation completed. + ]) + + android_app = project_management.create_android_app( + package_name='com.hello.world.android', + display_name='My Android App') + + assert android_app.app_id == '1:12345678:android:deadbeef' + assert len(recorder) == 3 + body = { + 'packageName': 'com.hello.world.android', + 'displayName': 'My Android App', + } + self._assert_request_is_correct( + recorder[0], 'POST', TestCreateAndroidApp._CREATION_URL, body) + self._assert_request_is_correct( + recorder[1], 'GET', 'https://firebase.googleapis.com/v1/operations/abcdefg') + self._assert_request_is_correct( + recorder[2], 'GET', 'https://firebase.googleapis.com/v1/operations/abcdefg') + + def test_create_android_app_already_exists(self): + recorder = self._instrument_service(statuses=[409], responses=[ALREADY_EXISTS_RESPONSE]) + + with pytest.raises(exceptions.AlreadyExistsError) as excinfo: + project_management.create_android_app( + package_name='com.hello.world.android', + display_name='My Android App') + + assert 'The resource already exists' in str(excinfo.value) + assert excinfo.value.cause is 
not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_create_android_app_polling_rpc_error(self): + recorder = self._instrument_service( + statuses=[200, 200, 503], # Error 503 means that backend servers are over capacity. + responses=[ + OPERATION_IN_PROGRESS_RESPONSE, # Request to create Android app asynchronously. + OPERATION_IN_PROGRESS_RESPONSE, # Creation operation is still not done. + UNAVAILABLE_RESPONSE, # Error 503. + ]) + + with pytest.raises(exceptions.UnavailableError) as excinfo: + project_management.create_android_app( + package_name='com.hello.world.android', + display_name='My Android App') + + assert 'Backend servers are over capacity' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 3 + + def test_create_android_app_polling_failure(self): + recorder = self._instrument_service( + statuses=[200, 200, 200], + responses=[ + OPERATION_IN_PROGRESS_RESPONSE, # Request to create Android app asynchronously. + OPERATION_IN_PROGRESS_RESPONSE, # Creation operation is still not done. + OPERATION_FAILED_RESPONSE, # Operation is finished, but terminated with an error. + ]) + + with pytest.raises(exceptions.UnknownError) as excinfo: + project_management.create_android_app( + package_name='com.hello.world.android', + display_name='My Android App') + + assert 'Polling finished, but the operation terminated in an error' in str(excinfo.value) + assert excinfo.value.cause is None + assert excinfo.value.http_response is not None + assert len(recorder) == 3 + + def test_create_android_app_polling_limit_exceeded(self): + project_management._ProjectManagementService.MAXIMUM_POLLING_ATTEMPTS = 2 + recorder = self._instrument_service( + statuses=[200, 200, 200], + responses=[ + OPERATION_IN_PROGRESS_RESPONSE, # Request to create Android app asynchronously. + OPERATION_IN_PROGRESS_RESPONSE, # Creation Operation is still not done. 
+ OPERATION_IN_PROGRESS_RESPONSE, # Creation Operation is still not done. + ]) + + with pytest.raises(exceptions.DeadlineExceededError) as excinfo: + project_management.create_android_app( + package_name='com.hello.world.android', + display_name='My Android App') + + assert 'Polling deadline exceeded' in str(excinfo.value) + assert excinfo.value.cause is None + assert len(recorder) == 3 + + +class TestCreateIOSApp(BaseProjectManagementTest): + _CREATION_URL = 'https://firebase.googleapis.com/v1beta1/projects/test-project-id/iosApps' + + def test_create_ios_app_without_display_name(self): + recorder = self._instrument_service( + statuses=[200, 200, 200], + responses=[ + OPERATION_IN_PROGRESS_RESPONSE, # Request to create iOS app asynchronously. + OPERATION_IN_PROGRESS_RESPONSE, # Creation operation is still not done. + IOS_APP_NO_DISPLAY_NAME_OPERATION_SUCCESSFUL_RESPONSE, # Operation completed. + ]) + + ios_app = project_management.create_ios_app( + bundle_id='com.hello.world.ios') + + assert ios_app.app_id == '1:12345678:ios:ca5cade5' + assert len(recorder) == 3 + body = {'bundleId': 'com.hello.world.ios'} + self._assert_request_is_correct(recorder[0], 'POST', TestCreateIOSApp._CREATION_URL, body) + self._assert_request_is_correct( + recorder[1], 'GET', 'https://firebase.googleapis.com/v1/operations/abcdefg') + self._assert_request_is_correct( + recorder[2], 'GET', 'https://firebase.googleapis.com/v1/operations/abcdefg') + + def test_create_ios_app(self): + recorder = self._instrument_service( + statuses=[200, 200, 200], + responses=[ + OPERATION_IN_PROGRESS_RESPONSE, # Request to create iOS app asynchronously. + OPERATION_IN_PROGRESS_RESPONSE, # Creation operation is still not done. + IOS_APP_OPERATION_SUCCESSFUL_RESPONSE, # Creation operation completed. 
+ ]) + + ios_app = project_management.create_ios_app( + bundle_id='com.hello.world.ios', + display_name='My iOS App') + + assert ios_app.app_id == '1:12345678:ios:ca5cade5' + assert len(recorder) == 3 + body = { + 'bundleId': 'com.hello.world.ios', + 'displayName': 'My iOS App', + } + self._assert_request_is_correct(recorder[0], 'POST', TestCreateIOSApp._CREATION_URL, body) + self._assert_request_is_correct( + recorder[1], 'GET', 'https://firebase.googleapis.com/v1/operations/abcdefg') + self._assert_request_is_correct( + recorder[2], 'GET', 'https://firebase.googleapis.com/v1/operations/abcdefg') + + def test_create_ios_app_already_exists(self): + recorder = self._instrument_service(statuses=[409], responses=[ALREADY_EXISTS_RESPONSE]) + + with pytest.raises(exceptions.AlreadyExistsError) as excinfo: + project_management.create_ios_app( + bundle_id='com.hello.world.ios', + display_name='My iOS App') + + assert 'The resource already exists' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_create_ios_app_polling_rpc_error(self): + recorder = self._instrument_service( + statuses=[200, 200, 503], # Error 503 means that backend servers are over capacity. + responses=[ + OPERATION_IN_PROGRESS_RESPONSE, # Request to create iOS app asynchronously. + OPERATION_IN_PROGRESS_RESPONSE, # Creation operation is still not done. + UNAVAILABLE_RESPONSE, # Error 503. 
+ ]) + + with pytest.raises(exceptions.UnavailableError) as excinfo: + project_management.create_ios_app( + bundle_id='com.hello.world.ios', + display_name='My iOS App') + + assert 'Backend servers are over capacity' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 3 + + def test_create_ios_app_polling_failure(self): + recorder = self._instrument_service( + statuses=[200, 200, 200], + responses=[ + OPERATION_IN_PROGRESS_RESPONSE, # Request to create iOS app asynchronously. + OPERATION_IN_PROGRESS_RESPONSE, # Creation operation is still not done. + OPERATION_FAILED_RESPONSE, # Operation is finished, but terminated with an error. + ]) + + with pytest.raises(exceptions.UnknownError) as excinfo: + project_management.create_ios_app( + bundle_id='com.hello.world.ios', + display_name='My iOS App') + + assert 'Polling finished, but the operation terminated in an error' in str(excinfo.value) + assert excinfo.value.cause is None + assert excinfo.value.http_response is not None + assert len(recorder) == 3 + + def test_create_ios_app_polling_limit_exceeded(self): + project_management._ProjectManagementService.MAXIMUM_POLLING_ATTEMPTS = 2 + recorder = self._instrument_service( + statuses=[200, 200, 200], + responses=[ + OPERATION_IN_PROGRESS_RESPONSE, # Request to create iOS app asynchronously. + OPERATION_IN_PROGRESS_RESPONSE, # Creation Operation is still not done. + OPERATION_IN_PROGRESS_RESPONSE, # Creation Operation is still not done. 
+ ]) + + with pytest.raises(exceptions.DeadlineExceededError) as excinfo: + project_management.create_ios_app( + bundle_id='com.hello.world.ios', + display_name='My iOS App') + + assert 'Polling deadline exceeded' in str(excinfo.value) + assert excinfo.value.cause is None + assert len(recorder) == 3 + + +class TestListAndroidApps(BaseProjectManagementTest): + _LISTING_URL = ('https://firebase.googleapis.com/v1beta1/projects/test-project-id/' + 'androidApps?pageSize=100') + _LISTING_PAGE_2_URL = ('https://firebase.googleapis.com/v1beta1/projects/test-project-id/' + 'androidApps?pageToken=nextpagetoken&pageSize=100') + + def test_list_android_apps(self): + recorder = self._instrument_service(statuses=[200], responses=[LIST_ANDROID_APPS_RESPONSE]) + + android_apps = project_management.list_android_apps() + + expected_app_ids = set(['1:12345678:android:deadbeef', '1:12345678:android:deadbeefcafe']) + assert set(app.app_id for app in android_apps) == expected_app_ids + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestListAndroidApps._LISTING_URL) + + def test_list_android_apps_rpc_error(self): + recorder = self._instrument_service(statuses=[503], responses=[UNAVAILABLE_RESPONSE]) + + with pytest.raises(exceptions.UnavailableError) as excinfo: + project_management.list_android_apps() + + assert 'Backend servers are over capacity' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_list_android_apps_empty_list(self): + recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})]) + + android_apps = project_management.list_android_apps() + + assert android_apps == [] + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestListAndroidApps._LISTING_URL) + + def test_list_android_apps_multiple_pages(self): + recorder = self._instrument_service( + statuses=[200, 200], + 
responses=[LIST_ANDROID_APPS_PAGE_1_RESPONSE, LIST_ANDROID_APPS_PAGE_2_RESPONSE]) + + android_apps = project_management.list_android_apps() + + expected_app_ids = set(['1:12345678:android:deadbeef', '1:12345678:android:deadbeefcafe']) + assert set(app.app_id for app in android_apps) == expected_app_ids + assert len(recorder) == 2 + self._assert_request_is_correct(recorder[0], 'GET', TestListAndroidApps._LISTING_URL) + self._assert_request_is_correct(recorder[1], 'GET', TestListAndroidApps._LISTING_PAGE_2_URL) + + def test_list_android_apps_multiple_pages_rpc_error(self): + recorder = self._instrument_service( + statuses=[200, 503], + responses=[LIST_ANDROID_APPS_PAGE_1_RESPONSE, UNAVAILABLE_RESPONSE]) + + with pytest.raises(exceptions.UnavailableError) as excinfo: + project_management.list_android_apps() + + assert 'Backend servers are over capacity' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 2 + + +class TestListIOSApps(BaseProjectManagementTest): + _LISTING_URL = ('https://firebase.googleapis.com/v1beta1/projects/test-project-id/' + 'iosApps?pageSize=100') + _LISTING_PAGE_2_URL = ('https://firebase.googleapis.com/v1beta1/projects/test-project-id/' + 'iosApps?pageToken=nextpagetoken&pageSize=100') + + def test_list_ios_apps(self): + recorder = self._instrument_service(statuses=[200], responses=[LIST_IOS_APPS_RESPONSE]) + + ios_apps = project_management.list_ios_apps() + + expected_app_ids = set(['1:12345678:ios:ca5cade5', '1:12345678:ios:ca5cade5cafe']) + assert set(app.app_id for app in ios_apps) == expected_app_ids + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestListIOSApps._LISTING_URL) + + def test_list_ios_apps_rpc_error(self): + recorder = self._instrument_service(statuses=[503], responses=[UNAVAILABLE_RESPONSE]) + + with pytest.raises(exceptions.UnavailableError) as excinfo: + project_management.list_ios_apps() + + assert 
'Backend servers are over capacity' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_list_ios_apps_empty_list(self): + recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})]) + + ios_apps = project_management.list_ios_apps() + + assert ios_apps == [] + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestListIOSApps._LISTING_URL) + + def test_list_ios_apps_multiple_pages(self): + recorder = self._instrument_service( + statuses=[200, 200], + responses=[LIST_IOS_APPS_PAGE_1_RESPONSE, LIST_IOS_APPS_PAGE_2_RESPONSE]) + + ios_apps = project_management.list_ios_apps() + + expected_app_ids = set(['1:12345678:ios:ca5cade5', '1:12345678:ios:ca5cade5cafe']) + assert set(app.app_id for app in ios_apps) == expected_app_ids + assert len(recorder) == 2 + self._assert_request_is_correct(recorder[0], 'GET', TestListIOSApps._LISTING_URL) + self._assert_request_is_correct(recorder[1], 'GET', TestListIOSApps._LISTING_PAGE_2_URL) + + def test_list_ios_apps_multiple_pages_rpc_error(self): + recorder = self._instrument_service( + statuses=[200, 503], + responses=[LIST_IOS_APPS_PAGE_1_RESPONSE, UNAVAILABLE_RESPONSE]) + + with pytest.raises(exceptions.UnavailableError) as excinfo: + project_management.list_ios_apps() + + assert 'Backend servers are over capacity' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 2 + + +class TestAndroidApp(BaseProjectManagementTest): + _GET_METADATA_URL = ('https://firebase.googleapis.com/v1beta1/projects/-/androidApps/' + '1:12345678:android:deadbeef') + _SET_DISPLAY_NAME_URL = ('https://firebase.googleapis.com/v1beta1/projects/-/androidApps/' + '1:12345678:android:deadbeef?updateMask=displayName') + _GET_CONFIG_URL = ('https://firebase.googleapis.com/v1beta1/projects/-/androidApps/' + 
'1:12345678:android:deadbeef/config') + _ADD_CERT_URL = ('https://firebase.googleapis.com/v1beta1/projects/-/androidApps/' + '1:12345678:android:deadbeef/sha') + _LIST_CERTS_URL = ('https://firebase.googleapis.com/v1beta1/projects/-/androidApps/' + '1:12345678:android:deadbeef/sha') + _DELETE_SHA_1_CERT_URL = ('https://firebase.googleapis.com/v1beta1/projects/-/androidApps/' + '1:12345678:android:deadbeef/sha/name1') + _DELETE_SHA_256_CERT_URL = ('https://firebase.googleapis.com/v1beta1/projects/-/androidApps/' + '1:12345678:android:deadbeef/sha/name256') + + @pytest.fixture + def android_app(self): + return project_management.android_app('1:12345678:android:deadbeef') + + def test_get_metadata_no_display_name(self, android_app): + recorder = self._instrument_service( + statuses=[200], responses=[ANDROID_APP_NO_DISPLAY_NAME_METADATA_RESPONSE]) + + metadata = android_app.get_metadata() + + assert metadata._name == 'projects/test-project-id/androidApps/1:12345678:android:deadbeef' + assert metadata.app_id == '1:12345678:android:deadbeef' + assert metadata.display_name is None + assert metadata.project_id == 'test-project-id' + assert metadata.package_name == 'com.hello.world.android' + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestAndroidApp._GET_METADATA_URL) + + def test_get_metadata(self, android_app): + recorder = self._instrument_service( + statuses=[200], responses=[ANDROID_APP_METADATA_RESPONSE]) + + metadata = android_app.get_metadata() + + assert metadata._name == 'projects/test-project-id/androidApps/1:12345678:android:deadbeef' + assert metadata.app_id == '1:12345678:android:deadbeef' + assert metadata.display_name == 'My Android App' + assert metadata.project_id == 'test-project-id' + assert metadata.package_name == 'com.hello.world.android' + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestAndroidApp._GET_METADATA_URL) + + def test_get_metadata_unknown_error(self, android_app): + 
recorder = self._instrument_service( + statuses=[428], responses=['precondition required error']) + + with pytest.raises(exceptions.UnknownError) as excinfo: + android_app.get_metadata() + + message = 'Unexpected HTTP response with status: 428; body: precondition required error' + assert str(excinfo.value) == message + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_get_metadata_not_found(self, android_app): + recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE]) + + with pytest.raises(exceptions.NotFoundError) as excinfo: + android_app.get_metadata() + + assert 'Failed to find the resource' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_set_display_name(self, android_app): + recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})]) + new_display_name = 'A new display name!' + + android_app.set_display_name(new_display_name) + + assert len(recorder) == 1 + body = {'displayName': new_display_name} + self._assert_request_is_correct( + recorder[0], 'PATCH', TestAndroidApp._SET_DISPLAY_NAME_URL, body) + + def test_set_display_name_not_found(self, android_app): + recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE]) + new_display_name = 'A new display name!' 
+ + with pytest.raises(exceptions.NotFoundError) as excinfo: + android_app.set_display_name(new_display_name) + + assert 'Failed to find the resource' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_get_config(self, android_app): + recorder = self._instrument_service(statuses=[200], responses=[TEST_APP_CONFIG_RESPONSE]) + + config = android_app.get_config() + + assert config == 'hello world' + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestAndroidApp._GET_CONFIG_URL) + + def test_get_config_not_found(self, android_app): + recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE]) + + with pytest.raises(exceptions.NotFoundError) as excinfo: + android_app.get_config() + + assert 'Failed to find the resource' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_get_sha_certificates(self, android_app): + recorder = self._instrument_service( + statuses=[200], responses=[GET_SHA_CERTIFICATES_RESPONSE]) + + certs = android_app.get_sha_certificates() + + assert set(certs) == set([SHA_1_CERTIFICATE, SHA_256_CERTIFICATE]) + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestAndroidApp._LIST_CERTS_URL) + + def test_get_sha_certificates_not_found(self, android_app): + recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE]) + + with pytest.raises(exceptions.NotFoundError) as excinfo: + android_app.get_sha_certificates() + + assert 'Failed to find the resource' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_add_certificate_none_error(self, android_app): + with pytest.raises(ValueError): + android_app.add_sha_certificate(None) + + def 
test_add_sha_1_certificate(self, android_app): + recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})]) + + android_app.add_sha_certificate( + project_management.SHACertificate('123456789a123456789a123456789a123456789a')) + + assert len(recorder) == 1 + body = {'shaHash': '123456789a123456789a123456789a123456789a', 'certType': 'SHA_1'} + self._assert_request_is_correct(recorder[0], 'POST', TestAndroidApp._ADD_CERT_URL, body) + + def test_add_sha_256_certificate(self, android_app): + recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})]) + + android_app.add_sha_certificate(project_management.SHACertificate( + '123456789a123456789a123456789a123456789a123456789a123456789a1234')) + + assert len(recorder) == 1 + body = { + 'shaHash': '123456789a123456789a123456789a123456789a123456789a123456789a1234', + 'certType': 'SHA_256', + } + self._assert_request_is_correct(recorder[0], 'POST', TestAndroidApp._ADD_CERT_URL, body) + + def test_add_sha_certificates_already_exists(self, android_app): + recorder = self._instrument_service(statuses=[409], responses=[ALREADY_EXISTS_RESPONSE]) + + with pytest.raises(exceptions.AlreadyExistsError) as excinfo: + android_app.add_sha_certificate( + project_management.SHACertificate('123456789a123456789a123456789a123456789a')) + + assert 'The resource already exists' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_delete_certificate_none_error(self, android_app): + with pytest.raises(ValueError): + android_app.delete_sha_certificate(None) + + def test_delete_sha_1_certificate(self, android_app): + recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})]) + + android_app.delete_sha_certificate(SHA_1_CERTIFICATE) + + assert len(recorder) == 1 + self._assert_request_is_correct( + recorder[0], 'DELETE', TestAndroidApp._DELETE_SHA_1_CERT_URL) + + def 
test_delete_sha_256_certificate(self, android_app): + recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})]) + + android_app.delete_sha_certificate(SHA_256_CERTIFICATE) + + assert len(recorder) == 1 + self._assert_request_is_correct( + recorder[0], 'DELETE', TestAndroidApp._DELETE_SHA_256_CERT_URL) + + def test_delete_sha_certificates_not_found(self, android_app): + recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE]) + + with pytest.raises(exceptions.NotFoundError) as excinfo: + android_app.delete_sha_certificate(SHA_1_CERTIFICATE) + + assert 'Failed to find the resource' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_raises_if_app_has_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id') + + with pytest.raises(ValueError): + project_management.android_app(app_id='1:12345678:android:deadbeef', app=app) + + testutils.run_without_project_id(evaluate) + + +class TestIOSApp(BaseProjectManagementTest): + _GET_METADATA_URL = ('https://firebase.googleapis.com/v1beta1/projects/-/iosApps/' + '1:12345678:ios:ca5cade5') + _SET_DISPLAY_NAME_URL = ('https://firebase.googleapis.com/v1beta1/projects/-/iosApps/' + '1:12345678:ios:ca5cade5?updateMask=displayName') + _GET_CONFIG_URL = ('https://firebase.googleapis.com/v1beta1/projects/-/iosApps/' + '1:12345678:ios:ca5cade5/config') + + @pytest.fixture + def ios_app(self): + return project_management.ios_app('1:12345678:ios:ca5cade5') + + def test_get_metadata_no_display_name(self, ios_app): + recorder = self._instrument_service( + statuses=[200], responses=[IOS_APP_NO_DISPLAY_NAME_METADATA_RESPONSE]) + + metadata = ios_app.get_metadata() + + assert metadata._name == 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5' + assert metadata.app_id == '1:12345678:ios:ca5cade5' + assert 
metadata.display_name is None + assert metadata.project_id == 'test-project-id' + assert metadata.bundle_id == 'com.hello.world.ios' + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestIOSApp._GET_METADATA_URL) + + def test_get_metadata(self, ios_app): + recorder = self._instrument_service(statuses=[200], responses=[IOS_APP_METADATA_RESPONSE]) + + metadata = ios_app.get_metadata() + + assert metadata._name == 'projects/test-project-id/iosApps/1:12345678:ios:ca5cade5' + assert metadata.app_id == '1:12345678:ios:ca5cade5' + assert metadata.display_name == 'My iOS App' + assert metadata.project_id == 'test-project-id' + assert metadata.bundle_id == 'com.hello.world.ios' + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestIOSApp._GET_METADATA_URL) + + def test_get_metadata_unknown_error(self, ios_app): + recorder = self._instrument_service( + statuses=[428], responses=['precondition required error']) + + with pytest.raises(exceptions.UnknownError) as excinfo: + ios_app.get_metadata() + + message = 'Unexpected HTTP response with status: 428; body: precondition required error' + assert str(excinfo.value) == message + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_get_metadata_not_found(self, ios_app): + recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE]) + + with pytest.raises(exceptions.NotFoundError) as excinfo: + ios_app.get_metadata() + + assert 'Failed to find the resource' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_set_display_name(self, ios_app): + recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})]) + new_display_name = 'A new display name!' 
+ + ios_app.set_display_name(new_display_name) + + assert len(recorder) == 1 + body = {'displayName': new_display_name} + self._assert_request_is_correct( + recorder[0], 'PATCH', TestIOSApp._SET_DISPLAY_NAME_URL, body) + + def test_set_display_name_not_found(self, ios_app): + recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE]) + new_display_name = 'A new display name!' + + with pytest.raises(exceptions.NotFoundError) as excinfo: + ios_app.set_display_name(new_display_name) + + assert 'Failed to find the resource' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_get_config(self, ios_app): + recorder = self._instrument_service(statuses=[200], responses=[TEST_APP_CONFIG_RESPONSE]) + + config = ios_app.get_config() + + assert config == 'hello world' + assert len(recorder) == 1 + self._assert_request_is_correct(recorder[0], 'GET', TestIOSApp._GET_CONFIG_URL) + + def test_get_config_not_found(self, ios_app): + recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE]) + + with pytest.raises(exceptions.NotFoundError) as excinfo: + ios_app.get_config() + + assert 'Failed to find the resource' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is not None + assert len(recorder) == 1 + + def test_raises_if_app_has_no_project_id(self): + def evaluate(): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id') + + with pytest.raises(ValueError): + project_management.ios_app(app_id='1:12345678:ios:ca5cade5', app=app) + + testutils.run_without_project_id(evaluate) diff --git a/tests/test_remote_config.py b/tests/test_remote_config.py new file mode 100644 index 000000000..7bbf9721d --- /dev/null +++ b/tests/test_remote_config.py @@ -0,0 +1,984 @@ +# Copyright 2024 Google Inc. 
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for firebase_admin.remote_config."""
import json
import uuid
import pytest
import firebase_admin
from firebase_admin.remote_config import (
    CustomSignalOperator,
    PercentConditionOperator,
    _REMOTE_CONFIG_ATTRIBUTE,
    _RemoteConfigService)
from firebase_admin import remote_config, _utils
from tests import testutils

# Canonical version metadata echoed by the Remote Config backend.
VERSION_INFO = {
    'versionNumber': '86',
    'updateOrigin': 'ADMIN_SDK_PYTHON',
    'updateType': 'INCREMENTAL_UPDATE',
    'updateUser': {
        'email': 'firebase-adminsdk@gserviceaccount.com'
    },
    'description': 'production version',
    'updateTime': '2024-11-05T16:45:03.541527Z'
}

# A representative server template response. NOTE: this is module-level shared
# state — tests that need to alter it must take a copy first (see below) so one
# test cannot leak mutations into another.
SERVER_REMOTE_CONFIG_RESPONSE = {
    'conditions': [
        {
            'name': 'ios',
            'condition': {
                'orCondition': {
                    'conditions': [
                        {
                            'andCondition': {
                                'conditions': [
                                    {'true': {}}
                                ]
                            }
                        }
                    ]
                }
            }
        },
    ],
    'parameters': {
        'holiday_promo_enabled': {
            'defaultValue': {'value': 'true'},
            'conditionalValues': {'ios': {'useInAppDefault': 'true'}}
        },
    },
    'parameterGroups': '',
    'etag': 'etag-123456789012-5',
    'version': VERSION_INFO,
}

# Parametrized cases for custom-signal semantic version comparisons:
# [operator, target values, actual signal value, expected parameter value].
SEMANTIC_VERSION_LESS_THAN_TRUE = [
    CustomSignalOperator.SEMANTIC_VERSION_LESS_THAN.value, ['12.1.3.444'], '12.1.3.443', True]
SEMANTIC_VERSION_EQUAL_TRUE = [
    CustomSignalOperator.SEMANTIC_VERSION_EQUAL.value, ['12.1.3.444'], '12.1.3.444', True]
SEMANTIC_VERSION_GREATER_THAN_FALSE = [
    CustomSignalOperator.SEMANTIC_VERSION_LESS_THAN.value, ['12.1.3.4'], '12.1.3.4', False]
SEMANTIC_VERSION_INVALID_FORMAT_STRING = [
    CustomSignalOperator.SEMANTIC_VERSION_LESS_THAN.value, ['12.1.3.444'], '12.1.3.abc', False]
SEMANTIC_VERSION_INVALID_FORMAT_NEGATIVE_INTEGER = [
    CustomSignalOperator.SEMANTIC_VERSION_LESS_THAN.value, ['12.1.3.444'], '12.1.3.-2', False]


class TestEvaluate:
    """Tests for server-side evaluation of Remote Config templates."""

    @classmethod
    def setup_class(cls):
        cred = testutils.MockCredential()
        firebase_admin.initialize_app(cred, {'projectId': 'project-id'})

    @classmethod
    def teardown_class(cls):
        testutils.cleanup_apps()

    def test_evaluate_or_and_true_condition_true(self):
        """An OR(AND(true)) condition selects the conditional value."""
        app = firebase_admin.get_app()
        default_config = {'param1': 'in_app_default_param1', 'param3': 'in_app_default_param3'}
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [
                        {
                            'andCondition': {
                                'conditions': [
                                    {
                                        'name': '',
                                        'true': {
                                        }
                                    }
                                ]
                            }
                        }
                    ]
                }
            }
        }
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': 'etag'
        }
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )

        server_config = server_template.evaluate()
        assert server_config.get_boolean('is_enabled')
        assert server_config.get_value_source('is_enabled') == 'remote'

    def test_evaluate_or_and_false_condition_false(self):
        """An OR(AND(false)) condition falls back to the default value."""
        app = firebase_admin.get_app()
        default_config = {'param1': 'in_app_default_param1', 'param3': 'in_app_default_param3'}
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [
                        {
                            'andCondition': {
                                'conditions': [
                                    {
                                        'name': '',
                                        'false': {
                                        }
                                    }
                                ]
                            }
                        }
                    ]
                }
            }
        }
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': 'etag'
        }
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )

        server_config = server_template.evaluate()
        assert not server_config.get_boolean('is_enabled')

    def test_evaluate_non_or_condition(self):
        """A bare (non-OR-wrapped) condition is evaluated directly."""
        app = firebase_admin.get_app()
        default_config = {'param1': 'in_app_default_param1', 'param3': 'in_app_default_param3'}
        condition = {
            'name': 'is_true',
            'condition': {
                'true': {
                }
            }
        }
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': 'etag'
        }
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )

        server_config = server_template.evaluate()
        assert server_config.get_boolean('is_enabled')

    def test_evaluate_return_conditional_values_honor_order(self):
        """When multiple conditions match, the first listed condition wins."""
        app = firebase_admin.get_app()
        default_config = {'param1': 'in_app_default_param1', 'param3': 'in_app_default_param3'}
        template_data = {
            'conditions': [
                {
                    'name': 'is_true',
                    'condition': {
                        'orCondition': {
                            'conditions': [
                                {
                                    'andCondition': {
                                        'conditions': [
                                            {
                                                'true': {
                                                }
                                            }
                                        ]
                                    }
                                }
                            ]
                        }
                    }
                },
                {
                    'name': 'is_true_too',
                    'condition': {
                        'orCondition': {
                            'conditions': [
                                {
                                    'andCondition': {
                                        'conditions': [
                                            {
                                                'true': {
                                                }
                                            }
                                        ]
                                    }
                                }
                            ]
                        }
                    }
                }
            ],
            'parameters': {
                'dog_type': {
                    'defaultValue': {'value': 'chihuahua'},
                    'conditionalValues': {
                        'is_true_too': {'value': 'dachshund'},
                        'is_true': {'value': 'corgi'}
                    }
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': 'etag'
        }
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate()
        # 'is_true' is listed first in 'conditions', so 'corgi' wins over 'dachshund'.
        assert server_config.get_string('dog_type') == 'corgi'

    def test_evaluate_default_when_no_param(self):
        """Falls back to in-app defaults for params absent from the template."""
        app = firebase_admin.get_app()
        default_config = {'promo_enabled': False, 'promo_discount': '20',}
        # Copy before mutating: SERVER_REMOTE_CONFIG_RESPONSE is module-level
        # shared state; mutating it in place would leak into other tests.
        template_data = dict(SERVER_REMOTE_CONFIG_RESPONSE)
        template_data['parameters'] = {}
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate()
        assert server_config.get_boolean('promo_enabled') == default_config.get('promo_enabled')
        assert server_config.get_int('promo_discount') == int(default_config.get('promo_discount'))

    def test_evaluate_default_when_no_default_value(self):
        """Falls back to the in-app default when the template param has no value."""
        app = firebase_admin.get_app()
        default_config = {'default_value': 'local default'}
        template_data = dict(SERVER_REMOTE_CONFIG_RESPONSE)  # copy; avoid cross-test mutation
        template_data['parameters'] = {
            'default_value': {}
        }
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate()
        assert server_config.get_string('default_value') == default_config.get('default_value')

    def test_evaluate_default_when_in_default(self):
        """In-app defaults are served for keys only present locally."""
        app = firebase_admin.get_app()
        template_data = dict(SERVER_REMOTE_CONFIG_RESPONSE)  # copy; avoid cross-test mutation
        template_data['parameters'] = {
            'remote_default_value': {}
        }
        default_config = {
            'inapp_default': '🐕'
        }
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate()
        assert server_config.get_string('inapp_default') == default_config.get('inapp_default')

    def test_evaluate_default_when_defined(self):
        """A locally defined default is returned when remote params are empty."""
        app = firebase_admin.get_app()
        template_data = dict(SERVER_REMOTE_CONFIG_RESPONSE)  # copy; avoid cross-test mutation
        template_data['parameters'] = {}
        default_config = {
            'dog_type': 'shiba'
        }
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate()
        assert server_config.get_string('dog_type') == 'shiba'

    def test_evaluate_return_numeric_value(self):
        """String defaults are coercible via get_int."""
        app = firebase_admin.get_app()
        template_data = SERVER_REMOTE_CONFIG_RESPONSE
        default_config = {
            'dog_age': '12'
        }
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate()
        assert server_config.get_int('dog_age') == int(default_config.get('dog_age'))

    def test_evaluate_return_boolean_value(self):
        """Boolean defaults are returned via get_boolean."""
        app = firebase_admin.get_app()
        template_data = SERVER_REMOTE_CONFIG_RESPONSE
        default_config = {
            'dog_is_cute': True
        }
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate()
        assert server_config.get_boolean('dog_is_cute')

    def test_evaluate_unknown_operator_to_false(self):
        """An unrecognized percent operator evaluates the condition to false."""
        app = firebase_admin.get_app()
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [{
                        'andCondition': {
                            'conditions': [{
                                'percent': {
                                    'percentOperator': PercentConditionOperator.UNKNOWN.value
                                }
                            }],
                        }
                    }]
                }
            }
        }
        default_config = {
            'dog_is_cute': True
        }
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': '123'
        }
        context = {'randomization_id': '123'}
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate(context)
        assert not server_config.get_boolean('is_enabled')

    def test_evaluate_less_or_equal_to_max_to_true(self):
        """LESS_OR_EQUAL at 100% (100M micro-percent) always matches."""
        app = firebase_admin.get_app()
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [{
                        'andCondition': {
                            'conditions': [{
                                'percent': {
                                    'percentOperator': PercentConditionOperator.LESS_OR_EQUAL.value,
                                    'seed': 'abcdef',
                                    'microPercent': 100_000_000
                                }
                            }],
                        }
                    }]
                }
            }
        }
        default_config = {
            'dog_is_cute': True
        }
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': '123'
        }
        context = {'randomization_id': '123'}
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate(context)
        assert server_config.get_boolean('is_enabled')

    def test_evaluate_undefined_micropercent_to_false(self):
        """LESS_OR_EQUAL with no microPercent defaults to 0% and never matches."""
        app = firebase_admin.get_app()
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [{
                        'andCondition': {
                            'conditions': [{
                                'percent': {
                                    'percentOperator': PercentConditionOperator.LESS_OR_EQUAL.value,
                                    # Leaves microPercent undefined
                                }
                            }],
                        }
                    }]
                }
            }
        }
        default_config = {
            'dog_is_cute': True
        }
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': '123'
        }
        context = {'randomization_id': '123'}
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate(context)
        assert not server_config.get_boolean('is_enabled')

    def test_evaluate_undefined_micropercentrange_to_false(self):
        """BETWEEN with no microPercentRange defaults to an empty range."""
        app = firebase_admin.get_app()
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [{
                        'andCondition': {
                            'conditions': [{
                                'percent': {
                                    'percentOperator': PercentConditionOperator.BETWEEN.value,
                                    # Leaves microPercentRange undefined
                                }
                            }],
                        }
                    }]
                }
            }
        }
        default_config = {
            'dog_is_cute': True
        }
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': '123'
        }
        context = {'randomization_id': '123'}
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate(context)
        assert not server_config.get_boolean('is_enabled')

    def test_evaluate_between_min_max_to_true(self):
        """BETWEEN covering the full 0–100M range always matches."""
        app = firebase_admin.get_app()
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [{
                        'andCondition': {
                            'conditions': [{
                                'percent': {
                                    'percentOperator': PercentConditionOperator.BETWEEN.value,
                                    'seed': 'abcdef',
                                    'microPercentRange': {
                                        'microPercentLowerBound': 0,
                                        'microPercentUpperBound': 100_000_000
                                    }
                                }
                            }],
                        }
                    }]
                }
            }
        }
        default_config = {
            'dog_is_cute': True
        }
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': '123'
        }
        context = {'randomization_id': '123'}
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate(context)
        assert server_config.get_boolean('is_enabled')

    def test_evaluate_between_equal_bounds_to_false(self):
        """BETWEEN with equal bounds is an empty interval and never matches."""
        app = firebase_admin.get_app()
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [{
                        'andCondition': {
                            'conditions': [{
                                'percent': {
                                    'percentOperator': PercentConditionOperator.BETWEEN.value,
                                    'seed': 'abcdef',
                                    'microPercentRange': {
                                        'microPercentLowerBound': 50000000,
                                        'microPercentUpperBound': 50000000
                                    }
                                }
                            }],
                        }
                    }]
                }
            }
        }
        default_config = {
            'dog_is_cute': True
        }
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': '123'
        }
        context = {'randomization_id': '123'}
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate(context)
        assert not server_config.get_boolean('is_enabled')

    def test_evaluate_less_or_equal_to_approx(self):
        """A 10% LESS_OR_EQUAL rollout hits ~10% of random ids (3-sigma band)."""
        app = firebase_admin.get_app()
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [{
                        'andCondition': {
                            'conditions': [{
                                'percent': {
                                    'percentOperator': PercentConditionOperator.LESS_OR_EQUAL.value,
                                    'seed': 'abcdef',
                                    'microPercent': 10_000_000  # 10%
                                }
                            }],
                        }
                    }]
                }
            }
        }
        default_config = {
            'dog_is_cute': True
        }

        truthy_assignments = self.evaluate_random_assignments(condition, 100000,
                                                              app, default_config)
        tolerance = 284
        assert truthy_assignments >= 10000 - tolerance
        assert truthy_assignments <= 10000 + tolerance

    def test_evaluate_between_approx(self):
        """A 40–60% BETWEEN rollout hits ~20% of random ids (3-sigma band)."""
        app = firebase_admin.get_app()
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [{
                        'andCondition': {
                            'conditions': [{
                                'percent': {
                                    'percentOperator': PercentConditionOperator.BETWEEN.value,
                                    'seed': 'abcdef',
                                    'microPercentRange': {
                                        'microPercentLowerBound': 40_000_000,
                                        'microPercentUpperBound': 60_000_000
                                    }
                                }
                            }],
                        }
                    }]
                }
            }
        }
        default_config = {
            'dog_is_cute': True
        }

        truthy_assignments = self.evaluate_random_assignments(condition, 100000,
                                                              app, default_config)
        tolerance = 379
        assert truthy_assignments >= 20000 - tolerance
        assert truthy_assignments <= 20000 + tolerance

    def test_evaluate_between_interquartile_range_accuracy(self):
        """A 25–75% BETWEEN rollout hits ~50% of random ids (3-sigma band)."""
        app = firebase_admin.get_app()
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [{
                        'andCondition': {
                            'conditions': [{
                                'percent': {
                                    'percentOperator': PercentConditionOperator.BETWEEN.value,
                                    'seed': 'abcdef',
                                    'microPercentRange': {
                                        'microPercentLowerBound': 25_000_000,
                                        'microPercentUpperBound': 75_000_000
                                    }
                                }
                            }],
                        }
                    }]
                }
            }
        }
        default_config = {
            'dog_is_cute': True
        }

        truthy_assignments = self.evaluate_random_assignments(condition, 100000,
                                                              app, default_config)
        tolerance = 490
        assert truthy_assignments >= 50000 - tolerance
        assert truthy_assignments <= 50000 + tolerance

    def evaluate_random_assignments(self, condition, num_of_assignments, mock_app, default_config):
        """Evaluates random assignments based on a condition.

        Args:
            condition: The condition to evaluate.
            num_of_assignments: The number of random randomization_ids to generate.
            mock_app: The initialized firebase_admin App to evaluate against.
            default_config: In-app default parameter values for the template.

        Returns:
            int: The number of assignments that evaluated to true.
        """
        eval_true_count = 0
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': '123'
        }
        server_template = remote_config.init_server_template(
            app=mock_app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )

        for _ in range(num_of_assignments):
            context = {'randomization_id': str(uuid.uuid4())}
            result = server_template.evaluate(context)
            if result.get_boolean('is_enabled') is True:
                eval_true_count += 1

        return eval_true_count

    @pytest.mark.parametrize(
        'custom_signal_operator, \
        target_custom_signal_value, actual_custom_signal_value, parameter_value',
        [
            SEMANTIC_VERSION_LESS_THAN_TRUE,
            SEMANTIC_VERSION_GREATER_THAN_FALSE,
            SEMANTIC_VERSION_EQUAL_TRUE,
            SEMANTIC_VERSION_INVALID_FORMAT_NEGATIVE_INTEGER,
            SEMANTIC_VERSION_INVALID_FORMAT_STRING
        ])
    def test_evaluate_custom_signal_semantic_version(self,
                                                     custom_signal_operator,
                                                     target_custom_signal_value,
                                                     actual_custom_signal_value,
                                                     parameter_value):
        """Semantic-version custom signals compare per operator; invalid versions are false."""
        app = firebase_admin.get_app()
        condition = {
            'name': 'is_true',
            'condition': {
                'orCondition': {
                    'conditions': [{
                        'andCondition': {
                            'conditions': [{
                                'customSignal': {
                                    'customSignalOperator': custom_signal_operator,
                                    'customSignalKey': 'semantic_version_key',
                                    'targetCustomSignalValues': target_custom_signal_value
                                }
                            }],
                        }
                    }]
                }
            }
        }
        default_config = {
            'dog_is_cute': True
        }
        template_data = {
            'conditions': [condition],
            'parameters': {
                'is_enabled': {
                    'defaultValue': {'value': 'false'},
                    'conditionalValues': {'is_true': {'value': 'true'}}
                },
            },
            'parameterGroups': '',
            'version': '',
            'etag': '123'
        }
        context = {'randomization_id': '123', 'semantic_version_key': actual_custom_signal_value}
        server_template = remote_config.init_server_template(
            app=app,
            default_config=default_config,
            template_data_json=json.dumps(template_data)
        )
        server_config = server_template.evaluate(context)
        assert server_config.get_boolean('is_enabled') == parameter_value


class MockAdapter(testutils.MockAdapter):
    """A Mock HTTP Adapter that provides Firebase Remote Config responses with ETag in header."""

    ETAG = 'etag'

    def __init__(self, data, status, recorder, etag=ETAG):
        testutils.MockAdapter.__init__(self, data, status, recorder)
        self._etag = etag

    def send(self, request, **kwargs):
        resp = super().send(request, **kwargs)
        resp.headers = {'etag': self._etag}
        return resp


class TestRemoteConfigService:
    """Tests methods on _RemoteConfigService"""
    @classmethod
    def setup_class(cls):
        cred = testutils.MockCredential()
        firebase_admin.initialize_app(cred, {'projectId': 'project-id'})

    @classmethod
    def teardown_class(cls):
        testutils.cleanup_apps()

    @pytest.mark.asyncio
    async def test_rc_instance_get_server_template(self):
        """Fetching a template parses parameters, version, and the ETag header."""
        recorder = []
        response = json.dumps({
            'parameters': {
                'test_key': 'test_value'
            },
            'conditions': [],
            'version': 'test'
        })

        rc_instance = _utils.get_app_service(firebase_admin.get_app(),
                                             _REMOTE_CONFIG_ATTRIBUTE, _RemoteConfigService)
        rc_instance._client.session.mount(
            'https://firebaseremoteconfig.googleapis.com',
            MockAdapter(response, 200, recorder))

        template = await rc_instance.get_server_template()

        assert template.parameters == {"test_key": 'test_value'}
        assert str(template.version) == 'test'
        assert str(template.etag) == 'etag'

    @pytest.mark.asyncio
    async def test_rc_instance_get_server_template_empty_params(self):
        """A response without 'parameters' yields an empty parameter map."""
        recorder = []
        response = json.dumps({
            'conditions': [],
            'version': 'test'
        })

        rc_instance = _utils.get_app_service(firebase_admin.get_app(),
                                             _REMOTE_CONFIG_ATTRIBUTE, _RemoteConfigService)
        rc_instance._client.session.mount(
            'https://firebaseremoteconfig.googleapis.com',
            MockAdapter(response, 200, recorder))

        template = await rc_instance.get_server_template()

        assert template.parameters == {}
        assert str(template.version) == 'test'
        assert str(template.etag) == 'etag'


class TestRemoteConfigModule:
    """Tests methods on firebase_admin.remote_config"""
    @classmethod
    def setup_class(cls):
        cred = testutils.MockCredential()
        firebase_admin.initialize_app(cred, {'projectId': 'project-id'})

    @classmethod
    def teardown_class(cls):
        testutils.cleanup_apps()

    def test_init_server_template(self):
        """init_server_template builds an evaluable template from JSON data."""
        app = firebase_admin.get_app()
        template_data = {
            'conditions': [],
            'parameters': {
                'test_key': {
                    'defaultValue': {'value': 'test_value'},
                    'conditionalValues': {}
                }
            },
            'version': '',
        }

        template = remote_config.init_server_template(
            app=app,
            default_config={'default_test': 'default_value'},
            template_data_json=json.dumps(template_data)
        )

        config = template.evaluate()
        assert config.get_string('test_key') == 'test_value'

    @pytest.mark.asyncio
    async def test_get_server_template(self):
        """get_server_template fetches and returns an evaluable template."""
        app = firebase_admin.get_app()
        rc_instance = _utils.get_app_service(app,
                                             _REMOTE_CONFIG_ATTRIBUTE, _RemoteConfigService)

        recorder = []
        response = json.dumps({
            'parameters': {
                'test_key': {
                    'defaultValue': {'value': 'test_value'},
                    'conditionalValues': {}
                }
            },
            'conditions': [],
            'version': 'test'
        })

        rc_instance._client.session.mount(
            'https://firebaseremoteconfig.googleapis.com',
            MockAdapter(response, 200, recorder))

        template = await remote_config.get_server_template(app=app)

        config = template.evaluate()
        assert config.get_string('test_key') == 'test_value'

    @pytest.mark.asyncio
    async def test_server_template_to_json(self):
        """to_json round-trips the fetched template, including the ETag."""
        app = firebase_admin.get_app()
        rc_instance = _utils.get_app_service(app,
                                             _REMOTE_CONFIG_ATTRIBUTE, _RemoteConfigService)

        recorder = []
        response = json.dumps({
            'parameters': {
                'test_key': {
                    'defaultValue': {'value': 'test_value'},
                    'conditionalValues': {}
                }
            },
            'conditions': [],
            'version': 'test'
        })

        expected_template_json = '{"parameters": {' \
            '"test_key": {' \
            '"defaultValue": {' \
            '"value": "test_value"}, ' \
            '"conditionalValues": {}}}, "conditions": [], ' \
            '"version": "test", "etag": "etag"}'

        rc_instance._client.session.mount(
            'https://firebaseremoteconfig.googleapis.com',
            MockAdapter(response, 200, recorder))
        template = await remote_config.get_server_template(app=app)

        template_json = template.to_json()
        assert template_json == expected_template_json

# ---------------------------------------------------------------------------
# (patch continues) New file: tests/test_retry.py
# ---------------------------------------------------------------------------
# Copyright 2025 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+ +"""Test cases for the firebase_admin._retry module.""" + +import time +import email.utils +from itertools import repeat +from unittest.mock import call +import pytest +import httpx +from pytest_mock import MockerFixture +import respx + +from firebase_admin._retry import HttpxRetry, HttpxRetryTransport + +_TEST_URL = 'http://firebase.test.url/' + +@pytest.fixture +def base_url() -> str: + """Provides a consistent base URL for tests.""" + return _TEST_URL + +class TestHttpxRetryTransport(): + @pytest.mark.asyncio + @respx.mock + async def test_no_retry_on_success(self, base_url: str, mocker: MockerFixture): + """Test that a successful response doesn't trigger retries.""" + retry_config = HttpxRetry(max_retries=3, status_forcelist=[500]) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + route = respx.post(base_url).mock(return_value=httpx.Response(200, text="Success")) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + response = await client.post(base_url) + + assert response.status_code == 200 + assert response.text == "Success" + assert route.call_count == 1 + mock_sleep.assert_not_called() + + @pytest.mark.asyncio + @respx.mock + async def test_no_retry_on_non_retryable_status(self, base_url: str, mocker: MockerFixture): + """Test that a non-retryable error status doesn't trigger retries.""" + retry_config = HttpxRetry(max_retries=3, status_forcelist=[500, 503]) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + route = respx.post(base_url).mock(return_value=httpx.Response(404, text="Not Found")) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + response = await client.post(base_url) + + assert response.status_code == 404 + assert response.text == "Not Found" + assert route.call_count == 1 + mock_sleep.assert_not_called() + + @pytest.mark.asyncio + @respx.mock + async def 
test_retry_on_status_code_success_on_last_retry( + self, base_url: str, mocker: MockerFixture + ): + """Test retry on status code from status_forcelist, succeeding on the last attempt.""" + retry_config = HttpxRetry(max_retries=2, status_forcelist=[503, 500], backoff_factor=0.5) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + route = respx.post(base_url).mock(side_effect=[ + httpx.Response(503, text="Attempt 1 Failed"), + httpx.Response(500, text="Attempt 2 Failed"), + httpx.Response(200, text="Attempt 3 Success"), + ]) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + response = await client.post(base_url) + + assert response.status_code == 200 + assert response.text == "Attempt 3 Success" + assert route.call_count == 3 + assert mock_sleep.call_count == 2 + # Check sleep calls (backoff_factor is 0.5) + mock_sleep.assert_has_calls([call(0.0), call(1.0)]) + + @pytest.mark.asyncio + @respx.mock + async def test_retry_exhausted_returns_last_response( + self, base_url: str, mocker: MockerFixture + ): + """Test that the last response is returned when retries are exhausted.""" + retry_config = HttpxRetry(max_retries=1, status_forcelist=[500], backoff_factor=0) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + route = respx.post(base_url).mock(side_effect=[ + httpx.Response(500, text="Attempt 1 Failed"), + httpx.Response(500, text="Attempt 2 Failed (Final)"), + # Should stop after previous response + httpx.Response(200, text="This should not be reached"), + ]) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + response = await client.post(base_url) + + assert response.status_code == 500 + assert response.text == "Attempt 2 Failed (Final)" + assert route.call_count == 2 # Initial call + 1 retry + assert mock_sleep.call_count == 1 # Slept before the single retry + + @pytest.mark.asyncio + @respx.mock + async def 
test_retry_after_header_seconds(self, base_url: str, mocker: MockerFixture): + """Test respecting Retry-After header with seconds value.""" + retry_config = HttpxRetry( + max_retries=1, respect_retry_after_header=True, backoff_factor=100) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + route = respx.post(base_url).mock(side_effect=[ + httpx.Response(429, text="Too Many Requests", headers={'Retry-After': '10'}), + httpx.Response(200, text="OK"), + ]) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + response = await client.post(base_url) + + assert response.status_code == 200 + assert route.call_count == 2 + assert mock_sleep.call_count == 1 + # Assert sleep was called with the value from Retry-After header + mock_sleep.assert_called_once_with(10.0) + + @pytest.mark.asyncio + @respx.mock + async def test_retry_after_header_http_date(self, base_url: str, mocker: MockerFixture): + """Test respecting Retry-After header with an HTTP-date value.""" + retry_config = HttpxRetry( + max_retries=1, respect_retry_after_header=True, backoff_factor=100) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + # Calculate a future time and format as HTTP-date + retry_delay_seconds = 60 + time_at_request = time.time() + retry_time = time_at_request + retry_delay_seconds + http_date = email.utils.formatdate(retry_time) + + route = respx.post(base_url).mock(side_effect=[ + httpx.Response(503, text="Maintenance", headers={'Retry-After': http_date}), + httpx.Response(200, text="OK"), + ]) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + # Patch time.time() within the test context to control the baseline for date calculation + # Set the mock time to be *just before* the Retry-After time + mocker.patch('time.time', return_value=time_at_request) + response = await client.post(base_url) + + assert response.status_code == 200 + assert 
route.call_count == 2 + assert mock_sleep.call_count == 1 + # Check that sleep was called with approximately the correct delay + # Allow for small floating point inaccuracies + mock_sleep.assert_called_once() + args, _ = mock_sleep.call_args + assert args[0] == pytest.approx(retry_delay_seconds, abs=2) + + @pytest.mark.asyncio + @respx.mock + async def test_retry_after_ignored_when_disabled(self, base_url: str, mocker: MockerFixture): + """Test Retry-After header is ignored if `respect_retry_after_header` is `False`.""" + retry_config = HttpxRetry( + max_retries=3, respect_retry_after_header=False, status_forcelist=[429], + backoff_factor=0.5, backoff_max=10) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + route = respx.post(base_url).mock(side_effect=[ + httpx.Response(429, text="Too Many Requests", headers={'Retry-After': '60'}), + httpx.Response(429, text="Too Many Requests", headers={'Retry-After': '60'}), + httpx.Response(429, text="Too Many Requests", headers={'Retry-After': '60'}), + httpx.Response(200, text="OK"), + ]) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + response = await client.post(base_url) + + assert response.status_code == 200 + assert route.call_count == 4 + assert mock_sleep.call_count == 3 + + # Assert sleep was called with the calculated backoff times: + # After first attempt: delay = 0 + # After retry 1 (attempt 2): delay = 0.5 * (2**(2-1)) = 0.5 * 2 = 1.0 + # After retry 2 (attempt 3): delay = 0.5 * (2**(3-1)) = 0.5 * 4 = 2.0 + expected_sleeps = [call(0), call(1.0), call(2.0)] + mock_sleep.assert_has_calls(expected_sleeps) + + @pytest.mark.asyncio + @respx.mock + async def test_retry_after_header_missing_backoff_fallback( + self, base_url: str, mocker: MockerFixture + ): + """Test Retry-After header is ignored if `respect_retry_after_header`is `True` but header is + not set.""" + retry_config = HttpxRetry( + max_retries=3, respect_retry_after_header=True, 
status_forcelist=[429], + backoff_factor=0.5, backoff_max=10) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + route = respx.post(base_url).mock(side_effect=[ + httpx.Response(429, text="Too Many Requests"), + httpx.Response(429, text="Too Many Requests"), + httpx.Response(429, text="Too Many Requests"), + httpx.Response(200, text="OK"), + ]) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + response = await client.post(base_url) + + assert response.status_code == 200 + assert route.call_count == 4 + assert mock_sleep.call_count == 3 + + # Assert sleep was called with the calculated backoff times: + # After first attempt: delay = 0 + # After retry 1 (attempt 2): delay = 0.5 * (2**(2-1)) = 0.5 * 2 = 1.0 + # After retry 2 (attempt 3): delay = 0.5 * (2**(3-1)) = 0.5 * 4 = 2.0 + expected_sleeps = [call(0), call(1.0), call(2.0)] + mock_sleep.assert_has_calls(expected_sleeps) + + @pytest.mark.asyncio + @respx.mock + async def test_exponential_backoff(self, base_url: str, mocker: MockerFixture): + """Test that sleep time increases exponentially with `backoff_factor`.""" + # status=3 allows 3 retries (attempts 2, 3, 4) + retry_config = HttpxRetry( + max_retries=3, status_forcelist=[500], backoff_factor=0.1, backoff_max=10.0) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + route = respx.post(base_url).mock(side_effect=[ + httpx.Response(500, text="Fail 1"), + httpx.Response(500, text="Fail 2"), + httpx.Response(500, text="Fail 3"), + httpx.Response(200, text="Success"), + ]) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + response = await client.post(base_url) + + assert response.status_code == 200 + assert route.call_count == 4 + assert mock_sleep.call_count == 3 + + # Check expected backoff times: + # After first attempt: delay = 0 + # After retry 1 (attempt 2): delay = 0.1 * (2**(2-1)) = 0.1 * 2 = 0.2 + # After retry 
2 (attempt 3): delay = 0.1 * (2**(3-1)) = 0.1 * 4 = 0.4 + expected_sleeps = [call(0), call(0.2), call(0.4)] + mock_sleep.assert_has_calls(expected_sleeps) + + @pytest.mark.asyncio + @respx.mock + async def test_backoff_max(self, base_url: str, mocker: MockerFixture): + """Test that backoff time respects `backoff_max`.""" + # status=4 allows 4 retries. backoff_factor=1 causes rapid increase. + retry_config = HttpxRetry( + max_retries=4, status_forcelist=[500], backoff_factor=1, backoff_max=3.0) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + route = respx.post(base_url).mock(side_effect=[ + httpx.Response(500, text="Fail 1"), + httpx.Response(500, text="Fail 2"), + httpx.Response(500, text="Fail 2"), + httpx.Response(500, text="Fail 4"), + httpx.Response(200, text="Success"), + ]) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + response = await client.post(base_url) + + assert response.status_code == 200 + assert route.call_count == 5 + assert mock_sleep.call_count == 4 + + # Check expected backoff times: + # After first attempt: delay = 0 + # After retry 1 (attempt 2): delay = 1*(2**(2-1)) = 2. Clamped by max(0, min(3.0, 2)) = 2.0 + # After retry 2 (attempt 3): delay = 1*(2**(3-1)) = 4. Clamped by max(0, min(3.0, 4)) = 3.0 + # After retry 3 (attempt 4): delay = 1*(2**(4-1)) = 8. 
Clamped by max(0, min(3.0, 8)) = 3.0 + expected_sleeps = [call(0.0), call(2.0), call(3.0), call(3.0)] + mock_sleep.assert_has_calls(expected_sleeps) + + @pytest.mark.asyncio + @respx.mock + async def test_backoff_jitter(self, base_url: str, mocker: MockerFixture): + """Test that `backoff_jitter` adds randomness within bounds.""" + retry_config = HttpxRetry( + max_retries=3, status_forcelist=[500], backoff_factor=0.2, backoff_jitter=0.1) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + route = respx.post(base_url).mock(side_effect=[ + httpx.Response(500, text="Fail 1"), + httpx.Response(500, text="Fail 2"), + httpx.Response(500, text="Fail 3"), + httpx.Response(200, text="Success"), + ]) + + mock_sleep = mocker.patch('asyncio.sleep', return_value=None) + response = await client.post(base_url) + + assert response.status_code == 200 + assert route.call_count == 4 + assert mock_sleep.call_count == 3 + + # Check expected backoff times are within the expected range [base - jitter, base + jitter] + # After first attempt: delay = 0 + # After retry 1 (attempt 2): delay = 0.2 * (2**(2-1)) = 0.2 * 2 = 0.4 +/- 0.1 + # After retry 2 (attempt 3): delay = 0.2 * (2**(3-1)) = 0.2 * 4 = 0.8 +/- 0.1 + expected_sleeps = [ + call(pytest.approx(0.0, abs=0.1)), + call(pytest.approx(0.4, abs=0.1)), + call(pytest.approx(0.8, abs=0.1)) + ] + mock_sleep.assert_has_calls(expected_sleeps) + + @pytest.mark.asyncio + @respx.mock + async def test_error_not_retryable(self, base_url): + """Test that non-HTTP errors are raised immediately if not retryable.""" + retry_config = HttpxRetry(max_retries=3) + transport = HttpxRetryTransport(retry=retry_config) + client = httpx.AsyncClient(transport=transport) + + # Mock a connection error + route = respx.post(base_url).mock( + side_effect=repeat(httpx.ConnectError("Connection failed"))) + + with pytest.raises(httpx.ConnectError, match="Connection failed"): + await client.post(base_url) + + assert 
route.call_count == 1 + + +class TestHttpxRetry(): + _TEST_REQUEST = httpx.Request('POST', _TEST_URL) + + def test_httpx_retry_copy(self, base_url): + """Test that `HttpxRetry.copy()` creates a deep copy.""" + original = HttpxRetry(max_retries=5, status_forcelist=[500, 503], backoff_factor=0.5) + original.history.append((base_url, None, None)) # Add something mutable + + copied = original.copy() + + # Assert they are different objects + assert original is not copied + assert original.history is not copied.history + + # Assert values are the same initially + assert copied.retries_left == original.retries_left + assert copied.status_forcelist == original.status_forcelist + assert copied.backoff_factor == original.backoff_factor + assert len(copied.history) == 1 + + # Modify the copy and check original is unchanged + copied.retries_left = 1 + copied.status_forcelist = [404] + copied.history.append((base_url, None, None)) + + assert original.retries_left == 5 + assert original.status_forcelist == [500, 503] + assert len(original.history) == 1 + + def test_parse_retry_after_seconds(self): + retry = HttpxRetry() + assert retry._parse_retry_after('10') == 10.0 + assert retry._parse_retry_after(' 30 ') == 30.0 + + + def test_parse_retry_after_http_date(self, mocker: MockerFixture): + mocker.patch('time.time', return_value=1000.0) + retry = HttpxRetry() + # Date string representing 1015 seconds since epoch + http_date = email.utils.formatdate(1015.0) + # time.time() is mocked to 1000.0, so delay should be 15s + assert retry._parse_retry_after(http_date) == pytest.approx(15.0) + + def test_parse_retry_after_past_http_date(self, mocker: MockerFixture): + """Test that a past date results in 0 seconds.""" + mocker.patch('time.time', return_value=1000.0) + retry = HttpxRetry() + http_date = email.utils.formatdate(990.0) # 10s in the past + assert retry._parse_retry_after(http_date) == 0.0 + + def test_parse_retry_after_invalid_date(self): + retry = HttpxRetry() + with 
pytest.raises(httpx.RemoteProtocolError, match='Invalid Retry-After header'): + retry._parse_retry_after('Invalid Date Format') + + def test_get_backoff_time_calculation(self): + retry = HttpxRetry( + max_retries=6, status_forcelist=[503], backoff_factor=0.5, backoff_max=10.0) + response = httpx.Response(503) + # No history -> attempt 1 -> no backoff before first request + # Note: get_backoff_time() is typically called *before* the *next* request, + # so history length reflects completed attempts. + assert retry.get_backoff_time() == 0.0 + + # Simulate attempt 1 completed + retry.increment(self._TEST_REQUEST, response) + # History len 1, attempt 2 -> base case 0 + assert retry.get_backoff_time() == pytest.approx(0) + + # Simulate attempt 2 completed + retry.increment(self._TEST_REQUEST, response) + # History len 2, attempt 3 -> 0.5*(2^1) = 1.0 + assert retry.get_backoff_time() == pytest.approx(1.0) + + # Simulate attempt 3 completed + retry.increment(self._TEST_REQUEST, response) + # History len 3, attempt 4 -> 0.5*(2^2) = 2.0 + assert retry.get_backoff_time() == pytest.approx(2.0) + + # Simulate attempt 4 completed + retry.increment(self._TEST_REQUEST, response) + # History len 4, attempt 5 -> 0.5*(2^3) = 4.0 + assert retry.get_backoff_time() == pytest.approx(4.0) + + # Simulate attempt 5 completed + retry.increment(self._TEST_REQUEST, response) + # History len 5, attempt 6 -> 0.5*(2^4) = 8.0 + assert retry.get_backoff_time() == pytest.approx(8.0) + + # Simulate attempt 6 completed + retry.increment(self._TEST_REQUEST, response) + # History len 6, attempt 7 -> 0.5*(2^5) = 16.0 Clamped to 10 + assert retry.get_backoff_time() == pytest.approx(10.0) diff --git a/tests/test_rfc3339.py b/tests/test_rfc3339.py new file mode 100644 index 000000000..5a844b07e --- /dev/null +++ b/tests/test_rfc3339.py @@ -0,0 +1,67 @@ +# Copyright 2020 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test cases for the firebase_admin._rfc3339 module.""" + +import pytest + +from firebase_admin import _rfc3339 + +def test_epoch(): + expected = pytest.approx(0) + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00Z") == expected + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00z") == expected + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00+00:00") == expected + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00-00:00") == expected + assert _rfc3339.parse_to_epoch("1970-01-01T01:00:00+01:00") == expected + assert _rfc3339.parse_to_epoch("1969-12-31T23:00:00-01:00") == expected + +def test_pre_epoch(): + expected = -5617641600 + assert _rfc3339.parse_to_epoch("1791-12-26T00:00:00Z") == expected + assert _rfc3339.parse_to_epoch("1791-12-26T00:00:00+00:00") == expected + assert _rfc3339.parse_to_epoch("1791-12-26T00:00:00-00:00") == expected + assert _rfc3339.parse_to_epoch("1791-12-26T01:00:00+01:00") == expected + assert _rfc3339.parse_to_epoch("1791-12-25T23:00:00-01:00") == expected + +def test_post_epoch(): + expected = 904892400 + assert _rfc3339.parse_to_epoch("1998-09-04T07:00:00Z") == expected + assert _rfc3339.parse_to_epoch("1998-09-04T07:00:00+00:00") == expected + assert _rfc3339.parse_to_epoch("1998-09-04T08:00:00+01:00") == expected + assert _rfc3339.parse_to_epoch("1998-09-04T06:00:00-01:00") == expected + +def test_micros_millis(): + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00Z") == 
pytest.approx(0) + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00.1Z") == pytest.approx(0.1) + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00.001Z") == pytest.approx(0.001) + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00.000001Z") == pytest.approx(0.000001) + + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00+00:00") == pytest.approx(0) + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00.1+00:00") == pytest.approx(0.1) + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00.001+00:00") == pytest.approx(0.001) + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00.000001+00:00") == pytest.approx(0.000001) + +def test_nanos(): + assert _rfc3339.parse_to_epoch("1970-01-01T00:00:00.0000001Z") == pytest.approx(0) + +@pytest.mark.parametrize('datestr', [ + 'not a date string', + '1970-01-01 00:00:00Z', + '1970-01-01 00:00:00+00:00', + '1970-01-01T00:00:00', + ]) +def test_bad_datestrs(datestr): + with pytest.raises(ValueError): + _rfc3339.parse_to_epoch(datestr) diff --git a/tests/test_sseclient.py b/tests/test_sseclient.py new file mode 100644 index 000000000..2c523e36f --- /dev/null +++ b/tests/test_sseclient.py @@ -0,0 +1,144 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for firebase_admin._sseclient.""" +import io +import json + +import requests + +from firebase_admin import _sseclient +from tests import testutils + + +class MockSSEClientAdapter(testutils.MockAdapter): + + def __init__(self, payload, recorder): + super().__init__(payload, 200, recorder) + + def send(self, request, **kwargs): + resp = super().send(request, **kwargs) + resp.url = request.url + resp.status_code = self.status + resp.raw = io.BytesIO(self.data.encode()) + resp.encoding = "utf-8" + return resp + + +class TestSSEClient: + """Test cases for the SSEClient""" + + test_url = "https://test.firebaseio.com" + + + def init_sse(self, payload, recorder=None): + if recorder is None: + recorder = [] + adapter = MockSSEClientAdapter(payload, recorder) + session = requests.Session() + session.mount(self.test_url, adapter) + return _sseclient.SSEClient(url=self.test_url, session=session, retry=1) + + def test_init_sseclient(self): + payload = 'event: put\ndata: {"path":"/","data":"testevent"}\n\n' + sseclient = self.init_sse(payload) + assert sseclient.url == self.test_url + assert sseclient.session is not None + + def test_single_event(self): + payload = 'event: put\ndata: {"path":"/","data":"testevent"}\n\n' + recorder = [] + sseclient = self.init_sse(payload, recorder) + event = next(sseclient) + event_payload = json.loads(event.data) + assert event_payload["data"] == "testevent" + assert event_payload["path"] == "/" + assert len(recorder) == 1 + # The SSEClient should reconnect now, at which point the mock adapter + # will echo back the same response. 
+ event = next(sseclient) + event_payload = json.loads(event.data) + assert event_payload["data"] == "testevent" + assert event_payload["path"] == "/" + assert len(recorder) == 2 + + def test_large_event(self): + data = 'a' * int(0.1 * 1024 * 1024) + payload = 'event: put\ndata: {"path":"/","data":"' + data + '"}\n\n' + recorder = [] + sseclient = self.init_sse(payload, recorder) + event = next(sseclient) + event_payload = json.loads(event.data) + assert event_payload["data"] == data + assert event_payload["path"] == "/" + assert len(recorder) == 1 + + def test_multiple_events(self): + payload = 'event: put\ndata: {"path":"/foo","data":"testevent1"}\n\n' + payload += 'event: put\ndata: {"path":"/bar","data":"testevent2"}\n\n' + recorder = [] + sseclient = self.init_sse(payload, recorder) + event = next(sseclient) + event_payload = json.loads(event.data) + assert event_payload["data"] == "testevent1" + assert event_payload["path"] == "/foo" + event = next(sseclient) + event_payload = json.loads(event.data) + assert event_payload["data"] == "testevent2" + assert event_payload["path"] == "/bar" + assert len(recorder) == 1 + + def test_event_separators(self): + payload = 'event: put\ndata: {"path":"/foo","data":"testevent1"}\n\n' + payload += 'event: put\ndata: {"path":"/bar","data":"testevent2"}\r\r' + payload += 'event: put\ndata: {"path":"/baz","data":"testevent3"}\r\n\r\n' + recorder = [] + sseclient = self.init_sse(payload, recorder) + event = next(sseclient) + event_payload = json.loads(event.data) + assert event_payload["data"] == "testevent1" + assert event_payload["path"] == "/foo" + event = next(sseclient) + event_payload = json.loads(event.data) + assert event_payload["data"] == "testevent2" + assert event_payload["path"] == "/bar" + event = next(sseclient) + event_payload = json.loads(event.data) + assert event_payload["data"] == "testevent3" + assert event_payload["path"] == "/baz" + assert len(recorder) == 1 + + +class TestEvent: + """Test cases for 
server-side events""" + + def test_normal(self): + data = 'event: put\ndata: {"path":"/","data":"testdata"}' + event = _sseclient.Event.parse(data) + assert event.event_type == "put" + assert event.data == '{"path":"/","data":"testdata"}' + + def test_all_fields(self): + data = 'event: put\ndata: {"path":"/","data":"testdata"}\nretry: 5000\nid: abcd' + event = _sseclient.Event.parse(data) + assert event.event_type == "put" + assert event.data == '{"path":"/","data":"testdata"}' + assert event.retry == 5000 + assert event.event_id == 'abcd' + + def test_invalid(self): + data = 'event: invalid_event' + event = _sseclient.Event.parse(data) + assert event.event_type == "invalid_event" + assert event.data == '' diff --git a/tests/test_storage.py b/tests/test_storage.py new file mode 100644 index 000000000..c874ef640 --- /dev/null +++ b/tests/test_storage.py @@ -0,0 +1,52 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for firebase_admin.storage.""" + +import pytest + +import firebase_admin +from firebase_admin import credentials +from firebase_admin import storage +from tests import testutils + + +def setup_module(): + cred = credentials.Certificate(testutils.resource_filename('service_account.json')) + firebase_admin.initialize_app(cred) + +def teardown_module(): + testutils.cleanup_apps() + +def test_invalid_config(): + with pytest.raises(ValueError): + storage.bucket() + +@pytest.mark.parametrize('name', [None, '', 0, 1, True, False, [], tuple(), {}]) +def test_invalid_name(name): + with pytest.raises(ValueError): + storage.bucket(name) + +def test_valid_name(): + # Should not make RPC calls. + bucket = storage.bucket('foo') + assert bucket is not None + assert bucket.name == 'foo' + +def test_valid_name_with_explicit_app(): + # Should not make RPC calls. + app = firebase_admin.get_app() + bucket = storage.bucket('foo', app=app) + assert bucket is not None + assert bucket.name == 'foo' diff --git a/tests/test_tenant_mgt.py b/tests/test_tenant_mgt.py new file mode 100644 index 000000000..900faa376 --- /dev/null +++ b/tests/test_tenant_mgt.py @@ -0,0 +1,1052 @@ +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Test cases for the firebase_admin.tenant_mgt module.""" + +import json +import unittest.mock +from urllib import parse + +import pytest + +import firebase_admin +from firebase_admin import auth +from firebase_admin import credentials +from firebase_admin import exceptions +from firebase_admin import tenant_mgt +from firebase_admin import _auth_providers +from firebase_admin import _user_mgt +from firebase_admin import _utils +from tests import testutils +from tests import test_token_gen +from tests.test_token_gen import MOCK_CURRENT_TIME, MOCK_CURRENT_TIME_UTC + + +GET_TENANT_RESPONSE = """{ + "name": "projects/mock-project-id/tenants/tenant-id", + "displayName": "Test Tenant", + "allowPasswordSignup": true, + "enableEmailLinkSignin": true +}""" + +TENANT_NOT_FOUND_RESPONSE = """{ + "error": { + "message": "TENANT_NOT_FOUND" + } +}""" + +LIST_TENANTS_RESPONSE = """{ + "tenants": [ + { + "name": "projects/mock-project-id/tenants/tenant0", + "displayName": "Test Tenant", + "allowPasswordSignup": true, + "enableEmailLinkSignin": true + }, + { + "name": "projects/mock-project-id/tenants/tenant1", + "displayName": "Test Tenant", + "allowPasswordSignup": true, + "enableEmailLinkSignin": true + } + ] +}""" + +LIST_TENANTS_RESPONSE_WITH_TOKEN = """{ + "tenants": [ + { + "name": "projects/mock-project-id/tenants/tenant0" + }, + { + "name": "projects/mock-project-id/tenants/tenant1" + }, + { + "name": "projects/mock-project-id/tenants/tenant2" + } + ], + "nextPageToken": "token" +}""" + +MOCK_GET_USER_RESPONSE = testutils.resource('get_user.json') +MOCK_LIST_USERS_RESPONSE = testutils.resource('list_users.json') + +OIDC_PROVIDER_CONFIG_RESPONSE = testutils.resource('oidc_provider_config.json') +OIDC_PROVIDER_CONFIG_REQUEST = { + 'displayName': 'oidcProviderName', + 'enabled': True, + 'clientId': 'CLIENT_ID', + 'issuer': 'https://oidc.com/issuer', +} + +SAML_PROVIDER_CONFIG_RESPONSE = testutils.resource('saml_provider_config.json') +SAML_PROVIDER_CONFIG_REQUEST = body = 
{ + 'displayName': 'samlProviderName', + 'enabled': True, + 'idpConfig': { + 'idpEntityId': 'IDP_ENTITY_ID', + 'ssoUrl': 'https://example.com/login', + 'idpCertificates': [{'x509Certificate': 'CERT1'}, {'x509Certificate': 'CERT2'}] + }, + 'spConfig': { + 'spEntityId': 'RP_ENTITY_ID', + 'callbackUri': 'https://projectId.firebaseapp.com/__/auth/handler', + } +} + +LIST_OIDC_PROVIDER_CONFIGS_RESPONSE = testutils.resource('list_oidc_provider_configs.json') +LIST_SAML_PROVIDER_CONFIGS_RESPONSE = testutils.resource('list_saml_provider_configs.json') + +INVALID_TENANT_IDS = [None, '', 0, 1, True, False, [], tuple(), {}] +INVALID_BOOLEANS = ['', 1, 0, [], tuple(), {}] + +USER_MGT_URL_PREFIX = 'https://identitytoolkit.googleapis.com/v1/projects/mock-project-id' +PROVIDER_MGT_URL_PREFIX = 'https://identitytoolkit.googleapis.com/v2/projects/mock-project-id' +TENANT_MGT_URL_PREFIX = 'https://identitytoolkit.googleapis.com/v2/projects/mock-project-id' + + +@pytest.fixture(scope='module') +def tenant_mgt_app(): + app = firebase_admin.initialize_app( + testutils.MockCredential(), name='tenantMgt', options={'projectId': 'mock-project-id'}) + yield app + firebase_admin.delete_app(app) + + +def _instrument_tenant_mgt(app, status, payload): + service = tenant_mgt._get_tenant_mgt_service(app) + recorder = [] + service.client.session.mount( + tenant_mgt._TenantManagementService.TENANT_MGT_URL, + testutils.MockAdapter(payload, status, recorder)) + return service, recorder + + +def _instrument_user_mgt(client, status, payload): + recorder = [] + user_manager = client._user_manager + user_manager.http_client.session.mount( + _user_mgt.UserManager.ID_TOOLKIT_URL, + testutils.MockAdapter(payload, status, recorder)) + return recorder + + +def _instrument_provider_mgt(client, status, payload): + recorder = [] + provider_manager = client._provider_manager + provider_manager.http_client.session.mount( + _auth_providers.ProviderConfigClient.PROVIDER_CONFIG_URL, + testutils.MockAdapter(payload, 
status, recorder)) + return recorder + + +class TestTenant: + + @pytest.mark.parametrize('data', [None, 'foo', 0, 1, True, False, [], tuple(), {}]) + def test_invalid_data(self, data): + with pytest.raises(ValueError): + tenant_mgt.Tenant(data) + + def test_tenant(self): + data = { + 'name': 'projects/test-project/tenants/tenant-id', + 'displayName': 'Test Tenant', + 'allowPasswordSignup': True, + 'enableEmailLinkSignin': True, + } + tenant = tenant_mgt.Tenant(data) + assert tenant.tenant_id == 'tenant-id' + assert tenant.display_name == 'Test Tenant' + assert tenant.allow_password_sign_up is True + assert tenant.enable_email_link_sign_in is True + + def test_tenant_optional_params(self): + data = { + 'name': 'projects/test-project/tenants/tenant-id', + } + tenant = tenant_mgt.Tenant(data) + assert tenant.tenant_id == 'tenant-id' + assert tenant.display_name is None + assert tenant.allow_password_sign_up is False + assert tenant.enable_email_link_sign_in is False + + +class TestGetTenant: + + @pytest.mark.parametrize('tenant_id', INVALID_TENANT_IDS) + def test_invalid_tenant_id(self, tenant_id, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.get_tenant(tenant_id, app=tenant_mgt_app) + assert str(excinfo.value).startswith('Invalid tenant ID') + + def test_get_tenant(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, GET_TENANT_RESPONSE) + tenant = tenant_mgt.get_tenant('tenant-id', app=tenant_mgt_app) + + _assert_tenant(tenant) + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'GET' + assert req.url == f'{TENANT_MGT_URL_PREFIX}/tenants/tenant-id' + assert req.headers['X-Client-Version'] == f'Python/Admin/{firebase_admin.__version__}' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert req.headers['x-goog-api-client'] == expected_metrics_header + + def test_tenant_not_found(self, tenant_mgt_app): + _instrument_tenant_mgt(tenant_mgt_app, 500, 
TENANT_NOT_FOUND_RESPONSE) + with pytest.raises(tenant_mgt.TenantNotFoundError) as excinfo: + tenant_mgt.get_tenant('tenant-id', app=tenant_mgt_app) + + error_msg = 'No tenant found for the given identifier (TENANT_NOT_FOUND).' + assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + +class TestCreateTenant: + + @pytest.mark.parametrize('display_name', [True, False, 1, 0, [], tuple(), {}]) + def test_invalid_display_name_type(self, display_name, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.create_tenant(display_name=display_name, app=tenant_mgt_app) + assert str(excinfo.value).startswith('Invalid type for displayName') + + @pytest.mark.parametrize('display_name', ['', 'foo', '1test', 'foo bar', 'a'*21]) + def test_invalid_display_name_value(self, display_name, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.create_tenant(display_name=display_name, app=tenant_mgt_app) + assert str(excinfo.value).startswith('displayName must start') + + @pytest.mark.parametrize('allow', INVALID_BOOLEANS) + def test_invalid_allow_password_sign_up(self, allow, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.create_tenant( + display_name='test', allow_password_sign_up=allow, app=tenant_mgt_app) + assert str(excinfo.value).startswith('Invalid type for allowPasswordSignup') + + @pytest.mark.parametrize('enable', INVALID_BOOLEANS) + def test_invalid_enable_email_link_sign_in(self, enable, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.create_tenant( + display_name='test', enable_email_link_sign_in=enable, app=tenant_mgt_app) + assert str(excinfo.value).startswith('Invalid type for enableEmailLinkSignin') + + def test_create_tenant(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, GET_TENANT_RESPONSE) + tenant = 
tenant_mgt.create_tenant( + display_name='My-Tenant', allow_password_sign_up=True, enable_email_link_sign_in=True, + app=tenant_mgt_app) + + _assert_tenant(tenant) + self._assert_request(recorder, { + 'displayName': 'My-Tenant', + 'allowPasswordSignup': True, + 'enableEmailLinkSignin': True, + }) + + def test_create_tenant_false_values(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, GET_TENANT_RESPONSE) + tenant = tenant_mgt.create_tenant( + display_name='test', allow_password_sign_up=False, enable_email_link_sign_in=False, + app=tenant_mgt_app) + + _assert_tenant(tenant) + self._assert_request(recorder, { + 'displayName': 'test', + 'allowPasswordSignup': False, + 'enableEmailLinkSignin': False, + }) + + def test_create_tenant_minimal(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, GET_TENANT_RESPONSE) + tenant = tenant_mgt.create_tenant(display_name='test', app=tenant_mgt_app) + + _assert_tenant(tenant) + self._assert_request(recorder, {'displayName': 'test'}) + + def test_error(self, tenant_mgt_app): + _instrument_tenant_mgt(tenant_mgt_app, 500, '{}') + with pytest.raises(exceptions.InternalError) as excinfo: + tenant_mgt.create_tenant(display_name='test', app=tenant_mgt_app) + + error_msg = 'Unexpected error response: {}' + assert excinfo.value.code == exceptions.INTERNAL + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def _assert_request(self, recorder, body): + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'POST' + assert req.url == f'{TENANT_MGT_URL_PREFIX}/tenants' + assert req.headers['X-Client-Version'] == f'Python/Admin/{firebase_admin.__version__}' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert req.headers['x-goog-api-client'] == expected_metrics_header + got = json.loads(req.body.decode()) + assert got == body + + +class 
TestUpdateTenant: + + @pytest.mark.parametrize('tenant_id', INVALID_TENANT_IDS) + def test_invalid_tenant_id(self, tenant_id, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.update_tenant(tenant_id, display_name='My Tenant', app=tenant_mgt_app) + assert str(excinfo.value).startswith('Tenant ID must be a non-empty string') + + @pytest.mark.parametrize('display_name', [True, False, 1, 0, [], tuple(), {}]) + def test_invalid_display_name_type(self, display_name, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.update_tenant('tenant-id', display_name=display_name, app=tenant_mgt_app) + assert str(excinfo.value).startswith('Invalid type for displayName') + + @pytest.mark.parametrize('display_name', ['', 'foo', '1test', 'foo bar', 'a'*21]) + def test_invalid_display_name_value(self, display_name, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.update_tenant('tenant-id', display_name=display_name, app=tenant_mgt_app) + assert str(excinfo.value).startswith('displayName must start') + + @pytest.mark.parametrize('allow', INVALID_BOOLEANS) + def test_invalid_allow_password_sign_up(self, allow, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.update_tenant('tenant-id', allow_password_sign_up=allow, app=tenant_mgt_app) + assert str(excinfo.value).startswith('Invalid type for allowPasswordSignup') + + @pytest.mark.parametrize('enable', INVALID_BOOLEANS) + def test_invalid_enable_email_link_sign_in(self, enable, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.update_tenant( + 'tenant-id', enable_email_link_sign_in=enable, app=tenant_mgt_app) + assert str(excinfo.value).startswith('Invalid type for enableEmailLinkSignin') + + def test_update_tenant_no_args(self, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.update_tenant('tenant-id', app=tenant_mgt_app) + assert str(excinfo.value).startswith('At least one parameter must 
be specified for update') + + def test_update_tenant(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, GET_TENANT_RESPONSE) + tenant = tenant_mgt.update_tenant( + 'tenant-id', display_name='My-Tenant', allow_password_sign_up=True, + enable_email_link_sign_in=True, app=tenant_mgt_app) + + _assert_tenant(tenant) + body = { + 'displayName': 'My-Tenant', + 'allowPasswordSignup': True, + 'enableEmailLinkSignin': True, + } + mask = ['allowPasswordSignup', 'displayName', 'enableEmailLinkSignin'] + self._assert_request(recorder, body, mask) + + def test_update_tenant_false_values(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, GET_TENANT_RESPONSE) + tenant = tenant_mgt.update_tenant( + 'tenant-id', allow_password_sign_up=False, + enable_email_link_sign_in=False, app=tenant_mgt_app) + + _assert_tenant(tenant) + body = { + 'allowPasswordSignup': False, + 'enableEmailLinkSignin': False, + } + mask = ['allowPasswordSignup', 'enableEmailLinkSignin'] + self._assert_request(recorder, body, mask) + + def test_update_tenant_minimal(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, GET_TENANT_RESPONSE) + tenant = tenant_mgt.update_tenant( + 'tenant-id', display_name='My-Tenant', app=tenant_mgt_app) + + _assert_tenant(tenant) + body = {'displayName': 'My-Tenant'} + mask = ['displayName'] + self._assert_request(recorder, body, mask) + + def test_tenant_not_found_error(self, tenant_mgt_app): + _instrument_tenant_mgt(tenant_mgt_app, 500, TENANT_NOT_FOUND_RESPONSE) + with pytest.raises(tenant_mgt.TenantNotFoundError) as excinfo: + tenant_mgt.update_tenant('tenant', display_name='My-Tenant', app=tenant_mgt_app) + + error_msg = 'No tenant found for the given identifier (TENANT_NOT_FOUND).' 
+ assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def _assert_request(self, recorder, body, mask): + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'PATCH' + assert req.url == f'{TENANT_MGT_URL_PREFIX}/tenants/tenant-id?updateMask={",".join(mask)}' + assert req.headers['X-Client-Version'] == f'Python/Admin/{firebase_admin.__version__}' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert req.headers['x-goog-api-client'] == expected_metrics_header + got = json.loads(req.body.decode()) + assert got == body + + +class TestDeleteTenant: + + @pytest.mark.parametrize('tenant_id', INVALID_TENANT_IDS) + def test_invalid_tenant_id(self, tenant_id, tenant_mgt_app): + with pytest.raises(ValueError) as excinfo: + tenant_mgt.delete_tenant(tenant_id, app=tenant_mgt_app) + assert str(excinfo.value).startswith('Invalid tenant ID') + + def test_delete_tenant(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, '{}') + tenant_mgt.delete_tenant('tenant-id', app=tenant_mgt_app) + + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'DELETE' + assert req.url == f'{TENANT_MGT_URL_PREFIX}/tenants/tenant-id' + assert req.headers['X-Client-Version'] == f'Python/Admin/{firebase_admin.__version__}' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert req.headers['x-goog-api-client'] == expected_metrics_header + + def test_tenant_not_found(self, tenant_mgt_app): + _instrument_tenant_mgt(tenant_mgt_app, 500, TENANT_NOT_FOUND_RESPONSE) + with pytest.raises(tenant_mgt.TenantNotFoundError) as excinfo: + tenant_mgt.delete_tenant('tenant-id', app=tenant_mgt_app) + + error_msg = 'No tenant found for the given identifier (TENANT_NOT_FOUND).' 
+ assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + +class TestListTenants: + + @pytest.mark.parametrize('arg', [None, 'foo', [], {}, 0, -1, 101, False]) + def test_invalid_max_results(self, tenant_mgt_app, arg): + with pytest.raises(ValueError): + tenant_mgt.list_tenants(max_results=arg, app=tenant_mgt_app) + + @pytest.mark.parametrize('arg', ['', [], {}, 0, -1, True, False]) + def test_invalid_page_token(self, tenant_mgt_app, arg): + with pytest.raises(ValueError): + tenant_mgt.list_tenants(page_token=arg, app=tenant_mgt_app) + + def test_list_single_page(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, LIST_TENANTS_RESPONSE) + page = tenant_mgt.list_tenants(app=tenant_mgt_app) + self._assert_tenants_page(page) + assert page.next_page_token == '' + assert page.has_next_page is False + assert page.get_next_page() is None + tenants = list(page.iterate_all()) + assert len(tenants) == 2 + self._assert_request(recorder) + + def test_list_multiple_pages(self, tenant_mgt_app): + # Page 1 + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, LIST_TENANTS_RESPONSE_WITH_TOKEN) + page = tenant_mgt.list_tenants(app=tenant_mgt_app) + assert len(page.tenants) == 3 + assert page.next_page_token == 'token' + assert page.has_next_page is True + self._assert_request(recorder) + + # Page 2 (also the last page) + response = {'tenants': [{'name': 'projects/mock-project-id/tenants/tenant3'}]} + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, json.dumps(response)) + page = page.get_next_page() + assert len(page.tenants) == 1 + assert page.next_page_token == '' + assert page.has_next_page is False + assert page.get_next_page() is None + self._assert_request(recorder, {'pageSize': '100', 'pageToken': 'token'}) + + def test_list_tenants_paged_iteration(self, tenant_mgt_app): + # Page 1 + _, recorder = 
_instrument_tenant_mgt(tenant_mgt_app, 200, LIST_TENANTS_RESPONSE_WITH_TOKEN) + page = tenant_mgt.list_tenants(app=tenant_mgt_app) + iterator = page.iterate_all() + for index in range(3): + tenant = next(iterator) + assert tenant.tenant_id == f'tenant{index}' + self._assert_request(recorder) + + # Page 2 (also the last page) + response = {'tenants': [{'name': 'projects/mock-project-id/tenants/tenant3'}]} + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, json.dumps(response)) + tenant = next(iterator) + assert tenant.tenant_id == 'tenant3' + + with pytest.raises(StopIteration): + next(iterator) + self._assert_request(recorder, {'pageSize': '100', 'pageToken': 'token'}) + + def test_list_tenants_iterator_state(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, LIST_TENANTS_RESPONSE) + page = tenant_mgt.list_tenants(app=tenant_mgt_app) + + # Advance iterator. + iterator = page.iterate_all() + tenant = next(iterator) + assert tenant.tenant_id == 'tenant0' + + # Iterator should resume from where left off. 
+ tenant = next(iterator) + assert tenant.tenant_id == 'tenant1' + + with pytest.raises(StopIteration): + next(iterator) + self._assert_request(recorder) + + def test_list_tenants_stop_iteration(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, LIST_TENANTS_RESPONSE) + page = tenant_mgt.list_tenants(app=tenant_mgt_app) + iterator = page.iterate_all() + tenants = list(iterator) + assert len(tenants) == 2 + + with pytest.raises(StopIteration): + next(iterator) + self._assert_request(recorder) + + def test_list_tenants_no_tenants_response(self, tenant_mgt_app): + response = {'tenants': []} + _instrument_tenant_mgt(tenant_mgt_app, 200, json.dumps(response)) + page = tenant_mgt.list_tenants(app=tenant_mgt_app) + assert len(page.tenants) == 0 + tenants = list(page.iterate_all()) + assert len(tenants) == 0 + + def test_list_tenants_with_max_results(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, LIST_TENANTS_RESPONSE) + page = tenant_mgt.list_tenants(max_results=50, app=tenant_mgt_app) + self._assert_tenants_page(page) + self._assert_request(recorder, {'pageSize' : '50'}) + + def test_list_tenants_with_all_args(self, tenant_mgt_app): + _, recorder = _instrument_tenant_mgt(tenant_mgt_app, 200, LIST_TENANTS_RESPONSE) + page = tenant_mgt.list_tenants(page_token='foo', max_results=50, app=tenant_mgt_app) + self._assert_tenants_page(page) + self._assert_request(recorder, {'pageToken' : 'foo', 'pageSize' : '50'}) + + def test_list_tenants_error(self, tenant_mgt_app): + _instrument_tenant_mgt(tenant_mgt_app, 500, '{"error":"test"}') + with pytest.raises(exceptions.InternalError) as excinfo: + tenant_mgt.list_tenants(app=tenant_mgt_app) + assert str(excinfo.value) == 'Unexpected error response: {"error":"test"}' + + def _assert_tenants_page(self, page): + assert isinstance(page, tenant_mgt.ListTenantsPage) + assert len(page.tenants) == 2 + for idx, tenant in enumerate(page.tenants): + _assert_tenant(tenant, 
f'tenant{idx}') + + def _assert_request(self, recorder, expected=None): + if expected is None: + expected = {'pageSize' : '100'} + + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'GET' + assert req.headers['X-Client-Version'] == f'Python/Admin/{firebase_admin.__version__}' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert req.headers['x-goog-api-client'] == expected_metrics_header + request = dict(parse.parse_qsl(parse.urlsplit(req.url).query)) + assert request == expected + + +class TestAuthForTenant: + + @pytest.mark.parametrize('tenant_id', INVALID_TENANT_IDS) + def test_invalid_tenant_id(self, tenant_id, tenant_mgt_app): + with pytest.raises(ValueError): + tenant_mgt.auth_for_tenant(tenant_id, app=tenant_mgt_app) + + def test_client(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant1', app=tenant_mgt_app) + assert client.tenant_id == 'tenant1' + + def test_client_reuse(self, tenant_mgt_app): + client1 = tenant_mgt.auth_for_tenant('tenant1', app=tenant_mgt_app) + client2 = tenant_mgt.auth_for_tenant('tenant1', app=tenant_mgt_app) + client3 = tenant_mgt.auth_for_tenant('tenant2', app=tenant_mgt_app) + assert client1 is client2 + assert client1 is not client3 + + +class TestTenantAwareUserManagement: + + def test_get_user(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, MOCK_GET_USER_RESPONSE) + + user = client.get_user('testuser') + + assert isinstance(user, auth.UserRecord) + assert user.uid == 'testuser' + assert user.email == 'testuser@example.com' + self._assert_request(recorder, '/accounts:lookup', {'localId': ['testuser']}) + + def test_get_user_by_email(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, MOCK_GET_USER_RESPONSE) + + user = client.get_user_by_email('testuser@example.com') + + assert 
isinstance(user, auth.UserRecord) + assert user.uid == 'testuser' + assert user.email == 'testuser@example.com' + self._assert_request(recorder, '/accounts:lookup', {'email': ['testuser@example.com']}) + + def test_get_user_by_phone_number(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, MOCK_GET_USER_RESPONSE) + + user = client.get_user_by_phone_number('+1234567890') + + assert isinstance(user, auth.UserRecord) + assert user.uid == 'testuser' + assert user.email == 'testuser@example.com' + self._assert_request(recorder, '/accounts:lookup', {'phoneNumber': ['+1234567890']}) + + def test_create_user(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, '{"localId":"testuser"}') + + uid = client._user_manager.create_user() + + assert uid == 'testuser' + self._assert_request(recorder, '/accounts', {}) + + def test_update_user(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, '{"localId":"testuser"}') + + uid = client._user_manager.update_user('testuser', email='testuser@example.com') + + assert uid == 'testuser' + self._assert_request(recorder, '/accounts:update', { + 'localId': 'testuser', + 'email': 'testuser@example.com', + }) + + def test_delete_user(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, '{"kind":"deleteresponse"}') + + client.delete_user('testuser') + + self._assert_request(recorder, '/accounts:delete', {'localId': 'testuser'}) + + def test_set_custom_user_claims(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, '{"localId":"testuser"}') + claims = {'admin': True} + + 
client.set_custom_user_claims('testuser', claims) + + self._assert_request(recorder, '/accounts:update', { + 'localId': 'testuser', + 'customAttributes': json.dumps(claims), + }) + + def test_revoke_refresh_tokens(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, '{"localId":"testuser"}') + + client.revoke_refresh_tokens('testuser') + + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'POST' + assert req.url == f'{USER_MGT_URL_PREFIX}/tenants/tenant-id/accounts:update' + body = json.loads(req.body.decode()) + assert body['localId'] == 'testuser' + assert 'validSince' in body + + def test_list_users(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, MOCK_LIST_USERS_RESPONSE) + + page = client.list_users() + + assert isinstance(page, auth.ListUsersPage) + assert page.next_page_token == '' + assert page.has_next_page is False + assert page.get_next_page() is None + users = list(user for user in page.iterate_all()) + assert len(users) == 2 + + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'GET' + assert req.url == ( + f'{USER_MGT_URL_PREFIX}/tenants/tenant-id/accounts:batchGet?maxResults=1000' + ) + + def test_import_users(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, '{}') + users = [ + auth.ImportUserRecord(uid='user1'), + auth.ImportUserRecord(uid='user2'), + ] + + result = client.import_users(users) + + assert isinstance(result, auth.UserImportResult) + assert result.success_count == 2 + assert result.failure_count == 0 + assert result.errors == [] + self._assert_request(recorder, '/accounts:batchCreate', { + 'users': [{'localId': 'user1'}, {'localId': 'user2'}], + }) + + def test_generate_password_reset_link(self, tenant_mgt_app): + client = 
tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, '{"oobLink":"https://testlink"}') + + link = client.generate_password_reset_link('test@test.com') + + assert link == 'https://testlink' + self._assert_request(recorder, '/accounts:sendOobCode', { + 'email': 'test@test.com', + 'requestType': 'PASSWORD_RESET', + 'returnOobLink': True, + }) + + def test_generate_email_verification_link(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, '{"oobLink":"https://testlink"}') + + link = client.generate_email_verification_link('test@test.com') + + assert link == 'https://testlink' + self._assert_request(recorder, '/accounts:sendOobCode', { + 'email': 'test@test.com', + 'requestType': 'VERIFY_EMAIL', + 'returnOobLink': True, + }) + + def test_generate_sign_in_with_email_link(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_user_mgt(client, 200, '{"oobLink":"https://testlink"}') + settings = auth.ActionCodeSettings(url='http://localhost') + + link = client.generate_sign_in_with_email_link('test@test.com', settings) + + assert link == 'https://testlink' + self._assert_request(recorder, '/accounts:sendOobCode', { + 'email': 'test@test.com', + 'requestType': 'EMAIL_SIGNIN', + 'returnOobLink': True, + 'continueUrl': 'http://localhost', + }) + + def test_get_oidc_provider_config(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_provider_mgt(client, 200, OIDC_PROVIDER_CONFIG_RESPONSE) + + provider_config = client.get_oidc_provider_config('oidc.provider') + + self._assert_oidc_provider_config(provider_config) + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'GET' + assert req.url == ( + f'{PROVIDER_MGT_URL_PREFIX}/tenants/tenant-id/oauthIdpConfigs/oidc.provider' + ) + + def 
test_create_oidc_provider_config(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_provider_mgt(client, 200, OIDC_PROVIDER_CONFIG_RESPONSE) + + provider_config = client.create_oidc_provider_config( + 'oidc.provider', client_id='CLIENT_ID', issuer='https://oidc.com/issuer', + display_name='oidcProviderName', enabled=True) + + self._assert_oidc_provider_config(provider_config) + self._assert_request( + recorder, '/oauthIdpConfigs?oauthIdpConfigId=oidc.provider', + OIDC_PROVIDER_CONFIG_REQUEST, prefix=PROVIDER_MGT_URL_PREFIX) + + def test_update_oidc_provider_config(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_provider_mgt(client, 200, OIDC_PROVIDER_CONFIG_RESPONSE) + + provider_config = client.update_oidc_provider_config( + 'oidc.provider', client_id='CLIENT_ID', issuer='https://oidc.com/issuer', + display_name='oidcProviderName', enabled=True) + + self._assert_oidc_provider_config(provider_config) + mask = ['clientId', 'displayName', 'enabled', 'issuer'] + url = f'/oauthIdpConfigs/oidc.provider?updateMask={",".join(mask)}' + self._assert_request( + recorder, url, OIDC_PROVIDER_CONFIG_REQUEST, method='PATCH', + prefix=PROVIDER_MGT_URL_PREFIX) + + def test_delete_oidc_provider_config(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_provider_mgt(client, 200, '{}') + + client.delete_oidc_provider_config('oidc.provider') + + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'DELETE' + assert req.url == ( + f'{PROVIDER_MGT_URL_PREFIX}/tenants/tenant-id/oauthIdpConfigs/oidc.provider' + ) + + def test_list_oidc_provider_configs(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_provider_mgt(client, 200, LIST_OIDC_PROVIDER_CONFIGS_RESPONSE) + + page = 
client.list_oidc_provider_configs() + + assert isinstance(page, auth.ListProviderConfigsPage) + index = 0 + assert len(page.provider_configs) == 2 + for provider_config in page.provider_configs: + self._assert_oidc_provider_config( + provider_config, want_id=f'oidc.provider{index}') + index += 1 + + assert page.next_page_token == '' + assert page.has_next_page is False + assert page.get_next_page() is None + provider_configs = list(config for config in page.iterate_all()) + assert len(provider_configs) == 2 + + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'GET' + assert req.url == ( + f'{PROVIDER_MGT_URL_PREFIX}/tenants/tenant-id/oauthIdpConfigs?pageSize=100' + ) + + def test_get_saml_provider_config(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_provider_mgt(client, 200, SAML_PROVIDER_CONFIG_RESPONSE) + + provider_config = client.get_saml_provider_config('saml.provider') + + self._assert_saml_provider_config(provider_config) + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'GET' + assert req.url == ( + f'{PROVIDER_MGT_URL_PREFIX}/tenants/tenant-id/inboundSamlConfigs/saml.provider' + ) + + def test_create_saml_provider_config(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_provider_mgt(client, 200, SAML_PROVIDER_CONFIG_RESPONSE) + + provider_config = client.create_saml_provider_config( + 'saml.provider', idp_entity_id='IDP_ENTITY_ID', sso_url='https://example.com/login', + x509_certificates=['CERT1', 'CERT2'], rp_entity_id='RP_ENTITY_ID', + callback_url='https://projectId.firebaseapp.com/__/auth/handler', + display_name='samlProviderName', enabled=True) + + self._assert_saml_provider_config(provider_config) + self._assert_request( + recorder, '/inboundSamlConfigs?inboundSamlConfigId=saml.provider', + SAML_PROVIDER_CONFIG_REQUEST, prefix=PROVIDER_MGT_URL_PREFIX) + + def 
test_update_saml_provider_config(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_provider_mgt(client, 200, SAML_PROVIDER_CONFIG_RESPONSE) + + provider_config = client.update_saml_provider_config( + 'saml.provider', idp_entity_id='IDP_ENTITY_ID', sso_url='https://example.com/login', + x509_certificates=['CERT1', 'CERT2'], rp_entity_id='RP_ENTITY_ID', + callback_url='https://projectId.firebaseapp.com/__/auth/handler', + display_name='samlProviderName', enabled=True) + + self._assert_saml_provider_config(provider_config) + mask = [ + 'displayName', 'enabled', 'idpConfig.idpCertificates', 'idpConfig.idpEntityId', + 'idpConfig.ssoUrl', 'spConfig.callbackUri', 'spConfig.spEntityId', + ] + url = f'/inboundSamlConfigs/saml.provider?updateMask={",".join(mask)}' + self._assert_request( + recorder, url, SAML_PROVIDER_CONFIG_REQUEST, method='PATCH', + prefix=PROVIDER_MGT_URL_PREFIX) + + def test_delete_saml_provider_config(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_provider_mgt(client, 200, '{}') + + client.delete_saml_provider_config('saml.provider') + + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'DELETE' + assert req.url == ( + f'{PROVIDER_MGT_URL_PREFIX}/tenants/tenant-id/inboundSamlConfigs/saml.provider' + ) + + def test_list_saml_provider_configs(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + recorder = _instrument_provider_mgt(client, 200, LIST_SAML_PROVIDER_CONFIGS_RESPONSE) + + page = client.list_saml_provider_configs() + + assert isinstance(page, auth.ListProviderConfigsPage) + index = 0 + assert len(page.provider_configs) == 2 + for provider_config in page.provider_configs: + self._assert_saml_provider_config( + provider_config, want_id=f'saml.provider{index}') + index += 1 + + assert page.next_page_token == '' + assert page.has_next_page is False + 
assert page.get_next_page() is None + provider_configs = list(config for config in page.iterate_all()) + assert len(provider_configs) == 2 + + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'GET' + assert req.url == ( + f'{PROVIDER_MGT_URL_PREFIX}/tenants/tenant-id/inboundSamlConfigs?pageSize=100' + ) + + def test_tenant_not_found(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('tenant-id', app=tenant_mgt_app) + _instrument_user_mgt(client, 500, TENANT_NOT_FOUND_RESPONSE) + with pytest.raises(tenant_mgt.TenantNotFoundError) as excinfo: + client.get_user('testuser') + + error_msg = 'No tenant found for the given identifier (TENANT_NOT_FOUND).' + assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def _assert_request( + self, recorder, want_url, want_body, method='POST', prefix=USER_MGT_URL_PREFIX): + assert len(recorder) == 1 + req = recorder[0] + assert req.method == method + assert req.url == f'{prefix}/tenants/tenant-id{want_url}' + assert req.headers['X-Client-Version'] == f'Python/Admin/{firebase_admin.__version__}' + expected_metrics_header = _utils.get_metrics_header() + ' mock-cred-metric-tag' + assert req.headers['x-goog-api-client'] == expected_metrics_header + body = json.loads(req.body.decode()) + assert body == want_body + + def _assert_oidc_provider_config(self, provider_config, want_id='oidc.provider'): + assert isinstance(provider_config, auth.OIDCProviderConfig) + assert provider_config.provider_id == want_id + assert provider_config.display_name == 'oidcProviderName' + assert provider_config.enabled is True + assert provider_config.client_id == 'CLIENT_ID' + assert provider_config.issuer == 'https://oidc.com/issuer' + + def _assert_saml_provider_config(self, provider_config, want_id='saml.provider'): + assert isinstance(provider_config, auth.SAMLProviderConfig) + assert 
provider_config.provider_id == want_id + assert provider_config.display_name == 'samlProviderName' + assert provider_config.enabled is True + assert provider_config.idp_entity_id == 'IDP_ENTITY_ID' + assert provider_config.sso_url == 'https://example.com/login' + assert provider_config.x509_certificates == ['CERT1', 'CERT2'] + assert provider_config.rp_entity_id == 'RP_ENTITY_ID' + assert provider_config.callback_url == 'https://projectId.firebaseapp.com/__/auth/handler' + + +class TestVerifyIdToken: + + def setup_method(self): + self.time_patch = unittest.mock.patch('time.time', return_value=MOCK_CURRENT_TIME) + self.mock_time = self.time_patch.start() + self.utcnow_patch = unittest.mock.patch( + 'google.auth.jwt._helpers.utcnow', return_value=MOCK_CURRENT_TIME_UTC) + self.mock_utcnow = self.utcnow_patch.start() + + def teardown_method(self): + self.time_patch.stop() + self.utcnow_patch.stop() + + def test_valid_token(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('test-tenant', app=tenant_mgt_app) + client._token_verifier.request = test_token_gen.MOCK_REQUEST + + claims = client.verify_id_token(test_token_gen.TEST_ID_TOKEN_WITH_TENANT) + + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + assert claims['firebase']['tenant'] == 'test-tenant' + + def test_invalid_tenant_id(self, tenant_mgt_app): + client = tenant_mgt.auth_for_tenant('other-tenant', app=tenant_mgt_app) + client._token_verifier.request = test_token_gen.MOCK_REQUEST + + with pytest.raises(tenant_mgt.TenantIdMismatchError) as excinfo: + client.verify_id_token(test_token_gen.TEST_ID_TOKEN_WITH_TENANT) + + assert 'Invalid tenant ID: test-tenant' in str(excinfo.value) + assert isinstance(excinfo.value, exceptions.InvalidArgumentError) + assert excinfo.value.cause is None + assert excinfo.value.http_response is None + + +@pytest.fixture(scope='module') +def tenant_aware_custom_token_app(): + cred = 
credentials.Certificate(testutils.resource_filename('service_account.json')) + app = firebase_admin.initialize_app(cred, name='tenantAwareCustomToken') + yield app + firebase_admin.delete_app(app) + + +class TestCreateCustomToken: + + def setup_method(self): + self.time_patch = unittest.mock.patch('time.time', return_value=MOCK_CURRENT_TIME) + self.mock_time = self.time_patch.start() + self.utcnow_patch = unittest.mock.patch( + 'google.auth.jwt._helpers.utcnow', return_value=MOCK_CURRENT_TIME_UTC) + self.mock_utcnow = self.utcnow_patch.start() + + def teardown_method(self): + self.time_patch.stop() + self.utcnow_patch.stop() + + def test_custom_token(self, tenant_aware_custom_token_app): + client = tenant_mgt.auth_for_tenant('test-tenant', app=tenant_aware_custom_token_app) + + custom_token = client.create_custom_token('user1') + + test_token_gen.verify_custom_token( + custom_token, expected_claims=None, tenant_id='test-tenant') + + def test_custom_token_with_claims(self, tenant_aware_custom_token_app): + client = tenant_mgt.auth_for_tenant('test-tenant', app=tenant_aware_custom_token_app) + claims = {'admin': True} + + custom_token = client.create_custom_token('user1', claims) + + test_token_gen.verify_custom_token( + custom_token, expected_claims=claims, tenant_id='test-tenant') + + +def _assert_tenant(tenant, tenant_id='tenant-id'): + assert isinstance(tenant, tenant_mgt.Tenant) + assert tenant.tenant_id == tenant_id + assert tenant.display_name == 'Test Tenant' + assert tenant.allow_password_sign_up is True + assert tenant.enable_email_link_sign_in is True diff --git a/tests/test_token_gen.py b/tests/test_token_gen.py new file mode 100644 index 000000000..384bc22c3 --- /dev/null +++ b/tests/test_token_gen.py @@ -0,0 +1,905 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test cases for the firebase_admin._token_gen module.""" + +import base64 +import datetime +import json +import os +import time +import unittest.mock + +from google.auth import crypt +from google.auth import jwt +import google.auth.exceptions +import google.oauth2.id_token +import pytest +from pytest_localserver import plugin + +import firebase_admin +from firebase_admin import auth +from firebase_admin import credentials +from firebase_admin import exceptions +from firebase_admin import _http_client +from firebase_admin import _token_gen +from tests import testutils + + +MOCK_CURRENT_TIME = 1500000000 +MOCK_CURRENT_TIME_UTC = datetime.datetime.fromtimestamp( + MOCK_CURRENT_TIME, tz=datetime.timezone.utc) +MOCK_UID = 'user1' +MOCK_CREDENTIAL = credentials.Certificate( + testutils.resource_filename('service_account.json')) +MOCK_PUBLIC_CERTS = testutils.resource('public_certs.json') +MOCK_PRIVATE_KEY = testutils.resource('private_key.pem') +MOCK_SERVICE_ACCOUNT_EMAIL = MOCK_CREDENTIAL.service_account_email +MOCK_REQUEST = testutils.MockRequest(200, MOCK_PUBLIC_CERTS) + +INVALID_STRINGS = [None, '', 0, 1, True, False, [], tuple(), {}] +INVALID_BOOLS = [None, '', 'foo', 0, 1, [], tuple(), {}] +INVALID_JWT_ARGS = { + 'NoneToken': None, + 'EmptyToken': '', + 'BoolToken': True, + 'IntToken': 1, + 'ListToken': [], + 'EmptyDictToken': {}, + 'NonEmptyDictToken': {'a': 1}, +} + +ID_TOOLKIT_URL = 'https://identitytoolkit.googleapis.com/v1' +EMULATOR_HOST_ENV_VAR = 'FIREBASE_AUTH_EMULATOR_HOST' +AUTH_EMULATOR_HOST = 'localhost:9099' +EMULATED_ID_TOOLKIT_URL = 
f'http://{AUTH_EMULATOR_HOST}/identitytoolkit.googleapis.com/v1' +TOKEN_MGT_URLS = { + 'ID_TOOLKIT': ID_TOOLKIT_URL, +} + +# Fixture for mocking a HTTP server +httpserver = plugin.httpserver + + +def _merge_jwt_claims(defaults, overrides): + defaults.update(overrides) + for key, value in overrides.items(): + if value is None: + del defaults[key] + return defaults + + +def verify_custom_token(custom_token, expected_claims, tenant_id=None): + assert isinstance(custom_token, bytes) + expected_email = MOCK_SERVICE_ACCOUNT_EMAIL + header = jwt.decode_header(custom_token) + assert header.get('typ') == 'JWT' + if _is_emulated(): + assert header.get('alg') == 'none' + assert custom_token.split(b'.')[2] == b'' + expected_email = _token_gen.AUTH_EMULATOR_EMAIL + token = jwt.decode(custom_token, verify=False) + else: + assert header.get('alg') == 'RS256' + token = google.oauth2.id_token.verify_token( + custom_token, + testutils.MockRequest(200, MOCK_PUBLIC_CERTS), + _token_gen.FIREBASE_AUDIENCE) + + assert token['uid'] == MOCK_UID + assert token['iss'] == expected_email + assert token['sub'] == expected_email + if tenant_id is None: + assert 'tenant_id' not in token + else: + assert token['tenant_id'] == tenant_id + + if expected_claims: + for key, value in expected_claims.items(): + assert value == token['claims'][key] + +def _get_id_token(payload_overrides=None, header_overrides=None, current_time=MOCK_CURRENT_TIME): + signer = crypt.RSASigner.from_string(MOCK_PRIVATE_KEY) + headers = { + 'kid': 'mock-key-id-1' + } + now = int(current_time if current_time is not None else time.time()) + payload = { + 'aud': MOCK_CREDENTIAL.project_id, + 'iss': 'https://securetoken.google.com/' + MOCK_CREDENTIAL.project_id, + 'iat': now - 100, + 'exp': now + 3600, + 'sub': '1234567890', + 'admin': True, + 'firebase': { + 'sign_in_provider': 'provider', + }, + } + if header_overrides: + headers = _merge_jwt_claims(headers, header_overrides) + if payload_overrides: + payload = 
_merge_jwt_claims(payload, payload_overrides) + return jwt.encode(signer, payload, header=headers) + +def _get_session_cookie( + payload_overrides=None, header_overrides=None, current_time=MOCK_CURRENT_TIME): + payload_overrides = payload_overrides or {} + if 'iss' not in payload_overrides: + payload_overrides['iss'] = ( + f'https://session.firebase.google.com/{MOCK_CREDENTIAL.project_id}' + ) + return _get_id_token(payload_overrides, header_overrides, current_time=current_time) + +def _instrument_user_manager(app, status, payload): + client = auth._get_client(app) + user_manager = client._user_manager + recorder = [] + user_manager.http_client.session.mount( + TOKEN_MGT_URLS['ID_TOOLKIT'], + testutils.MockAdapter(payload, status, recorder)) + return user_manager, recorder + +def _overwrite_cert_request(app, request): + client = auth._get_client(app) + client._token_verifier.request = request + +def _overwrite_iam_request(app, request): + client = auth._get_client(app) + client._token_generator.request = request + + +def _is_emulated(): + emulator_host = os.getenv(EMULATOR_HOST_ENV_VAR, '') + return emulator_host and '//' not in emulator_host + + +# These fixtures are set to the default function scope as the emulator environment variable bleeds +# over when in module scope. +@pytest.fixture(params=[{'emulated': False}, {'emulated': True}]) +def auth_app(request): + """Returns an App initialized with a mock service account credential. + + This can be used in any scenario where the private key is required. Use user_mgt_app + for everything else. 
+ """ + monkeypatch = testutils.new_monkeypatch() + if request.param['emulated']: + monkeypatch.setenv(EMULATOR_HOST_ENV_VAR, AUTH_EMULATOR_HOST) + monkeypatch.setitem(TOKEN_MGT_URLS, 'ID_TOOLKIT', EMULATED_ID_TOOLKIT_URL) + app = firebase_admin.initialize_app(MOCK_CREDENTIAL, name='tokenGen') + yield app + firebase_admin.delete_app(app) + monkeypatch.undo() + +@pytest.fixture(params=[{'emulated': False}, {'emulated': True}]) +def user_mgt_app(request): + monkeypatch = testutils.new_monkeypatch() + if request.param['emulated']: + monkeypatch.setenv(EMULATOR_HOST_ENV_VAR, AUTH_EMULATOR_HOST) + monkeypatch.setitem(TOKEN_MGT_URLS, 'ID_TOOLKIT', EMULATED_ID_TOOLKIT_URL) + app = firebase_admin.initialize_app(testutils.MockCredential(), name='userMgt', + options={'projectId': 'mock-project-id'}) + yield app + firebase_admin.delete_app(app) + monkeypatch.undo() + +@pytest.fixture +def env_var_app(request): + """Returns an App instance initialized with the given set of environment variables. + + The lines of code following the yield statement are guaranteed to run after each test case + that depends on this fixture. This ensures that the environment is left intact after the + tests. 
+ """ + environ = os.environ + os.environ = request.param + app = firebase_admin.initialize_app(testutils.MockCredential(), name='env-var-app') + yield app + os.environ = environ + firebase_admin.delete_app(app) + +@pytest.fixture(scope='module') +def revoked_tokens(): + mock_user = json.loads(testutils.resource('get_user.json')) + mock_user['users'][0]['validSince'] = str(MOCK_CURRENT_TIME + 100) + return json.dumps(mock_user) + +@pytest.fixture(scope='module') +def user_disabled(): + mock_user = json.loads(testutils.resource('get_user.json')) + mock_user['users'][0]['disabled'] = True + return json.dumps(mock_user) + +@pytest.fixture(scope='module') +def user_disabled_and_revoked(): + mock_user = json.loads(testutils.resource('get_user.json')) + mock_user['users'][0]['disabled'] = True + mock_user['users'][0]['validSince'] = str(MOCK_CURRENT_TIME + 100) + return json.dumps(mock_user) + + +class TestCreateCustomToken: + + valid_args = { + 'Basic': (MOCK_UID, {'one': 2, 'three': 'four'}), + 'NoDevClaims': (MOCK_UID, None), + 'EmptyDevClaims': (MOCK_UID, {}), + } + + invalid_args = { + 'NoUid': (None, None, ValueError), + 'EmptyUid': ('', None, ValueError), + 'LongUid': ('x'*129, None, ValueError), + 'BoolUid': (True, None, ValueError), + 'IntUid': (1, None, ValueError), + 'ListUid': ([], None, ValueError), + 'EmptyDictUid': ({}, None, ValueError), + 'NonEmptyDictUid': ({'a':1}, None, ValueError), + 'BoolClaims': (MOCK_UID, True, ValueError), + 'IntClaims': (MOCK_UID, 1, ValueError), + 'StrClaims': (MOCK_UID, 'foo', ValueError), + 'ListClaims': (MOCK_UID, [], ValueError), + 'TupleClaims': (MOCK_UID, (1, 2), ValueError), + 'SingleReservedClaim': (MOCK_UID, {'sub':'1234'}, ValueError), + 'MultipleReservedClaims': (MOCK_UID, {'sub':'1234', 'aud':'foo'}, ValueError), + } + + @pytest.mark.parametrize('values', valid_args.values(), ids=list(valid_args)) + def test_valid_params(self, auth_app, values): + user, claims = values + custom_token = auth.create_custom_token(user, 
claims, app=auth_app) + verify_custom_token(custom_token, claims) + + @pytest.mark.parametrize('values', invalid_args.values(), ids=list(invalid_args)) + def test_invalid_params(self, auth_app, values): + user, claims, error = values + with pytest.raises(error): + auth.create_custom_token(user, claims, app=auth_app) + + def test_noncert_credential(self, user_mgt_app): + if _is_emulated(): + # Should work fine with the emulator, so do a condensed version of + # test_sign_with_iam below. + custom_token = auth.create_custom_token(MOCK_UID, app=user_mgt_app).decode() + self._verify_signer(custom_token, _token_gen.AUTH_EMULATOR_EMAIL) + return + with pytest.raises(ValueError): + auth.create_custom_token(MOCK_UID, app=user_mgt_app) + + def test_sign_with_iam(self): + options = {'serviceAccountId': 'test-service-account', 'projectId': 'mock-project-id'} + app = firebase_admin.initialize_app( + testutils.MockCredential(), name='iam-signer-app', options=options) + try: + signature = base64.b64encode(b'test').decode() + iam_resp = json.dumps({'signedBlob': signature}) + _overwrite_iam_request(app, testutils.MockRequest(200, iam_resp)) + custom_token = auth.create_custom_token(MOCK_UID, app=app).decode() + assert custom_token.endswith('.' 
+ signature.rstrip('=')) + self._verify_signer(custom_token, 'test-service-account') + finally: + firebase_admin.delete_app(app) + + def test_sign_with_iam_error(self): + options = {'serviceAccountId': 'test-service-account', 'projectId': 'mock-project-id'} + app = firebase_admin.initialize_app( + testutils.MockCredential(), name='iam-signer-app', options=options) + try: + iam_resp = '{"error": {"code": 403, "message": "test error"}}' + _overwrite_iam_request(app, testutils.MockRequest(403, iam_resp)) + with pytest.raises(auth.TokenSignError) as excinfo: + auth.create_custom_token(MOCK_UID, app=app) + error = excinfo.value + assert error.code == exceptions.UNKNOWN + assert iam_resp in str(error) + assert isinstance(error.cause, google.auth.exceptions.TransportError) + finally: + firebase_admin.delete_app(app) + + def test_sign_with_discovered_service_account(self): + request = testutils.MockRequest(200, 'discovered-service-account') + options = {'projectId': 'mock-project-id'} + app = firebase_admin.initialize_app(testutils.MockCredential(), name='iam-signer-app', + options=options) + try: + _overwrite_iam_request(app, request) + # Force initialization of the signing provider. This will invoke the Metadata service. + client = auth._get_client(app) + assert client._token_generator.signing_provider is not None + + # Now invoke the IAM signer. + signature = base64.b64encode(b'test').decode() + request.response = testutils.MockResponse(200, json.dumps({'signedBlob': signature})) + custom_token = auth.create_custom_token(MOCK_UID, app=app).decode() + assert custom_token.endswith('.' 
+ signature.rstrip('=')) + self._verify_signer(custom_token, 'discovered-service-account') + assert len(request.log) == 2 + assert request.log[0][1]['headers'] == {'Metadata-Flavor': 'Google'} + finally: + firebase_admin.delete_app(app) + + def test_sign_with_discovery_failure(self): + request = testutils.MockFailedRequest(Exception('test error')) + options = {'projectId': 'mock-project-id'} + app = firebase_admin.initialize_app(testutils.MockCredential(), name='iam-signer-app', + options=options) + try: + _overwrite_iam_request(app, request) + with pytest.raises(ValueError) as excinfo: + auth.create_custom_token(MOCK_UID, app=app) + assert str(excinfo.value).startswith('Failed to determine service account: test error') + assert len(request.log) == 1 + assert request.log[0][1]['headers'] == {'Metadata-Flavor': 'Google'} + finally: + firebase_admin.delete_app(app) + + def _verify_signer(self, token, signer): + segments = token.split('.') + assert len(segments) == 3 + body = jwt.decode(token, verify=False) + assert body['iss'] == signer + assert body['sub'] == signer + + +class TestCreateSessionCookie: + + @pytest.mark.parametrize('id_token', [None, '', 0, 1, True, False, [], {}, tuple()]) + def test_invalid_id_token(self, user_mgt_app, id_token): + with pytest.raises(ValueError): + auth.create_session_cookie(id_token, expires_in=3600, app=user_mgt_app) + + @pytest.mark.parametrize('expires_in', [ + None, '', True, False, [], {}, tuple(), + _token_gen.MIN_SESSION_COOKIE_DURATION_SECONDS - 1, + _token_gen.MAX_SESSION_COOKIE_DURATION_SECONDS + 1, + ]) + def test_invalid_expires_in(self, user_mgt_app, expires_in): + with pytest.raises(ValueError): + auth.create_session_cookie('id_token', expires_in=expires_in, app=user_mgt_app) + + @pytest.mark.parametrize('expires_in', [ + 3600, datetime.timedelta(hours=1), datetime.timedelta(milliseconds=3600500) + ]) + def test_valid_args(self, user_mgt_app, expires_in): + _, recorder = _instrument_user_manager(user_mgt_app, 200, 
'{"sessionCookie": "cookie"}') + cookie = auth.create_session_cookie('id_token', expires_in=expires_in, app=user_mgt_app) + assert cookie == 'cookie' + request = json.loads(recorder[0].body.decode()) + assert request == {'idToken' : 'id_token', 'validDuration': 3600} + + def test_error(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error":{"message": "INVALID_ID_TOKEN"}}') + with pytest.raises(auth.InvalidIdTokenError) as excinfo: + auth.create_session_cookie('id_token', expires_in=3600, app=user_mgt_app) + assert excinfo.value.code == exceptions.INVALID_ARGUMENT + assert str(excinfo.value) == 'The provided ID token is invalid (INVALID_ID_TOKEN).' + + def test_error_with_details(self, user_mgt_app): + _instrument_user_manager( + user_mgt_app, 500, '{"error":{"message": "INVALID_ID_TOKEN: More details."}}') + with pytest.raises(auth.InvalidIdTokenError) as excinfo: + auth.create_session_cookie('id_token', expires_in=3600, app=user_mgt_app) + assert excinfo.value.code == exceptions.INVALID_ARGUMENT + expected = 'The provided ID token is invalid (INVALID_ID_TOKEN). More details.' + assert str(excinfo.value) == expected + + def test_unexpected_error_code(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error":{"message": "SOMETHING_UNUSUAL"}}') + with pytest.raises(exceptions.InternalError) as excinfo: + auth.create_session_cookie('id_token', expires_in=3600, app=user_mgt_app) + assert str(excinfo.value) == 'Error while calling Auth service (SOMETHING_UNUSUAL).' 
+ + def test_unexpected_error_response(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{}') + with pytest.raises(exceptions.InternalError) as excinfo: + auth.create_session_cookie('id_token', expires_in=3600, app=user_mgt_app) + assert str(excinfo.value) == 'Unexpected error response: {}' + + def test_unexpected_response(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '{}') + with pytest.raises(auth.UnexpectedResponseError) as excinfo: + auth.create_session_cookie('id_token', expires_in=3600, app=user_mgt_app) + assert excinfo.value.code == exceptions.UNKNOWN + assert 'Failed to create session cookie' in str(excinfo.value) + + +MOCK_GET_USER_RESPONSE = testutils.resource('get_user.json') +TEST_ID_TOKEN = _get_id_token() +TEST_ID_TOKEN_WITH_TENANT = _get_id_token({ + 'firebase': { + 'tenant': 'test-tenant', + } +}) +TEST_SESSION_COOKIE = _get_session_cookie() + + +class TestVerifyIdToken: + + def setup_method(self): + self.time_patch = unittest.mock.patch('time.time', return_value=MOCK_CURRENT_TIME) + self.time_patch.start() + self.utcnow_patch = unittest.mock.patch( + 'google.auth.jwt._helpers.utcnow', return_value=MOCK_CURRENT_TIME_UTC) + self.utcnow_patch.start() + + def teardown_method(self): + self.time_patch.stop() + self.utcnow_patch.stop() + + valid_tokens = { + 'BinaryToken': TEST_ID_TOKEN, + 'TextToken': TEST_ID_TOKEN.decode('utf-8'), + } + + invalid_tokens = { + 'NoKid': _get_id_token(header_overrides={'kid': None}), + 'WrongKid': _get_id_token(header_overrides={'kid': 'foo'}), + 'BadAudience': _get_id_token({'aud': 'bad-audience'}), + 'BadIssuer': _get_id_token({ + 'iss': 'https://securetoken.google.com/wrong-issuer' + }), + 'EmptySubject': _get_id_token({'sub': ''}), + 'IntSubject': _get_id_token({'sub': 10}), + 'LongStrSubject': _get_id_token({'sub': 'a' * 129}), + 'FutureToken': _get_id_token({'iat': MOCK_CURRENT_TIME + 1000}), + 'ExpiredToken': _get_id_token({ + 'iat': MOCK_CURRENT_TIME - 10000, + 'exp': 
MOCK_CURRENT_TIME - 3600 + }), + 'ExpiredTokenShort': _get_id_token({ + 'iat': MOCK_CURRENT_TIME - 10000, + 'exp': MOCK_CURRENT_TIME - 30 + }), + 'BadFormatToken': 'foobar' + } + + tokens_accepted_in_emulator = [ + 'NoKid', + 'WrongKid', + 'FutureToken', + 'ExpiredToken', + 'ExpiredTokenShort', + ] + + def _assert_valid_token(self, id_token, app): + claims = auth.verify_id_token(id_token, app=app) + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + assert claims['firebase']['sign_in_provider'] == 'provider' + + @pytest.mark.parametrize('id_token', valid_tokens.values(), ids=list(valid_tokens)) + def test_valid_token(self, user_mgt_app, id_token): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + self._assert_valid_token(id_token, app=user_mgt_app) + + def test_valid_token_with_tenant(self, user_mgt_app): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + claims = auth.verify_id_token(TEST_ID_TOKEN_WITH_TENANT, app=user_mgt_app) + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + assert claims['firebase']['tenant'] == 'test-tenant' + + @pytest.mark.parametrize('id_token', valid_tokens.values(), ids=list(valid_tokens)) + def test_valid_token_check_revoked(self, user_mgt_app, id_token): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, MOCK_GET_USER_RESPONSE) + claims = auth.verify_id_token(id_token, app=user_mgt_app, check_revoked=True) + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + + @pytest.mark.parametrize('id_token', valid_tokens.values(), ids=list(valid_tokens)) + def test_revoked_token_check_revoked(self, user_mgt_app, revoked_tokens, id_token): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, revoked_tokens) + with pytest.raises(auth.RevokedIdTokenError) as excinfo: + auth.verify_id_token(id_token, app=user_mgt_app, check_revoked=True) + assert str(excinfo.value) == 'The 
Firebase ID token has been revoked.' + + @pytest.mark.parametrize('id_token', valid_tokens.values(), ids=list(valid_tokens)) + def test_disabled_user_check_revoked(self, user_mgt_app, user_disabled, id_token): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, user_disabled) + with pytest.raises(auth.UserDisabledError) as excinfo: + auth.verify_id_token(id_token, app=user_mgt_app, check_revoked=True) + assert str(excinfo.value) == 'The user record is disabled.' + + @pytest.mark.parametrize('id_token', valid_tokens.values(), ids=list(valid_tokens)) + def test_check_disabled_before_revoked( + self, user_mgt_app, user_disabled_and_revoked, id_token): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, user_disabled_and_revoked) + with pytest.raises(auth.UserDisabledError) as excinfo: + auth.verify_id_token(id_token, app=user_mgt_app, check_revoked=True) + assert str(excinfo.value) == 'The user record is disabled.' 
+ + @pytest.mark.parametrize('arg', INVALID_BOOLS) + def test_invalid_check_revoked(self, user_mgt_app, arg): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + with pytest.raises(ValueError): + auth.verify_id_token('id_token', check_revoked=arg, app=user_mgt_app) + + @pytest.mark.parametrize('id_token', valid_tokens.values(), ids=list(valid_tokens)) + def test_revoked_token_do_not_check_revoked(self, user_mgt_app, revoked_tokens, id_token): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, revoked_tokens) + claims = auth.verify_id_token(id_token, app=user_mgt_app, check_revoked=False) + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + + @pytest.mark.parametrize('id_token', valid_tokens.values(), ids=list(valid_tokens)) + def test_disabled_user_do_not_check_revoked(self, user_mgt_app, user_disabled, id_token): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, user_disabled) + claims = auth.verify_id_token(id_token, app=user_mgt_app, check_revoked=False) + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + + @pytest.mark.parametrize('id_token', INVALID_JWT_ARGS.values(), ids=list(INVALID_JWT_ARGS)) + def test_invalid_arg(self, user_mgt_app, id_token): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + with pytest.raises(ValueError) as excinfo: + auth.verify_id_token(id_token, app=user_mgt_app) + assert 'Illegal ID token provided' in str(excinfo.value) + + @pytest.mark.parametrize('id_token_key', list(invalid_tokens)) + def test_invalid_token(self, user_mgt_app, id_token_key): + id_token = self.invalid_tokens[id_token_key] + if _is_emulated() and id_token_key in self.tokens_accepted_in_emulator: + self._assert_valid_token(id_token, user_mgt_app) + return + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + with pytest.raises(auth.InvalidIdTokenError) as excinfo: + auth.verify_id_token(id_token, 
app=user_mgt_app) + assert isinstance(excinfo.value, exceptions.InvalidArgumentError) + assert excinfo.value.http_response is None + + def test_expired_token(self, user_mgt_app): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + id_token = self.invalid_tokens['ExpiredToken'] + if _is_emulated(): + self._assert_valid_token(id_token, user_mgt_app) + return + with pytest.raises(auth.ExpiredIdTokenError) as excinfo: + auth.verify_id_token(id_token, app=user_mgt_app) + assert isinstance(excinfo.value, auth.InvalidIdTokenError) + assert 'Token expired' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is None + + def test_expired_token_with_tolerance(self, user_mgt_app): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + id_token = self.invalid_tokens['ExpiredTokenShort'] + if _is_emulated(): + self._assert_valid_token(id_token, user_mgt_app) + return + claims = auth.verify_id_token(id_token, app=user_mgt_app, + clock_skew_seconds=60) + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + with pytest.raises(auth.ExpiredIdTokenError): + auth.verify_id_token(id_token, app=user_mgt_app, + clock_skew_seconds=20) + + def test_project_id_option(self): + app = firebase_admin.initialize_app( + testutils.MockCredential(), options={'projectId': 'mock-project-id'}, name='myApp') + _overwrite_cert_request(app, MOCK_REQUEST) + try: + claims = auth.verify_id_token(TEST_ID_TOKEN, app) + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + finally: + firebase_admin.delete_app(app) + + @pytest.mark.parametrize('env_var_app', [ + {'GCLOUD_PROJECT': 'mock-project-id'}, + {'GOOGLE_CLOUD_PROJECT': 'mock-project-id'} + ], indirect=True) + def test_project_id_env_var(self, env_var_app): + _overwrite_cert_request(env_var_app, MOCK_REQUEST) + claims = auth.verify_id_token(TEST_ID_TOKEN, env_var_app) + assert claims['admin'] is True + + def test_custom_token(self, auth_app): + id_token = 
auth.create_custom_token(MOCK_UID, app=auth_app) + _overwrite_cert_request(auth_app, MOCK_REQUEST) + with pytest.raises(auth.InvalidIdTokenError) as excinfo: + auth.verify_id_token(id_token, app=auth_app) + message = 'verify_id_token() expects an ID token, but was given a custom token.' + assert str(excinfo.value) == message + + def test_certificate_request_failure(self, user_mgt_app): + _overwrite_cert_request(user_mgt_app, testutils.MockRequest(404, 'not found')) + if _is_emulated(): + # Shouldn't fetch certificates in emulator mode. + self._assert_valid_token(TEST_ID_TOKEN, app=user_mgt_app) + return + with pytest.raises(auth.CertificateFetchError) as excinfo: + auth.verify_id_token(TEST_ID_TOKEN, app=user_mgt_app) + assert 'Could not fetch certificates' in str(excinfo.value) + assert isinstance(excinfo.value, exceptions.UnknownError) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is None + + +class TestVerifySessionCookie: + + def setup_method(self): + self.time_patch = unittest.mock.patch('time.time', return_value=MOCK_CURRENT_TIME) + self.time_patch.start() + self.utcnow_patch = unittest.mock.patch( + 'google.auth.jwt._helpers.utcnow', return_value=MOCK_CURRENT_TIME_UTC) + self.utcnow_patch.start() + + def teardown_method(self): + self.time_patch.stop() + self.utcnow_patch.stop() + + valid_cookies = { + 'BinaryCookie': TEST_SESSION_COOKIE, + 'TextCookie': TEST_SESSION_COOKIE.decode('utf-8'), + } + + invalid_cookies = { + 'NoKid': _get_session_cookie(header_overrides={'kid': None}), + 'WrongKid': _get_session_cookie(header_overrides={'kid': 'foo'}), + 'BadAudience': _get_session_cookie({'aud': 'bad-audience'}), + 'BadIssuer': _get_session_cookie({ + 'iss': 'https://session.firebase.google.com/wrong-issuer' + }), + 'EmptySubject': _get_session_cookie({'sub': ''}), + 'IntSubject': _get_session_cookie({'sub': 10}), + 'LongStrSubject': _get_session_cookie({'sub': 'a' * 129}), + 'FutureCookie': _get_session_cookie({'iat': 
MOCK_CURRENT_TIME + 1000}), + 'ExpiredCookie': _get_session_cookie({ + 'iat': MOCK_CURRENT_TIME - 10000, + 'exp': MOCK_CURRENT_TIME - 3600 + }), + 'ExpiredCookieShort': _get_session_cookie({ + 'iat': MOCK_CURRENT_TIME - 10000, + 'exp': MOCK_CURRENT_TIME - 30 + }), + 'BadFormatCookie': 'foobar', + 'IDToken': TEST_ID_TOKEN, + } + + cookies_accepted_in_emulator = [ + 'NoKid', + 'WrongKid', + 'FutureCookie', + 'ExpiredCookie', + 'ExpiredCookieShort', + ] + + def _assert_valid_cookie(self, cookie, app, check_revoked=False): + claims = auth.verify_session_cookie(cookie, app=app, check_revoked=check_revoked) + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + + @pytest.mark.parametrize('cookie', valid_cookies.values(), ids=list(valid_cookies)) + def test_valid_cookie(self, user_mgt_app, cookie): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + self._assert_valid_cookie(cookie, user_mgt_app) + + @pytest.mark.parametrize('cookie', valid_cookies.values(), ids=list(valid_cookies)) + def test_valid_cookie_check_revoked(self, user_mgt_app, cookie): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, MOCK_GET_USER_RESPONSE) + self._assert_valid_cookie(cookie, app=user_mgt_app, check_revoked=True) + + @pytest.mark.parametrize('cookie', valid_cookies.values(), ids=list(valid_cookies)) + def test_revoked_cookie_check_revoked(self, user_mgt_app, revoked_tokens, cookie): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, revoked_tokens) + with pytest.raises(auth.RevokedSessionCookieError) as excinfo: + auth.verify_session_cookie(cookie, app=user_mgt_app, check_revoked=True) + assert str(excinfo.value) == 'The Firebase session cookie has been revoked.' 
+ + @pytest.mark.parametrize('cookie', valid_cookies.values(), ids=list(valid_cookies)) + def test_revoked_cookie_does_not_check_revoked(self, user_mgt_app, revoked_tokens, cookie): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, revoked_tokens) + self._assert_valid_cookie(cookie, app=user_mgt_app, check_revoked=False) + + @pytest.mark.parametrize('cookie', valid_cookies.values(), ids=list(valid_cookies)) + def test_disabled_user_check_revoked(self, user_mgt_app, user_disabled, cookie): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, user_disabled) + with pytest.raises(auth.UserDisabledError) as excinfo: + auth.verify_session_cookie(cookie, app=user_mgt_app, check_revoked=True) + assert str(excinfo.value) == 'The user record is disabled.' + + @pytest.mark.parametrize('cookie', valid_cookies.values(), ids=list(valid_cookies)) + def test_check_disabled_before_revoked( + self, user_mgt_app, user_disabled_and_revoked, cookie): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, user_disabled_and_revoked) + with pytest.raises(auth.UserDisabledError) as excinfo: + auth.verify_session_cookie(cookie, app=user_mgt_app, check_revoked=True) + assert str(excinfo.value) == 'The user record is disabled.' 
+ + @pytest.mark.parametrize('cookie', valid_cookies.values(), ids=list(valid_cookies)) + def test_disabled_user_does_not_check_revoked(self, user_mgt_app, user_disabled, cookie): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + _instrument_user_manager(user_mgt_app, 200, user_disabled) + self._assert_valid_cookie(cookie, app=user_mgt_app, check_revoked=False) + + @pytest.mark.parametrize('cookie', INVALID_JWT_ARGS.values(), ids=list(INVALID_JWT_ARGS)) + def test_invalid_args(self, user_mgt_app, cookie): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + with pytest.raises(ValueError) as excinfo: + auth.verify_session_cookie(cookie, app=user_mgt_app) + assert 'Illegal session cookie provided' in str(excinfo.value) + + @pytest.mark.parametrize('cookie_key', list(invalid_cookies)) + def test_invalid_cookie(self, user_mgt_app, cookie_key): + cookie = self.invalid_cookies[cookie_key] + if _is_emulated() and cookie_key in self.cookies_accepted_in_emulator: + self._assert_valid_cookie(cookie, user_mgt_app) + return + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + with pytest.raises(auth.InvalidSessionCookieError) as excinfo: + auth.verify_session_cookie(cookie, app=user_mgt_app) + assert isinstance(excinfo.value, exceptions.InvalidArgumentError) + assert excinfo.value.http_response is None + + def test_expired_cookie(self, user_mgt_app): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + cookie = self.invalid_cookies['ExpiredCookie'] + if _is_emulated(): + self._assert_valid_cookie(cookie, user_mgt_app) + return + with pytest.raises(auth.ExpiredSessionCookieError) as excinfo: + auth.verify_session_cookie(cookie, app=user_mgt_app) + assert isinstance(excinfo.value, auth.InvalidSessionCookieError) + assert 'Token expired' in str(excinfo.value) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is None + + def test_expired_cookie_with_tolerance(self, user_mgt_app): + _overwrite_cert_request(user_mgt_app, MOCK_REQUEST) + 
cookie = self.invalid_cookies['ExpiredCookieShort'] + if _is_emulated(): + self._assert_valid_cookie(cookie, user_mgt_app) + return + claims = auth.verify_session_cookie(cookie, app=user_mgt_app, check_revoked=False, + clock_skew_seconds=59) + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + with pytest.raises(auth.ExpiredSessionCookieError): + auth.verify_session_cookie(cookie, app=user_mgt_app, check_revoked=False, + clock_skew_seconds=29) + + def test_project_id_option(self): + app = firebase_admin.initialize_app( + testutils.MockCredential(), options={'projectId': 'mock-project-id'}, name='myApp') + _overwrite_cert_request(app, MOCK_REQUEST) + try: + claims = auth.verify_session_cookie(TEST_SESSION_COOKIE, app=app) + assert claims['admin'] is True + assert claims['uid'] == claims['sub'] + finally: + firebase_admin.delete_app(app) + + @pytest.mark.parametrize('env_var_app', [{'GCLOUD_PROJECT': 'mock-project-id'}], indirect=True) + def test_project_id_env_var(self, env_var_app): + _overwrite_cert_request(env_var_app, MOCK_REQUEST) + claims = auth.verify_session_cookie(TEST_SESSION_COOKIE, app=env_var_app) + assert claims['admin'] is True + + def test_custom_token(self, auth_app): + custom_token = auth.create_custom_token(MOCK_UID, app=auth_app) + _overwrite_cert_request(auth_app, MOCK_REQUEST) + with pytest.raises(auth.InvalidSessionCookieError): + auth.verify_session_cookie(custom_token, app=auth_app) + + def test_certificate_request_failure(self, user_mgt_app): + _overwrite_cert_request(user_mgt_app, testutils.MockRequest(404, 'not found')) + if _is_emulated(): + # Shouldn't fetch certificates in emulator mode. 
+ auth.verify_session_cookie(TEST_SESSION_COOKIE, app=user_mgt_app) + return + with pytest.raises(auth.CertificateFetchError) as excinfo: + auth.verify_session_cookie(TEST_SESSION_COOKIE, app=user_mgt_app) + assert 'Could not fetch certificates' in str(excinfo.value) + assert isinstance(excinfo.value, exceptions.UnknownError) + assert excinfo.value.cause is not None + assert excinfo.value.http_response is None + + +class TestCertificateCaching: + + def setup_method(self): + self.time_patch = unittest.mock.patch('time.time', return_value=MOCK_CURRENT_TIME) + self.time_patch.start() + self.utcnow_patch = unittest.mock.patch( + 'google.auth.jwt._helpers.utcnow', return_value=MOCK_CURRENT_TIME_UTC) + self.utcnow_patch.start() + + def teardown_method(self): + self.time_patch.stop() + self.utcnow_patch.stop() + + def test_certificate_caching(self, user_mgt_app, httpserver): + httpserver.serve_content(MOCK_PUBLIC_CERTS, 200, headers={'Cache-Control': 'max-age=3600'}) + verifier = _token_gen.TokenVerifier(user_mgt_app) + verifier.cookie_verifier.cert_url = httpserver.url + verifier.id_token_verifier.cert_url = httpserver.url + verifier.verify_session_cookie(TEST_SESSION_COOKIE) + # No requests should be made in emulated mode + request_count = 0 if _is_emulated() else 1 + assert len(httpserver.requests) == request_count + # Subsequent requests should not fetch certs from the server + verifier.verify_session_cookie(TEST_SESSION_COOKIE) + assert len(httpserver.requests) == request_count + verifier.verify_id_token(TEST_ID_TOKEN) + assert len(httpserver.requests) == request_count + + +class TestCertificateFetchTimeout: + + def setup_method(self): + self.time_patch = unittest.mock.patch('time.time', return_value=MOCK_CURRENT_TIME) + self.time_patch.start() + self.utcnow_patch = unittest.mock.patch( + 'google.auth.jwt._helpers.utcnow', return_value=MOCK_CURRENT_TIME_UTC) + self.utcnow_patch.start() + + def teardown_method(self): + self.time_patch.stop() + self.utcnow_patch.stop() 
+ testutils.cleanup_apps() + + timeout_configs = [ + ({'httpTimeout': 4}, 4), + ({'httpTimeout': None}, None), + ({}, _http_client.DEFAULT_TIMEOUT_SECONDS), + ] + + @pytest.mark.parametrize('options, timeout', timeout_configs) + def test_init_request(self, options, timeout): + app = firebase_admin.initialize_app(MOCK_CREDENTIAL, options=options) + + client = auth._get_client(app) + request = client._token_verifier.request + + assert isinstance(request, _token_gen.CertificateFetchRequest) + assert request.timeout_seconds == timeout + + @pytest.mark.parametrize('options, timeout', timeout_configs) + def test_verify_id_token_timeout(self, options, timeout): + app = firebase_admin.initialize_app(MOCK_CREDENTIAL, options=options) + recorder = self._instrument_session(app) + + auth.verify_id_token(TEST_ID_TOKEN) + + assert len(recorder) == 1 + assert recorder[0]._extra_kwargs['timeout'] == timeout + + @pytest.mark.parametrize('options, timeout', timeout_configs) + def test_verify_session_cookie_timeout(self, options, timeout): + app = firebase_admin.initialize_app(MOCK_CREDENTIAL, options=options) + recorder = self._instrument_session(app) + + auth.verify_session_cookie(TEST_SESSION_COOKIE) + + assert len(recorder) == 1 + assert recorder[0]._extra_kwargs['timeout'] == timeout + + def _instrument_session(self, app): + client = auth._get_client(app) + request = client._token_verifier.request + recorder = [] + request.session.mount('https://', testutils.MockAdapter(MOCK_PUBLIC_CERTS, 200, recorder)) + return recorder diff --git a/tests/test_user_mgt.py b/tests/test_user_mgt.py new file mode 100644 index 000000000..4623f5e54 --- /dev/null +++ b/tests/test_user_mgt.py @@ -0,0 +1,1561 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test cases for the firebase_admin._user_mgt module.""" + +import base64 +import json +import time +from urllib import parse + +import pytest + +import firebase_admin +from firebase_admin import auth +from firebase_admin import exceptions +from firebase_admin import _auth_utils +from firebase_admin import _http_client +from firebase_admin import _user_import +from firebase_admin import _user_mgt +from firebase_admin import _utils +from tests import testutils + + +INVALID_STRINGS = [None, '', 0, 1, True, False, [], tuple(), {}] +INVALID_DICTS = [None, 'foo', 0, 1, True, False, [], tuple()] +INVALID_INTS = [None, 'foo', '1', -1, 1.1, True, False, [], tuple(), {}] +INVALID_TIMESTAMPS = ['foo', '1', 0, -1, 1.1, True, False, [], tuple(), {}] + +MOCK_GET_USER_RESPONSE = testutils.resource('get_user.json') +MOCK_LIST_USERS_RESPONSE = testutils.resource('list_users.json') + +MOCK_ACTION_CODE_DATA = { + 'url': 'http://localhost', + 'handle_code_in_app': True, + 'dynamic_link_domain': 'http://dynamic-link-domain', + 'link_domain': 'http://link-domain', + 'ios_bundle_id': 'test.bundle', + 'android_package_name': 'test.bundle', + 'android_minimum_version': '7', + 'android_install_app': True, +} +MOCK_ACTION_CODE_SETTINGS = auth.ActionCodeSettings(**MOCK_ACTION_CODE_DATA) + +TEST_TIMEOUT = 42 + +ID_TOOLKIT_URL = 'https://identitytoolkit.googleapis.com/v1' +EMULATOR_HOST_ENV_VAR = 'FIREBASE_AUTH_EMULATOR_HOST' +AUTH_EMULATOR_HOST = 'localhost:9099' +EMULATED_ID_TOOLKIT_URL = f'http://{AUTH_EMULATOR_HOST}/identitytoolkit.googleapis.com/v1' +URL_PROJECT_SUFFIX = 
'/projects/mock-project-id' +USER_MGT_URLS = { + 'ID_TOOLKIT': ID_TOOLKIT_URL, + 'PREFIX': ID_TOOLKIT_URL + URL_PROJECT_SUFFIX, +} + +@pytest.fixture(params=[{'emulated': False}, {'emulated': True}]) +def user_mgt_app(request): + monkeypatch = testutils.new_monkeypatch() + if request.param['emulated']: + monkeypatch.setenv(EMULATOR_HOST_ENV_VAR, AUTH_EMULATOR_HOST) + monkeypatch.setitem(USER_MGT_URLS, 'ID_TOOLKIT', EMULATED_ID_TOOLKIT_URL) + monkeypatch.setitem(USER_MGT_URLS, 'PREFIX', EMULATED_ID_TOOLKIT_URL + URL_PROJECT_SUFFIX) + app = firebase_admin.initialize_app(testutils.MockCredential(), name='userMgt', + options={'projectId': 'mock-project-id'}) + yield app + firebase_admin.delete_app(app) + monkeypatch.undo() + +@pytest.fixture +def user_mgt_app_with_timeout(): + app = firebase_admin.initialize_app( + testutils.MockCredential(), + name='userMgtTimeout', + options={'projectId': 'mock-project-id', 'httpTimeout': TEST_TIMEOUT} + ) + yield app + firebase_admin.delete_app(app) + +def _instrument_user_manager(app, status, payload): + client = auth._get_client(app) + user_manager = client._user_manager + recorder = [] + user_manager.http_client.session.mount( + USER_MGT_URLS['ID_TOOLKIT'], + testutils.MockAdapter(payload, status, recorder)) + return user_manager, recorder + +def _check_user_record(user, expected_uid='testuser'): + assert isinstance(user, auth.UserRecord) + assert user.uid == expected_uid + assert user.email == 'testuser@example.com' + assert user.phone_number == '+1234567890' + assert user.display_name == 'Test User' + assert user.photo_url == 'http://www.example.com/testuser/photo.png' + assert user.disabled is False + assert user.email_verified is True + assert user.user_metadata.creation_timestamp == 1234567890000 + assert user.user_metadata.last_sign_in_timestamp is None + assert user.provider_id == 'firebase' + assert user.tenant_id is None + + claims = user.custom_claims + assert claims['admin'] is True + assert claims['package'] == 'gold' 
+ + assert len(user.provider_data) == 2 + provider = user.provider_data[0] + assert provider.uid == 'testuser@example.com' + assert provider.email == 'testuser@example.com' + assert provider.phone_number is None + assert provider.display_name == 'Test User' + assert provider.photo_url == 'http://www.example.com/testuser/photo.png' + assert provider.provider_id == 'password' + + provider = user.provider_data[1] + assert provider.uid == '+1234567890' + assert provider.email is None + assert provider.phone_number == '+1234567890' + assert provider.display_name is None + assert provider.photo_url is None + assert provider.provider_id == 'phone' + + +def _check_request(recorder, want_url, want_body=None, want_timeout=None): + assert len(recorder) == 1 + req = recorder[0] + assert req.method == 'POST' + assert req.url == f'{USER_MGT_URLS["PREFIX"]}{want_url}' + expected_metrics_header = [ + _utils.get_metrics_header(), + _utils.get_metrics_header() + ' mock-cred-metric-tag' + ] + assert req.headers['x-goog-api-client'] in expected_metrics_header + if want_body: + body = json.loads(req.body.decode()) + assert body == want_body + if want_timeout: + assert recorder[0]._extra_kwargs['timeout'] == pytest.approx(want_timeout, 0.001) + + +class TestAuthServiceInitialization: + + def test_default_timeout(self, user_mgt_app): + client = auth._get_client(user_mgt_app) + user_manager = client._user_manager + assert user_manager.http_client.timeout == _http_client.DEFAULT_TIMEOUT_SECONDS + + def test_app_options_timeout(self, user_mgt_app_with_timeout): + client = auth._get_client(user_mgt_app_with_timeout) + user_manager = client._user_manager + assert user_manager.http_client.timeout == TEST_TIMEOUT + + def test_fail_on_no_project_id(self): + app = firebase_admin.initialize_app(testutils.MockCredential(), name='userMgt2') + with pytest.raises(ValueError): + auth._get_client(app) + firebase_admin.delete_app(app) + + +class TestUserRecord: + + # Input dict must be non-empty, and 
must not contain unsupported keys. + @pytest.mark.parametrize('data', INVALID_DICTS + [{}, {'foo':'bar'}]) + def test_invalid_record(self, data): + with pytest.raises(ValueError): + auth.UserRecord(data) + + def test_metadata(self): + metadata = auth.UserMetadata(10, 20) + assert metadata.creation_timestamp == 10 + assert metadata.last_sign_in_timestamp == 20 + metadata = auth.UserMetadata() + assert metadata.creation_timestamp is None + assert metadata.last_sign_in_timestamp is None + + def test_exported_record(self): + user = auth.ExportedUserRecord({ + 'localId' : 'user', + 'passwordHash' : 'passwordHash', + 'salt' : 'passwordSalt', + }) + assert user.uid == 'user' + assert user.password_hash == 'passwordHash' + assert user.password_salt == 'passwordSalt' + + def test_exported_record_no_password(self): + user = auth.ExportedUserRecord({ + 'localId' : 'user', + }) + assert user.uid == 'user' + assert user.password_hash is None + assert user.password_salt is None + + def test_exported_record_empty_password(self): + user = auth.ExportedUserRecord({ + 'localId' : 'user', + 'passwordHash' : '', + 'salt' : '', + }) + assert user.uid == 'user' + assert user.password_hash == '' + assert user.password_salt == '' + + def test_redacted_passwords_cleared(self): + user = auth.ExportedUserRecord({ + 'localId': 'user', + 'passwordHash': base64.b64encode(b'REDACTED'), + }) + assert user.password_hash is None + + def test_custom_claims(self): + user = auth.UserRecord({ + 'localId' : 'user', + 'customAttributes': '{"admin": true, "package": "gold"}' + }) + assert user.custom_claims == {'admin' : True, 'package' : 'gold'} + + def test_no_custom_claims(self): + user = auth.UserRecord({'localId' : 'user'}) + assert user.custom_claims is None + + def test_empty_custom_claims(self): + user = auth.UserRecord({'localId' : 'user', 'customAttributes' : '{}'}) + assert user.custom_claims is None + + @pytest.mark.parametrize('data', INVALID_DICTS + [{}, {'foo':'bar'}]) + def 
test_invalid_provider(self, data): + with pytest.raises(ValueError): + _user_mgt.ProviderUserInfo(data) + + def test_tokens_valid_after_time(self): + user = auth.UserRecord({'localId' : 'user', 'validSince' : 100}) + assert user.tokens_valid_after_timestamp == 100000 + + def test_no_tokens_valid_after_time(self): + user = auth.UserRecord({'localId' : 'user'}) + assert user.tokens_valid_after_timestamp == 0 + + def test_tenant_id(self): + user = auth.UserRecord({'localId' : 'user', 'tenantId': 'test-tenant'}) + assert user.tenant_id == 'test-tenant' + + +class TestGetUser: + + @pytest.mark.parametrize('arg', INVALID_STRINGS + ['a'*129]) + def test_invalid_get_user(self, arg, user_mgt_app): + with pytest.raises(ValueError): + auth.get_user(arg, app=user_mgt_app) + + def test_get_user(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, MOCK_GET_USER_RESPONSE) + _check_user_record(auth.get_user('testuser', user_mgt_app)) + _check_request(recorder, '/accounts:lookup', {'localId': ['testuser']}) + + def test_get_user_with_timeout(self, user_mgt_app_with_timeout): + _, recorder = _instrument_user_manager( + user_mgt_app_with_timeout, 200, MOCK_GET_USER_RESPONSE) + _check_user_record(auth.get_user('testuser', user_mgt_app_with_timeout)) + _check_request(recorder, '/accounts:lookup', {'localId': ['testuser']}, TEST_TIMEOUT) + + @pytest.mark.parametrize('arg', INVALID_STRINGS + ['not-an-email']) + def test_invalid_get_user_by_email(self, arg, user_mgt_app): + with pytest.raises(ValueError): + auth.get_user_by_email(arg, app=user_mgt_app) + + def test_get_user_by_email(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, MOCK_GET_USER_RESPONSE) + _check_user_record(auth.get_user_by_email('testuser@example.com', user_mgt_app)) + _check_request(recorder, '/accounts:lookup', {'email': ['testuser@example.com']}) + + @pytest.mark.parametrize('arg', INVALID_STRINGS + ['not-a-phone']) + def test_invalid_get_user_by_phone(self, 
arg, user_mgt_app): + with pytest.raises(ValueError): + auth.get_user_by_phone_number(arg, app=user_mgt_app) + + def test_get_user_by_phone(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, MOCK_GET_USER_RESPONSE) + _check_user_record(auth.get_user_by_phone_number('+1234567890', user_mgt_app)) + _check_request(recorder, '/accounts:lookup', {'phoneNumber': ['+1234567890']}) + + def test_get_user_non_existing(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '{"users":[]}') + with pytest.raises(auth.UserNotFoundError) as excinfo: + auth.get_user('nonexistentuser', user_mgt_app) + error_msg = 'No user record found for the provided user ID: nonexistentuser.' + assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is None + + def test_get_user_by_email_non_existing(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '{"users":[]}') + with pytest.raises(auth.UserNotFoundError) as excinfo: + auth.get_user_by_email('nonexistent@user', user_mgt_app) + error_msg = 'No user record found for the provided email: nonexistent@user.' + assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is None + + def test_get_user_by_phone_non_existing(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '{"users":[]}') + with pytest.raises(auth.UserNotFoundError) as excinfo: + auth.get_user_by_phone_number('+1234567890', user_mgt_app) + error_msg = 'No user record found for the provided phone number: +1234567890.' 
+ assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is None + + def test_get_user_http_error(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error":{"message": "USER_NOT_FOUND"}}') + with pytest.raises(auth.UserNotFoundError) as excinfo: + auth.get_user('testuser', user_mgt_app) + error_msg = 'No user record found for the given identifier (USER_NOT_FOUND).' + assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def test_get_user_http_error_unexpected_code(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error":{"message": "UNEXPECTED_CODE"}}') + with pytest.raises(exceptions.InternalError) as excinfo: + auth.get_user('testuser', user_mgt_app) + assert str(excinfo.value) == 'Error while calling Auth service (UNEXPECTED_CODE).' + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def test_get_user_http_error_malformed_response(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error": "UNEXPECTED_CODE"}') + with pytest.raises(exceptions.InternalError) as excinfo: + auth.get_user('testuser', user_mgt_app) + assert str(excinfo.value) == 'Unexpected error response: {"error": "UNEXPECTED_CODE"}' + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def test_get_user_by_email_http_error(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error":{"message": "USER_NOT_FOUND"}}') + with pytest.raises(auth.UserNotFoundError) as excinfo: + auth.get_user_by_email('non.existent.user@example.com', user_mgt_app) + error_msg = 'No user record found for the given identifier (USER_NOT_FOUND).' 
+ assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def test_get_user_by_phone_http_error(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error":{"message": "USER_NOT_FOUND"}}') + with pytest.raises(auth.UserNotFoundError) as excinfo: + auth.get_user_by_phone_number('+1234567890', user_mgt_app) + error_msg = 'No user record found for the given identifier (USER_NOT_FOUND).' + assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + +class TestGetUsers: + + @staticmethod + def _map_user_record_to_uid_email_phones(user_record): + return { + 'uid': user_record.uid, + 'email': user_record.email, + 'phone_number': user_record.phone_number + } + + def test_more_than_100_identifiers(self, user_mgt_app): + identifiers = [auth.UidIdentifier('id' + str(i)) for i in range(101)] + with pytest.raises(ValueError): + auth.get_users(identifiers, app=user_mgt_app) + + def test_no_identifiers(self, user_mgt_app): + get_users_results = auth.get_users([], app=user_mgt_app) + assert get_users_results.users == [] + assert get_users_results.not_found == [] + + def test_identifiers_that_do_not_exist(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '{}') + not_found_ids = [auth.UidIdentifier('id that doesnt exist')] + get_users_results = auth.get_users(not_found_ids, app=user_mgt_app) + assert get_users_results.users == [] + assert get_users_results.not_found == not_found_ids + + def test_invalid_uid(self): + with pytest.raises(ValueError): + auth.UidIdentifier('too long ' + '.'*128) + + def test_invalid_email(self): + with pytest.raises(ValueError): + auth.EmailIdentifier('invalid email addr') + + def test_invalid_phone_number(self): + with pytest.raises(ValueError): + 
auth.PhoneIdentifier('invalid phone number') + + def test_invalid_provider(self): + with pytest.raises(ValueError): + auth.ProviderIdentifier(provider_id='', provider_uid='') + + def test_success(self, user_mgt_app): + mock_users = [{ + "localId": "uid1", + "email": "user1@example.com", + "phoneNumber": "+15555550001" + }, { + "localId": "uid2", + "email": "user2@example.com", + "phoneNumber": "+15555550002" + }, { + "localId": "uid3", + "email": "user3@example.com", + "phoneNumber": "+15555550003" + }, { + "localId": "uid4", + "email": "user4@example.com", + "phoneNumber": "+15555550004", + "providerUserInfo": [{ + "providerId": "google.com", + "rawId": "google_uid4" + }] + }] + _instrument_user_manager(user_mgt_app, 200, '{ "users": ' + json.dumps(mock_users) + '}') + + get_users_results = auth.get_users([ + auth.UidIdentifier('uid1'), + auth.EmailIdentifier('user2@example.com'), + auth.PhoneIdentifier('+15555550003'), + auth.ProviderIdentifier(provider_id='google.com', provider_uid='google_uid4'), + auth.UidIdentifier('this-user-doesnt-exist'), + ], app=user_mgt_app) + + actual = sorted( + [self._map_user_record_to_uid_email_phones(user) for user in get_users_results.users], + key=lambda user: user['uid']) + expected = sorted([ + self._map_user_record_to_uid_email_phones(auth.UserRecord(user)) + for user in mock_users + ], key=lambda user: user['uid']) + assert actual == expected + assert [u.uid for u in get_users_results.not_found] == ['this-user-doesnt-exist'] + + +class TestCreateUser: + + already_exists_errors = { + 'DUPLICATE_EMAIL': auth.EmailAlreadyExistsError, + 'DUPLICATE_LOCAL_ID': auth.UidAlreadyExistsError, + 'EMAIL_EXISTS': auth.EmailAlreadyExistsError, + 'PHONE_NUMBER_EXISTS': auth.PhoneNumberAlreadyExistsError, + } + + @pytest.mark.parametrize('arg', INVALID_STRINGS[1:] + ['a'*129]) + def test_invalid_uid(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.create_user(uid=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', 
INVALID_STRINGS[1:] + ['not-an-email']) + def test_invalid_email(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.create_user(email=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_STRINGS[1:] + ['not-a-phone', '+']) + def test_invalid_phone(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.create_user(phone_number=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_STRINGS[1:]) + def test_invalid_display_name(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.create_user(display_name=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_STRINGS[1:] + ['not-a-url']) + def test_invalid_photo_url(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.create_user(photo_url=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_STRINGS[1:] + ['short']) + def test_invalid_password(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.create_user(password=arg, app=user_mgt_app) + + def test_invalid_property(self, user_mgt_app): + with pytest.raises(TypeError): + auth.create_user(unsupported='value', app=user_mgt_app) + + def test_create_user(self, user_mgt_app): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + assert user_mgt.create_user() == 'testuser' + request = json.loads(recorder[0].body.decode()) + assert request == {} + + @pytest.mark.parametrize('phone', [ + '+11234567890', '+1 123 456 7890', '+1 (123) 456-7890', + ]) + def test_create_user_with_phone(self, user_mgt_app, phone): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + assert user_mgt.create_user(phone_number=phone) == 'testuser' + request = json.loads(recorder[0].body.decode()) + assert request == {'phoneNumber' : phone} + + def test_create_user_with_email(self, user_mgt_app): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + assert 
user_mgt.create_user(email='test@example.com', email_verified=True) == 'testuser' + request = json.loads(recorder[0].body.decode()) + assert request == {'email' : 'test@example.com', 'emailVerified' : True} + + def test_create_user_with_id(self, user_mgt_app): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + assert user_mgt.create_user(uid='testuser') == 'testuser' + request = json.loads(recorder[0].body.decode()) + assert request == {'localId' : 'testuser'} + + def test_create_user_error(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error": {"message": "UNEXPECTED_CODE"}}') + with pytest.raises(exceptions.InternalError) as excinfo: + auth.create_user(app=user_mgt_app) + assert str(excinfo.value) == 'Error while calling Auth service (UNEXPECTED_CODE).' + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + @pytest.mark.parametrize('error_code', already_exists_errors.keys()) + def test_user_already_exists(self, user_mgt_app, error_code): + resp = {'error': {'message': error_code}} + _instrument_user_manager(user_mgt_app, 500, json.dumps(resp)) + exc_type = self.already_exists_errors[error_code] + with pytest.raises(exc_type) as excinfo: + auth.create_user(app=user_mgt_app) + assert isinstance(excinfo.value, exceptions.AlreadyExistsError) + assert str(excinfo.value) == f'{exc_type.default_message} ({error_code}).' + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def test_create_user_unexpected_response(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '{"error": "test"}') + with pytest.raises(auth.UnexpectedResponseError) as excinfo: + auth.create_user(app=user_mgt_app) + assert str(excinfo.value) == 'Failed to create new user.' 
+ assert excinfo.value.http_response is not None + assert excinfo.value.cause is None + assert isinstance(excinfo.value, exceptions.UnknownError) + + +class TestUpdateUser: + + @pytest.mark.parametrize('arg', INVALID_STRINGS + ['a'*129]) + def test_invalid_uid(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.update_user(arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_STRINGS[1:] + ['not-an-email']) + def test_invalid_email(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.update_user('user', email=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_STRINGS[1:] + ['not-a-phone', '+']) + def test_invalid_phone(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.update_user('user', phone_number=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_STRINGS[1:]) + def test_invalid_display_name(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.update_user('user', display_name=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_STRINGS[1:] + ['not-a-url']) + def test_invalid_photo_url(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.update_user('user', photo_url=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_STRINGS[1:] + ['short']) + def test_invalid_password(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.update_user('user', password=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_DICTS[1:] + ['"json"']) + def test_invalid_custom_claims(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.update_user('user', custom_claims=arg, app=user_mgt_app) + + def test_invalid_property(self, user_mgt_app): + with pytest.raises(TypeError): + auth.update_user('user', unsupported='arg', app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_TIMESTAMPS) + def test_invalid_valid_since(self, user_mgt_app, arg): + with pytest.raises(ValueError): + 
auth.update_user('user', valid_since=arg, app=user_mgt_app) + + def test_update_user(self, user_mgt_app): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + user_mgt.update_user('testuser') + request = json.loads(recorder[0].body.decode()) + assert request == {'localId' : 'testuser'} + + @pytest.mark.parametrize('arg', [True, False, 1, 0, 'foo']) + def test_disable_user(self, arg, user_mgt_app): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + user_mgt.update_user('testuser', disabled=arg) + request = json.loads(recorder[0].body.decode()) + assert request == {'localId' : 'testuser', 'disableUser' : bool(arg)} + + @pytest.mark.parametrize('arg', [True, False, 1, 0, 'foo']) + def test_set_email_verified(self, arg, user_mgt_app): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + user_mgt.update_user('testuser', email_verified=arg) + request = json.loads(recorder[0].body.decode()) + assert request == {'localId' : 'testuser', 'emailVerified' : bool(arg)} + + def test_update_user_custom_claims(self, user_mgt_app): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + claims = {'admin':True, 'package':'gold'} + user_mgt.update_user('testuser', custom_claims=claims) + request = json.loads(recorder[0].body.decode()) + assert request == {'localId' : 'testuser', 'customAttributes' : json.dumps(claims)} + + def test_delete_user_custom_claims(self, user_mgt_app): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + user_mgt.update_user('testuser', custom_claims=auth.DELETE_ATTRIBUTE) + request = json.loads(recorder[0].body.decode()) + assert request == {'localId' : 'testuser', 'customAttributes' : json.dumps({})} + + def test_update_user_delete_fields(self, user_mgt_app): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') 
+ user_mgt.update_user( + 'testuser', + display_name=auth.DELETE_ATTRIBUTE, + photo_url=auth.DELETE_ATTRIBUTE, + phone_number=auth.DELETE_ATTRIBUTE) + request = json.loads(recorder[0].body.decode()) + assert request == { + 'localId' : 'testuser', + 'deleteAttribute' : ['DISPLAY_NAME', 'PHOTO_URL'], + 'deleteProvider' : ['phone'], + } + + def test_update_user_error(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error": {"message": "UNEXPECTED_CODE"}}') + with pytest.raises(exceptions.InternalError) as excinfo: + auth.update_user('user', app=user_mgt_app) + assert str(excinfo.value) == 'Error while calling Auth service (UNEXPECTED_CODE).' + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def test_update_user_unexpected_response(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '{"error": "test"}') + with pytest.raises(auth.UnexpectedResponseError) as excinfo: + auth.update_user('user', app=user_mgt_app) + assert str(excinfo.value) == 'Failed to update user: user.' 
+ assert excinfo.value.http_response is not None + assert excinfo.value.cause is None + assert isinstance(excinfo.value, exceptions.UnknownError) + + @pytest.mark.parametrize('arg', [1, 1.0]) + def test_update_user_valid_since(self, user_mgt_app, arg): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + user_mgt.update_user('testuser', valid_since=arg) + request = json.loads(recorder[0].body.decode()) + assert request == {'localId': 'testuser', 'validSince': int(arg)} + + @pytest.mark.parametrize('arg', [['phone'], ['google.com', 'phone']]) + def test_update_user_delete_provider(self, user_mgt_app, arg): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + user_mgt.update_user('testuser', providers_to_delete=arg) + request = json.loads(recorder[0].body.decode()) + assert set(request['deleteProvider']) == set(arg) + + @pytest.mark.parametrize('arg', [[], ['phone'], ['google.com'], ['google.com', 'phone']]) + def test_update_user_delete_provider_and_phone(self, user_mgt_app, arg): + user_mgt, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + user_mgt.update_user('testuser', + providers_to_delete=arg, + phone_number=auth.DELETE_ATTRIBUTE) + request = json.loads(recorder[0].body.decode()) + assert 'phone' in request['deleteProvider'] + assert len(set(request['deleteProvider'])) == len(request['deleteProvider']) + assert set(arg) - set(request['deleteProvider']) == set() + +class TestSetCustomUserClaims: + + @pytest.mark.parametrize('arg', INVALID_STRINGS + ['a'*129]) + def test_invalid_uid(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.set_custom_user_claims(arg, {'foo': 'bar'}, app=user_mgt_app) + + @pytest.mark.parametrize('arg', INVALID_DICTS[1:] + ['"json"']) + def test_invalid_custom_claims(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.set_custom_user_claims('user', arg, app=user_mgt_app) + + 
@pytest.mark.parametrize('key', _auth_utils.RESERVED_CLAIMS) + def test_single_reserved_claim(self, user_mgt_app, key): + claims = {key : 'value'} + with pytest.raises(ValueError) as excinfo: + auth.set_custom_user_claims('user', claims, app=user_mgt_app) + assert str(excinfo.value) == f'Claim "{key}" is reserved, and must not be set.' + + def test_multiple_reserved_claims(self, user_mgt_app): + claims = {key : 'value' for key in _auth_utils.RESERVED_CLAIMS} + with pytest.raises(ValueError) as excinfo: + auth.set_custom_user_claims('user', claims, app=user_mgt_app) + joined = ', '.join(sorted(claims.keys())) + assert str(excinfo.value) == f'Claims "{joined}" are reserved, and must not be set.' + + def test_large_claims_payload(self, user_mgt_app): + claims = {'key' : 'A'*1000} + with pytest.raises(ValueError) as excinfo: + auth.set_custom_user_claims('user', claims, app=user_mgt_app) + assert str(excinfo.value) == 'Custom claims payload must not exceed 1000 characters.' + + def test_set_custom_user_claims(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + claims = {'admin':True, 'package':'gold'} + auth.set_custom_user_claims('testuser', claims, app=user_mgt_app) + request = json.loads(recorder[0].body.decode()) + assert request == {'localId' : 'testuser', 'customAttributes' : json.dumps(claims)} + + def test_set_custom_user_claims_str(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + claims = json.dumps({'admin':True, 'package':'gold'}) + auth.set_custom_user_claims('testuser', claims, app=user_mgt_app) + request = json.loads(recorder[0].body.decode()) + assert request == {'localId' : 'testuser', 'customAttributes' : claims} + + @pytest.mark.parametrize('claims', [None, auth.DELETE_ATTRIBUTE]) + def test_set_custom_user_claims_remove(self, user_mgt_app, claims): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + 
auth.set_custom_user_claims('testuser', claims, app=user_mgt_app) + request = json.loads(recorder[0].body.decode()) + assert request == {'localId' : 'testuser', 'customAttributes' : json.dumps({})} + + def test_set_custom_user_claims_error(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error": {"message": "UNEXPECTED_CODE"}}') + with pytest.raises(exceptions.InternalError) as excinfo: + auth.set_custom_user_claims('user', {}, app=user_mgt_app) + assert str(excinfo.value) == 'Error while calling Auth service (UNEXPECTED_CODE).' + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + +class TestDeleteUser: + + @pytest.mark.parametrize('arg', INVALID_STRINGS + ['a'*129]) + def test_invalid_delete_user(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.delete_user(arg, app=user_mgt_app) + + def test_delete_user(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '{"kind":"deleteresponse"}') + # should not raise + auth.delete_user('testuser', user_mgt_app) + + def test_delete_user_error(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error": {"message": "UNEXPECTED_CODE"}}') + with pytest.raises(exceptions.InternalError) as excinfo: + auth.delete_user('user', app=user_mgt_app) + assert str(excinfo.value) == 'Error while calling Auth service (UNEXPECTED_CODE).' + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def test_delete_user_unexpected_response(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '{"error": "test"}') + with pytest.raises(auth.UnexpectedResponseError) as excinfo: + auth.delete_user('user', app=user_mgt_app) + assert str(excinfo.value) == 'Failed to delete user: user.' 
+ assert excinfo.value.http_response is not None + assert excinfo.value.cause is None + assert isinstance(excinfo.value, exceptions.UnknownError) + + +class TestDeleteUsers: + + def test_empty_list(self, user_mgt_app): + delete_users_result = auth.delete_users([], app=user_mgt_app) + assert delete_users_result.success_count == 0 + assert delete_users_result.failure_count == 0 + assert len(delete_users_result.errors) == 0 + + def test_too_many_identifiers_should_fail(self, user_mgt_app): + ids = ['id' + str(i) for i in range(1001)] + with pytest.raises(ValueError): + auth.delete_users(ids, app=user_mgt_app) + + def test_invalid_id_should_fail(self, user_mgt_app): + ids = ['too long ' + '.'*128] + with pytest.raises(ValueError): + auth.delete_users(ids, app=user_mgt_app) + + def test_should_index_errors_correctly_in_results(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, """{ + "errors": [{ + "index": 0, + "localId": "uid1", + "message": "NOT_DISABLED : Disable the account before batch deletion." 
+ }, { + "index": 2, + "localId": "uid3", + "message": "something awful" + }] + }""") + + delete_users_result = auth.delete_users(['uid1', 'uid2', 'uid3', 'uid4'], app=user_mgt_app) + assert delete_users_result.success_count == 2 + assert delete_users_result.failure_count == 2 + assert len(delete_users_result.errors) == 2 + assert delete_users_result.errors[0].index == 0 + assert delete_users_result.errors[0].reason.startswith('NOT_DISABLED') + assert delete_users_result.errors[1].index == 2 + assert delete_users_result.errors[1].reason == 'something awful' + + def test_success(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '{}') + delete_users_result = auth.delete_users(['uid1', 'uid2', 'uid3'], app=user_mgt_app) + assert delete_users_result.success_count == 3 + assert delete_users_result.failure_count == 0 + assert len(delete_users_result.errors) == 0 + + +class TestListUsers: + + @pytest.mark.parametrize('arg', [None, 'foo', [], {}, 0, -1, 1001, False]) + def test_invalid_max_results(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.list_users(max_results=arg, app=user_mgt_app) + + @pytest.mark.parametrize('arg', ['', [], {}, 0, -1, 1001, False]) + def test_invalid_page_token(self, user_mgt_app, arg): + with pytest.raises(ValueError): + auth.list_users(page_token=arg, app=user_mgt_app) + + def test_list_single_page(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, MOCK_LIST_USERS_RESPONSE) + page = auth.list_users(app=user_mgt_app) + self._check_page(page) + assert page.next_page_token == '' + assert page.has_next_page is False + assert page.get_next_page() is None + users = list(user for user in page.iterate_all()) + assert len(users) == 2 + self._check_rpc_calls(recorder) + + def test_list_multiple_pages(self, user_mgt_app): + # Page 1 + response = { + 'users': [{'localId': 'user1'}, {'localId': 'user2'}, {'localId': 'user3'}], + 'nextPageToken': 'token' + } + _, recorder = 
_instrument_user_manager(user_mgt_app, 200, json.dumps(response)) + page = auth.list_users(app=user_mgt_app) + assert len(page.users) == 3 + assert page.next_page_token == 'token' + assert page.has_next_page is True + self._check_rpc_calls(recorder) + + # Page 2 (also the last page) + response = {'users': [{'localId': 'user4'}]} + _, recorder = _instrument_user_manager(user_mgt_app, 200, json.dumps(response)) + page = page.get_next_page() + assert len(page.users) == 1 + assert page.next_page_token == '' + assert page.has_next_page is False + assert page.get_next_page() is None + self._check_rpc_calls(recorder, {'maxResults': '1000', 'nextPageToken': 'token'}) + + def test_list_users_paged_iteration(self, user_mgt_app): + # Page 1 + response = { + 'users': [{'localId': 'user1'}, {'localId': 'user2'}, {'localId': 'user3'}], + 'nextPageToken': 'token' + } + _, recorder = _instrument_user_manager(user_mgt_app, 200, json.dumps(response)) + page = auth.list_users(app=user_mgt_app) + assert page.next_page_token == 'token' + assert page.has_next_page is True + iterator = page.iterate_all() + for index in range(3): + user = next(iterator) + assert user.uid == f'user{index+1}' + assert len(recorder) == 1 + self._check_rpc_calls(recorder) + + # Page 2 (also the last page) + response = {'users': [{'localId': 'user4'}]} + _, recorder = _instrument_user_manager(user_mgt_app, 200, json.dumps(response)) + user = next(iterator) + assert user.uid == 'user4' + with pytest.raises(StopIteration): + next(iterator) + self._check_rpc_calls(recorder, {'maxResults': '1000', 'nextPageToken': 'token'}) + + def test_list_users_iterator_state(self, user_mgt_app): + response = { + 'users': [{'localId': 'user1'}, {'localId': 'user2'}, {'localId': 'user3'}] + } + _, recorder = _instrument_user_manager(user_mgt_app, 200, json.dumps(response)) + page = auth.list_users(app=user_mgt_app) + + # Iterate through 2 results and break. 
+ index = 0 + iterator = page.iterate_all() + for user in iterator: + index += 1 + assert user.uid == f'user{index}' + if index == 2: + break + + # Iterator should resume from where left off. + user = next(iterator) + assert user.uid == 'user3' + with pytest.raises(StopIteration): + next(iterator) + self._check_rpc_calls(recorder) + + def test_list_users_stop_iteration(self, user_mgt_app): + response = { + 'users': [{'localId': 'user1'}, {'localId': 'user2'}, {'localId': 'user3'}] + } + _, recorder = _instrument_user_manager(user_mgt_app, 200, json.dumps(response)) + page = auth.list_users(app=user_mgt_app) + assert len(page.users) == 3 + + iterator = page.iterate_all() + users = list(user for user in iterator) + assert len(page.users) == 3 + with pytest.raises(StopIteration): + next(iterator) + assert len(users) == 3 + self._check_rpc_calls(recorder) + + def test_list_users_no_users_response(self, user_mgt_app): + response = {'users': []} + _instrument_user_manager(user_mgt_app, 200, json.dumps(response)) + page = auth.list_users(app=user_mgt_app) + assert len(page.users) == 0 + users = list(user for user in page.iterate_all()) + assert len(users) == 0 + + def test_list_users_with_max_results(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, MOCK_LIST_USERS_RESPONSE) + page = auth.list_users(max_results=500, app=user_mgt_app) + self._check_page(page) + self._check_rpc_calls(recorder, {'maxResults' : '500'}) + + def test_list_users_with_all_args(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, MOCK_LIST_USERS_RESPONSE) + page = auth.list_users(page_token='foo', max_results=500, app=user_mgt_app) + self._check_page(page) + self._check_rpc_calls(recorder, {'nextPageToken' : 'foo', 'maxResults' : '500'}) + + def test_list_users_error(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 500, '{"error":"test"}') + with pytest.raises(exceptions.InternalError) as excinfo: + 
auth.list_users(app=user_mgt_app) + assert str(excinfo.value) == 'Unexpected error response: {"error":"test"}' + + def test_permission_error(self, user_mgt_app): + _instrument_user_manager( + user_mgt_app, 400, '{"error": {"message": "INSUFFICIENT_PERMISSION"}}') + with pytest.raises(auth.InsufficientPermissionError) as excinfo: + auth.list_users(app=user_mgt_app) + assert isinstance(excinfo.value, exceptions.PermissionDeniedError) + msg = ('The credential used to initialize the SDK has insufficient ' + 'permissions to perform the requested operation. See ' + 'https://firebase.google.com/docs/admin/setup for details ' + 'on how to initialize the Admin SDK with appropriate permissions ' + '(INSUFFICIENT_PERMISSION).') + assert str(excinfo.value) == msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def _check_page(self, page): + assert isinstance(page, auth.ListUsersPage) + index = 0 + assert len(page.users) == 2 + for user in page.users: + assert isinstance(user, auth.ExportedUserRecord) + _check_user_record(user, f'testuser{index}') + assert user.password_hash == 'passwordHash' + assert user.password_salt == 'passwordSalt' + index += 1 + + def _check_rpc_calls(self, recorder, expected=None): + if expected is None: + expected = {'maxResults' : '1000'} + assert len(recorder) == 1 + request = dict(parse.parse_qsl(parse.urlsplit(recorder[0].url).query)) + assert request == expected + + +class TestUserProvider: + + _INVALID_PROVIDERS = ( + [{'display_name': arg} for arg in INVALID_STRINGS[1:]] + + [{'email': arg} for arg in INVALID_STRINGS[1:] + ['not-an-email']] + + [{'photo_url': arg} for arg in INVALID_STRINGS[1:] + ['not-a-url']] + ) + + def test_uid_and_provider_id(self): + provider = auth.UserProvider(uid='test', provider_id='google.com') + expected = {'rawId': 'test', 'providerId': 'google.com'} + assert provider.to_dict() == expected + + def test_all_params(self): + provider = auth.UserProvider( + uid='test', 
provider_id='google.com', email='test@example.com', + display_name='Test Name', photo_url='https://test.com/user.png') + expected = { + 'rawId': 'test', + 'providerId': 'google.com', + 'email': 'test@example.com', + 'displayName': 'Test Name', + 'photoUrl': 'https://test.com/user.png' + } + assert provider.to_dict() == expected + + @pytest.mark.parametrize('arg', INVALID_STRINGS + ['a'*129]) + def test_invalid_uid(self, arg): + with pytest.raises(ValueError): + auth.UserProvider(uid=arg, provider_id='google.com') + + @pytest.mark.parametrize('arg', INVALID_STRINGS) + def test_invalid_provider_id(self, arg): + with pytest.raises(ValueError): + auth.UserProvider(uid='test', provider_id=arg) + + @pytest.mark.parametrize('arg', _INVALID_PROVIDERS) + def test_invalid_arg(self, arg): + with pytest.raises(ValueError): + auth.UserProvider(uid='test', provider_id='google.com', **arg) + + +class TestUserMetadata: + + _INVALID_ARGS = ( + [{'creation_timestamp': arg} for arg in INVALID_TIMESTAMPS] + + [{'last_sign_in_timestamp': arg} for arg in INVALID_TIMESTAMPS] + ) + + @pytest.mark.parametrize('arg', _INVALID_ARGS) + def test_invalid_args(self, arg): + with pytest.raises(ValueError): + auth.UserMetadata(**arg) + + +class TestImportUserRecord: + + _INVALID_USERS = ( + [{'display_name': arg} for arg in INVALID_STRINGS[1:]] + + [{'email': arg} for arg in INVALID_STRINGS[1:] + ['not-an-email']] + + [{'photo_url': arg} for arg in INVALID_STRINGS[1:] + ['not-a-url']] + + [{'phone_number': arg} for arg in INVALID_STRINGS[1:] + ['not-a-phone']] + + [{'password_hash': arg} for arg in INVALID_STRINGS[1:] + ['test']] + + [{'password_salt': arg} for arg in INVALID_STRINGS[1:] + ['test']] + + [{'custom_claims': arg} for arg in INVALID_DICTS[1:] + ['"json"', {'key': 'a'*1000}]] + + [{'provider_data': arg} for arg in ['foo', 1, True]] + ) + + def test_uid(self): + user = auth.ImportUserRecord(uid='test') + assert user.uid == 'test' + assert user.custom_claims is None + assert 
user.user_metadata is None + assert user.to_dict() == {'localId': 'test'} + + def test_all_params(self): + providers = [auth.UserProvider(uid='test', provider_id='google.com')] + metadata = auth.UserMetadata(100, 150) + user = auth.ImportUserRecord( + uid='test', email='test@example.com', photo_url='https://test.com/user.png', + phone_number='+1234567890', display_name='name', user_metadata=metadata, + password_hash=b'password', password_salt=b'NaCl', custom_claims={'admin': True}, + email_verified=True, disabled=False, provider_data=providers) + expected = { + 'localId': 'test', + 'email': 'test@example.com', + 'photoUrl': 'https://test.com/user.png', + 'phoneNumber': '+1234567890', + 'displayName': 'name', + 'createdAt': 100, + 'lastLoginAt': 150, + 'passwordHash': _user_import.b64_encode(b'password'), + 'salt': _user_import.b64_encode(b'NaCl'), + 'customAttributes': json.dumps({'admin': True}), + 'emailVerified': True, + 'disabled': False, + 'providerUserInfo': [{'rawId': 'test', 'providerId': 'google.com'}], + } + assert user.to_dict() == expected + + @pytest.mark.parametrize('arg', INVALID_STRINGS + ['a'*129]) + def test_invalid_uid(self, arg): + with pytest.raises(ValueError): + auth.ImportUserRecord(uid=arg) + + @pytest.mark.parametrize('args', _INVALID_USERS) + def test_invalid_args(self, args): + with pytest.raises(ValueError): + auth.ImportUserRecord(uid='test', **args) + + @pytest.mark.parametrize('claims', [{}, {'admin': True}, '{"admin": true}']) + def test_custom_claims(self, claims): + user = auth.ImportUserRecord(uid='test', custom_claims=claims) + assert user.custom_claims == claims + json_claims = json.dumps(claims) if isinstance(claims, dict) else claims + expected = {'localId': 'test', 'customAttributes': json_claims} + assert user.to_dict() == expected + + @pytest.mark.parametrize('email_verified', [True, False]) + def test_email_verified(self, email_verified): + user = auth.ImportUserRecord(uid='test', email_verified=email_verified) + assert 
user.email_verified == email_verified + assert user.to_dict() == {'localId': 'test', 'emailVerified': email_verified} + + @pytest.mark.parametrize('disabled', [True, False]) + def test_disabled(self, disabled): + user = auth.ImportUserRecord(uid='test', disabled=disabled) + assert user.disabled == disabled + assert user.to_dict() == {'localId': 'test', 'disabled': disabled} + + +class TestUserImportHash: + + @pytest.mark.parametrize('func,name', [ + (auth.UserImportHash.hmac_sha512, 'HMAC_SHA512'), + (auth.UserImportHash.hmac_sha256, 'HMAC_SHA256'), + (auth.UserImportHash.hmac_sha1, 'HMAC_SHA1'), + (auth.UserImportHash.hmac_md5, 'HMAC_MD5'), + ]) + def test_hmac(self, func, name): + hmac = func(key=b'key') + expected = { + 'hashAlgorithm': name, + 'signerKey': _user_import.b64_encode(b'key'), + } + assert hmac.to_dict() == expected + + @pytest.mark.parametrize('func', [ + auth.UserImportHash.hmac_sha512, auth.UserImportHash.hmac_sha256, + auth.UserImportHash.hmac_sha1, auth.UserImportHash.hmac_md5, + ]) + @pytest.mark.parametrize('key', INVALID_STRINGS) + def test_invalid_hmac(self, func, key): + with pytest.raises(ValueError): + func(key=key) + + @pytest.mark.parametrize('func,name,rounds', [ + (auth.UserImportHash.md5, 'MD5', [0, 8192]), + (auth.UserImportHash.sha1, 'SHA1', [1, 8192]), + (auth.UserImportHash.sha256, 'SHA256', [1, 8192]), + (auth.UserImportHash.sha512, 'SHA512', [1, 8192]), + (auth.UserImportHash.pbkdf_sha1, 'PBKDF_SHA1', [0, 120000]), + (auth.UserImportHash.pbkdf2_sha256, 'PBKDF2_SHA256', [0, 120000]), + ]) + def test_basic(self, func, name, rounds): + for rnds in rounds: + basic = func(rounds=rnds) + expected = { + 'hashAlgorithm': name, + 'rounds': rnds, + } + assert basic.to_dict() == expected + + @pytest.mark.parametrize('func,rounds', [ + (auth.UserImportHash.md5, INVALID_INTS + [-1, 8193]), + (auth.UserImportHash.sha1, INVALID_INTS + [0, 8193]), + (auth.UserImportHash.sha256, INVALID_INTS + [0, 8193]), + (auth.UserImportHash.sha512, 
INVALID_INTS + [0, 8193]), + (auth.UserImportHash.pbkdf_sha1, INVALID_INTS + [-1, 120001]), + (auth.UserImportHash.pbkdf2_sha256, INVALID_INTS + [-1, 120001]), + ]) + def test_invalid_basic(self, func, rounds): + for rnds in rounds: + with pytest.raises(ValueError): + func(rounds=rnds) + + def test_scrypt(self): + scrypt = auth.UserImportHash.scrypt( + key=b'key', salt_separator=b'sep', rounds=8, memory_cost=14) + expected = { + 'hashAlgorithm': 'SCRYPT', + 'signerKey': _user_import.b64_encode(b'key'), + 'rounds': 8, + 'memoryCost': 14, + 'saltSeparator': _user_import.b64_encode(b'sep'), + } + assert scrypt.to_dict() == expected + + @pytest.mark.parametrize('arg', ( + [{'key': arg} for arg in INVALID_STRINGS] + + [{'rounds': arg} for arg in INVALID_INTS + [0, 9]] + + [{'memory_cost': arg} for arg in INVALID_INTS + [0, 15]] + + [{'salt_separator': arg} for arg in INVALID_STRINGS] + )) + def test_invalid_scrypt(self, arg): + params = {'key': 'key', 'rounds': 0, 'memory_cost': 14} + params.update(arg) + with pytest.raises(ValueError): + auth.UserImportHash.scrypt(**params) + + def test_bcrypt(self): + bcrypt = auth.UserImportHash.bcrypt() + assert bcrypt.to_dict() == {'hashAlgorithm': 'BCRYPT'} + + def test_standard_scrypt(self): + scrypt = auth.UserImportHash.standard_scrypt( + memory_cost=14, parallelization=2, block_size=10, derived_key_length=128) + expected = { + 'hashAlgorithm': 'STANDARD_SCRYPT', + 'cpuMemCost': 14, + 'parallelization': 2, + 'blockSize': 10, + 'dkLen': 128, + } + assert scrypt.to_dict() == expected + + @pytest.mark.parametrize('arg', ( + [{'memory_cost': arg} for arg in INVALID_INTS] + + [{'parallelization': arg} for arg in INVALID_INTS] + + [{'block_size': arg} for arg in INVALID_INTS] + + [{'derived_key_length': arg} for arg in INVALID_INTS] + )) + def test_invalid_standard_scrypt(self, arg): + params = { + 'memory_cost': 14, + 'parallelization': 2, + 'block_size': 10, + 'derived_key_length': 128, + } + params.update(arg) + with 
pytest.raises(ValueError): + auth.UserImportHash.standard_scrypt(**params) + + +class TestImportUsers: + + @pytest.mark.parametrize('arg', [None, [], tuple(), {}, 0, 1, 'foo']) + def test_invalid_users(self, user_mgt_app, arg): + with pytest.raises(Exception): + auth.import_users(arg, app=user_mgt_app) + + def test_too_many_users(self, user_mgt_app): + users = [auth.ImportUserRecord(uid=f'test{i}') for i in range(1001)] + with pytest.raises(ValueError): + auth.import_users(users, app=user_mgt_app) + + def test_import_users(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{}') + users = [ + auth.ImportUserRecord(uid='user1'), + auth.ImportUserRecord(uid='user2'), + ] + result = auth.import_users(users, app=user_mgt_app) + assert result.success_count == 2 + assert result.failure_count == 0 + assert result.errors == [] + expected = {'users': [{'localId': 'user1'}, {'localId': 'user2'}]} + _check_request(recorder, '/accounts:batchCreate', expected) + + def test_import_users_error(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, """{"error": [ + {"index": 0, "message": "Some error occured in user1"}, + {"index": 2, "message": "Another error occured in user3"} + ]}""") + users = [ + auth.ImportUserRecord(uid='user1'), + auth.ImportUserRecord(uid='user2'), + auth.ImportUserRecord(uid='user3'), + ] + result = auth.import_users(users, app=user_mgt_app) + assert result.success_count == 1 + assert result.failure_count == 2 + assert len(result.errors) == 2 + err = result.errors[0] + assert err.index == 0 + assert err.reason == 'Some error occured in user1' + err = result.errors[1] + assert err.index == 2 + assert err.reason == 'Another error occured in user3' + expected = {'users': [{'localId': 'user1'}, {'localId': 'user2'}, {'localId': 'user3'}]} + _check_request(recorder, '/accounts:batchCreate', expected) + + def test_import_users_missing_required_hash(self, user_mgt_app): + users = [ + 
auth.ImportUserRecord(uid='user1', password_hash=b'password'), + auth.ImportUserRecord(uid='user2'), + ] + with pytest.raises(ValueError): + auth.import_users(users, app=user_mgt_app) + + def test_import_users_with_hash(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{}') + users = [ + auth.ImportUserRecord(uid='user1', password_hash=b'password'), + auth.ImportUserRecord(uid='user2'), + ] + hash_alg = auth.UserImportHash.scrypt( + b'key', rounds=8, memory_cost=14, salt_separator=b'sep') + result = auth.import_users(users, hash_alg=hash_alg, app=user_mgt_app) + assert result.success_count == 2 + assert result.failure_count == 0 + assert result.errors == [] + expected = { + 'users': [ + {'localId': 'user1', 'passwordHash': _user_import.b64_encode(b'password')}, + {'localId': 'user2'} + ], + 'hashAlgorithm': 'SCRYPT', + 'signerKey': _user_import.b64_encode(b'key'), + 'rounds': 8, + 'memoryCost': 14, + 'saltSeparator': _user_import.b64_encode(b'sep'), + } + _check_request(recorder, '/accounts:batchCreate', expected) + + def test_import_users_http_error(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 401, '{"error": {"message": "ERROR_CODE"}}') + users = [ + auth.ImportUserRecord(uid='user1'), + auth.ImportUserRecord(uid='user2'), + ] + with pytest.raises(exceptions.UnauthenticatedError) as excinfo: + auth.import_users(users, app=user_mgt_app) + assert str(excinfo.value) == 'Error while calling Auth service (ERROR_CODE).' 
+ + def test_import_users_unexpected_response(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 200, '"not dict"') + users = [ + auth.ImportUserRecord(uid='user1'), + auth.ImportUserRecord(uid='user2'), + ] + with pytest.raises(auth.UnexpectedResponseError): + auth.import_users(users, app=user_mgt_app) + + +class TestRevokeRefreshTokkens: + + def test_revoke_refresh_tokens(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{"localId":"testuser"}') + before_time = time.time() + auth.revoke_refresh_tokens('testuser', app=user_mgt_app) + after_time = time.time() + + request = json.loads(recorder[0].body.decode()) + assert request['localId'] == 'testuser' + assert int(request['validSince']) >= int(before_time) + assert int(request['validSince']) <= int(after_time) + + +class TestActionCodeSetting: + + def test_valid_data(self): + data = { + 'url': 'http://localhost', + 'handle_code_in_app': True, + 'dynamic_link_domain': 'http://dynamic-link-domain', + 'link_domain': 'http://link-domain', + 'ios_bundle_id': 'test.bundle', + 'android_package_name': 'test.bundle', + 'android_minimum_version': '7', + 'android_install_app': True, + } + settings = auth.ActionCodeSettings(**data) + parameters = _user_mgt.encode_action_code_settings(settings) + assert parameters['continueUrl'] == data['url'] + assert parameters['canHandleCodeInApp'] == data['handle_code_in_app'] + assert parameters['dynamicLinkDomain'] == data['dynamic_link_domain'] + assert parameters['linkDomain'] == data['link_domain'] + assert parameters['iOSBundleId'] == data['ios_bundle_id'] + assert parameters['androidPackageName'] == data['android_package_name'] + assert parameters['androidMinimumVersion'] == data['android_minimum_version'] + assert parameters['androidInstallApp'] == data['android_install_app'] + + @pytest.mark.parametrize('data', [{'handle_code_in_app':'nonboolean'}, + {'android_install_app':'nonboolean'}, + {'dynamic_link_domain': False}, + 
{'ios_bundle_id':11}, + {'android_package_name':{}}, + {'android_minimum_version':tuple()}, + {'android_minimum_version':'7'}, + {'android_install_app': True}]) + def test_bad_data(self, data): + settings = auth.ActionCodeSettings('http://localhost', **data) + with pytest.raises(ValueError): + _user_mgt.encode_action_code_settings(settings) + + def test_bad_url(self): + settings = auth.ActionCodeSettings('http:') + with pytest.raises(ValueError): + _user_mgt.encode_action_code_settings(settings) + + def test_encode_action_code_bad_data(self): + with pytest.raises(AttributeError): + _user_mgt.encode_action_code_settings({"foo":"bar"}) + + +class TestGenerateEmailActionLink: + + def test_email_verification_no_settings(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{"oobLink":"https://testlink"}') + link = auth.generate_email_verification_link('test@test.com', app=user_mgt_app) + request = json.loads(recorder[0].body.decode()) + + assert link == 'https://testlink' + assert request['requestType'] == 'VERIFY_EMAIL' + self._validate_request(request) + + def test_password_reset_no_settings(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{"oobLink":"https://testlink"}') + link = auth.generate_password_reset_link('test@test.com', app=user_mgt_app) + request = json.loads(recorder[0].body.decode()) + + assert link == 'https://testlink' + assert request['requestType'] == 'PASSWORD_RESET' + self._validate_request(request) + + def test_email_signin_with_settings(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{"oobLink":"https://testlink"}') + link = auth.generate_sign_in_with_email_link('test@test.com', + action_code_settings=MOCK_ACTION_CODE_SETTINGS, + app=user_mgt_app) + request = json.loads(recorder[0].body.decode()) + + assert link == 'https://testlink' + assert request['requestType'] == 'EMAIL_SIGNIN' + self._validate_request(request, MOCK_ACTION_CODE_SETTINGS) + + def 
test_email_verification_with_settings(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{"oobLink":"https://testlink"}') + link = auth.generate_email_verification_link('test@test.com', + action_code_settings=MOCK_ACTION_CODE_SETTINGS, + app=user_mgt_app) + request = json.loads(recorder[0].body.decode()) + + assert link == 'https://testlink' + assert request['requestType'] == 'VERIFY_EMAIL' + self._validate_request(request, MOCK_ACTION_CODE_SETTINGS) + + def test_password_reset_with_settings(self, user_mgt_app): + _, recorder = _instrument_user_manager(user_mgt_app, 200, '{"oobLink":"https://testlink"}') + link = auth.generate_password_reset_link('test@test.com', + action_code_settings=MOCK_ACTION_CODE_SETTINGS, + app=user_mgt_app) + request = json.loads(recorder[0].body.decode()) + + assert link == 'https://testlink' + assert request['requestType'] == 'PASSWORD_RESET' + self._validate_request(request, MOCK_ACTION_CODE_SETTINGS) + + @pytest.mark.parametrize('func', [ + auth.generate_sign_in_with_email_link, + auth.generate_email_verification_link, + auth.generate_password_reset_link, + ]) + def test_api_call_failure(self, user_mgt_app, func): + _instrument_user_manager(user_mgt_app, 500, '{"error":{"message": "UNEXPECTED_CODE"}}') + with pytest.raises(exceptions.InternalError) as excinfo: + func('test@test.com', MOCK_ACTION_CODE_SETTINGS, app=user_mgt_app) + assert str(excinfo.value) == 'Error while calling Auth service (UNEXPECTED_CODE).' + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + def test_password_reset_non_existing(self, user_mgt_app): + _instrument_user_manager(user_mgt_app, 400, '{"error":{"message": "EMAIL_NOT_FOUND"}}') + with pytest.raises(auth.EmailNotFoundError) as excinfo: + auth.generate_password_reset_link( + 'nonexistent@user', MOCK_ACTION_CODE_SETTINGS, app=user_mgt_app) + error_msg = 'No user record found for the given email (EMAIL_NOT_FOUND).' 
+ assert excinfo.value.code == exceptions.NOT_FOUND + assert str(excinfo.value) == error_msg + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + @pytest.mark.parametrize('func', [ + auth.generate_sign_in_with_email_link, + auth.generate_email_verification_link, + auth.generate_password_reset_link, + ]) + def test_invalid_dynamic_link(self, user_mgt_app, func): + resp = '{"error":{"message": "INVALID_DYNAMIC_LINK_DOMAIN: Because of this reason."}}' + _instrument_user_manager(user_mgt_app, 500, resp) + with pytest.raises(auth.InvalidDynamicLinkDomainError) as excinfo: + func('test@test.com', MOCK_ACTION_CODE_SETTINGS, app=user_mgt_app) + assert isinstance(excinfo.value, exceptions.InvalidArgumentError) + assert str(excinfo.value) == ('Dynamic link domain specified in ActionCodeSettings is ' + 'not authorized (INVALID_DYNAMIC_LINK_DOMAIN). Because ' + 'of this reason.') + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + @pytest.mark.parametrize('func', [ + auth.generate_sign_in_with_email_link, + auth.generate_email_verification_link, + auth.generate_password_reset_link, + ]) + def test_invalid_hosting_link(self, user_mgt_app, func): + resp = '{"error":{"message": "INVALID_HOSTING_LINK_DOMAIN: Because of this reason."}}' + _instrument_user_manager(user_mgt_app, 500, resp) + with pytest.raises(auth.InvalidHostingLinkDomainError) as excinfo: + func('test@test.com', MOCK_ACTION_CODE_SETTINGS, app=user_mgt_app) + assert isinstance(excinfo.value, exceptions.InvalidArgumentError) + assert str(excinfo.value) == ('The provided hosting link domain is not configured in ' + 'Firebase Hosting or is not owned by the current project ' + '(INVALID_HOSTING_LINK_DOMAIN). 
Because of this reason.') + assert excinfo.value.http_response is not None + assert excinfo.value.cause is not None + + @pytest.mark.parametrize('func', [ + auth.generate_sign_in_with_email_link, + auth.generate_email_verification_link, + auth.generate_password_reset_link, + ]) + def test_api_call_no_link(self, user_mgt_app, func): + _instrument_user_manager(user_mgt_app, 200, '{}') + with pytest.raises(auth.UnexpectedResponseError) as excinfo: + func('test@test.com', MOCK_ACTION_CODE_SETTINGS, app=user_mgt_app) + assert str(excinfo.value) == 'Failed to generate email action link.' + assert excinfo.value.http_response is not None + assert excinfo.value.cause is None + assert isinstance(excinfo.value, exceptions.UnknownError) + + @pytest.mark.parametrize('func', [ + auth.generate_sign_in_with_email_link, + auth.generate_email_verification_link, + auth.generate_password_reset_link, + ]) + def test_bad_settings_data(self, user_mgt_app, func): + _instrument_user_manager(user_mgt_app, 200, '{"oobLink":"https://testlink"}') + with pytest.raises(AttributeError): + func('test@test.com', app=user_mgt_app, action_code_settings=1234) + + def test_bad_action_type(self, user_mgt_app): + with pytest.raises(ValueError): + auth._get_client(user_mgt_app) \ + ._user_manager \ + .generate_email_action_link('BAD_TYPE', 'test@test.com', + action_code_settings=MOCK_ACTION_CODE_SETTINGS) + + def _validate_request(self, request, settings=None): + assert request['email'] == 'test@test.com' + assert request['returnOobLink'] + if settings: + assert request['continueUrl'] == settings.url + assert request['canHandleCodeInApp'] == settings.handle_code_in_app + assert request['dynamicLinkDomain'] == settings.dynamic_link_domain + assert request['linkDomain'] == settings.link_domain + assert request['iOSBundleId'] == settings.ios_bundle_id + assert request['androidPackageName'] == settings.android_package_name + assert request['androidMinimumVersion'] == settings.android_minimum_version + assert 
request['androidInstallApp'] == settings.android_install_app diff --git a/tests/testutils.py b/tests/testutils.py index d5df4d590..7546595af 100644 --- a/tests/testutils.py +++ b/tests/testutils.py @@ -13,9 +13,16 @@ # limitations under the License. """Common utility classes and functions for testing.""" +import io import os +import pytest + +from google.auth import credentials, compute_engine from google.auth import transport +from requests import adapters +from requests import models + import firebase_admin @@ -26,7 +33,7 @@ def resource_filename(filename): def resource(filename): """Returns the contents of a test resource.""" - with open(resource_filename(filename), 'r') as file_obj: + with open(resource_filename(filename), 'r', encoding='utf-8') as file_obj: return file_obj.read() @@ -36,6 +43,26 @@ def cleanup_apps(): for app in apps: firebase_admin.delete_app(app) +def run_without_project_id(func): + env_vars = ['GCLOUD_PROJECT', 'GOOGLE_CLOUD_PROJECT'] + env_values = [] + for env_var in env_vars: + gcloud_project = os.environ.get(env_var) + if gcloud_project: + del os.environ[env_var] + env_values.append(gcloud_project) + try: + func() + finally: + for idx, env_var in enumerate(env_vars): + gcloud_project = env_values[idx] + if gcloud_project: + os.environ[env_var] = gcloud_project + + +def new_monkeypatch(): + return pytest.MonkeyPatch() + class MockResponse(transport.Response): def __init__(self, status, response): @@ -65,6 +92,202 @@ class MockRequest(transport.Request): def __init__(self, status, response): self.response = MockResponse(status, response) + self.log = [] - def __call__(self, *args, **kwargs): + def __call__(self, *args, **kwargs): # pylint: disable=arguments-differ + self.log.append((args, kwargs)) return self.response + + +class MockFailedRequest(transport.Request): + """A mock HTTP request that fails by raising an exception.""" + + def __init__(self, error): + self.error = error + self.log = [] + + def __call__(self, *args, **kwargs): # 
pylint: disable=arguments-differ + self.log.append((args, kwargs)) + raise self.error + + +# Temporarily disable the lint rule. For more information see: +# https://github.com/googleapis/google-auth-library-python/pull/561 +# pylint: disable=abstract-method +class MockGoogleCredential(credentials.Credentials): + """A mock Google authentication credential.""" + + def __init__(self): + super().__init__() + self.token = None + self._service_account_email = None + self._token_state = credentials.TokenState.INVALID + + def refresh(self, request): + self.token = 'mock-token' + self._service_account_email = 'mock-email' + self._token_state = credentials.TokenState.FRESH + + @property + def token_state(self): + return self._token_state + + @property + def service_account_email(self): + return self._service_account_email + + # Simulate x-goog-api-client modification in credential refresh + def _metric_header_for_usage(self): + return 'mock-cred-metric-tag' + + +class MockCredential(firebase_admin.credentials.Base): + """A mock Firebase credential implementation.""" + + def __init__(self): + self._g_credential = MockGoogleCredential() + + def get_credential(self): + return self._g_credential + +class MockGoogleComputeEngineCredential(compute_engine.Credentials): + """A mock Compute Engine credential""" + + def __init__(self): + super().__init__() + self.token = None + self._service_account_email = None + self._token_state = credentials.TokenState.INVALID + + def refresh(self, request): + self.token = 'mock-compute-engine-token' + self._service_account_email = 'mock-gce-email' + self._token_state = credentials.TokenState.FRESH + + @property + def token_state(self): + return self._token_state + + def _metric_header_for_usage(self): + return 'mock-gce-cred-metric-tag' + +class MockComputeEngineCredential(firebase_admin.credentials.Base): + """A mock Firebase credential implementation.""" + + def __init__(self): + self._g_credential = MockGoogleComputeEngineCredential() + + def 
get_credential(self): + return self._g_credential + +class MockMultiRequestAdapter(adapters.HTTPAdapter): + """A mock HTTP adapter that supports multiple responses for the Python requests module.""" + def __init__(self, responses, statuses, recorder): + """Constructs a MockMultiRequestAdapter. + + The lengths of the responses and statuses parameters must match. + + Each incoming request consumes a response and a status, in order. If all responses and + statuses are exhausted, further requests will reuse the last response and status. + """ + adapters.HTTPAdapter.__init__(self) + if len(responses) != len(statuses): + raise ValueError('The lengths of responses and statuses do not match.') + self._current_response = 0 + self._responses = list(responses) # Make a copy. + self._statuses = list(statuses) + self._recorder = recorder + + def send(self, request, **kwargs): # pylint: disable=arguments-differ + request._extra_kwargs = kwargs + self._recorder.append(request) + resp = models.Response() + resp.url = request.url + resp.status_code = self._statuses[self._current_response] + resp.raw = io.BytesIO(self._responses[self._current_response].encode()) + self._current_response = min(self._current_response + 1, len(self._responses) - 1) + return resp + + +class MockAdapter(MockMultiRequestAdapter): + """A mock HTTP adapter for the Python requests module.""" + def __init__(self, data, status, recorder): + super().__init__([data], [status], recorder) + + @property + def status(self): + return self._statuses[0] + + @property + def data(self): + return self._responses[0] + +class MockRequestBasedMultiRequestAdapter(adapters.HTTPAdapter): + """A mock HTTP adapter that supports multiple responses for the Python requests module. + The response for each incoming request should be specified in response_dict during + initialization. 
Each incoming request should contain an identifier in the its body.""" + def __init__(self, response_dict, recorder): + """Constructs a MockRequestBasedMultiRequestAdapter. + + Each incoming request consumes the response and status mapped to it. If no response + is specified for the request, the response will be 404 with an empty body. + """ + adapters.HTTPAdapter.__init__(self) + self._current_response = 0 + self._response_dict = dict(response_dict) + self._recorder = recorder + + def send(self, request, **kwargs): # pylint: disable=arguments-differ + request._extra_kwargs = kwargs + self._recorder.append(request) + resp = models.Response() + resp.url = request.url + resp.status_code = 404 # Not found. + resp.raw = None + for req_id, pair in self._response_dict.items(): + if req_id in str(request.body): + status, response = pair + resp.status_code = status + resp.raw = io.BytesIO(response.encode()) + break + return resp + +def build_mock_condition(name, condition): + return { + 'name': name, + 'condition': condition, + } + +def build_mock_parameter(name, description, value=None, + conditional_values=None, default_value=None, parameter_groups=None): + return { + 'name': name, + 'description': description, + 'value': value, + 'conditionalValues': conditional_values, + 'defaultValue': default_value, + 'parameterGroups': parameter_groups, + } + +def build_mock_conditional_value(condition_name, value): + return { + 'conditionName': condition_name, + 'value': value, + } + +def build_mock_default_value(value): + return { + 'value': value, + } + +def build_mock_parameter_group(name, description, parameters): + return { + 'name': name, + 'description': description, + 'parameters': parameters, + } + +def build_mock_version(version_number): + return { + 'versionNumber': version_number, + } diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 4be4c04f0..000000000 --- a/tox.ini +++ /dev/null @@ -1,33 +0,0 @@ -# Tox (https://tox.readthedocs.io/) is a tool for running 
tests -# in multiple virtualenvs. This configuration file will run the -# test suite on all supported python versions. To use it, "pip install tox" -# and then run "tox" from this directory. - -[tox] -envlist = py27,py33,py35,pypy,cover - -[testenv] -commands = pytest -deps = - pytest - google-auth - requests - six - -[coverbase] -basepython = python2.7 -commands = - pytest \ - --cov=firebase_admin \ - --cov=tests -deps = {[testenv]deps} - coverage - pytest-cov - -[testenv:cover] -basepython = {[coverbase]basepython} -commands = - {[coverbase]commands} - coverage report --show-missing -deps = - {[coverbase]deps}