diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 8be2b791..f2ba168f 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -41,7 +41,7 @@ jobs: steps: - name: Harden Runner - uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0 + uses: step-security/harden-runner@a4aa98b93cab29d9b1101a6143fb8bce00e2eac4 # v2.7.1 with: disable-sudo: true egress-policy: block @@ -53,11 +53,11 @@ jobs: objects.githubusercontent.com:443 - name: Checkout repository - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@d39d31e687223d841ef683f52467bd88e9b21c14 # v3.25.3 + uses: github/codeql-action/init@b7cec7526559c32f1616476ff32d17ba4c59b2d6 # v3.25.5 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -67,7 +67,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@d39d31e687223d841ef683f52467bd88e9b21c14 # v3.25.3 + uses: github/codeql-action/autobuild@b7cec7526559c32f1616476ff32d17ba4c59b2d6 # v3.25.5 # â„šī¸ Command-line programs to run using the OS shell. # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun @@ -80,6 +80,6 @@ jobs: # ./location_of_script_within_repo/buildscript.sh - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@d39d31e687223d841ef683f52467bd88e9b21c14 # v3.25.3 + uses: github/codeql-action/analyze@b7cec7526559c32f1616476ff32d17ba4c59b2d6 # v3.25.5 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/conformance.yml b/.github/workflows/conformance.yml index 80ad8afb..9369f779 100644 --- a/.github/workflows/conformance.yml +++ b/.github/workflows/conformance.yml @@ -16,7 +16,7 @@ jobs: python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Harden Runner - uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0 + uses: step-security/harden-runner@a4aa98b93cab29d9b1101a6143fb8bce00e2eac4 # v2.7.1 with: disable-sudo: true egress-policy: block @@ -30,7 +30,7 @@ jobs: storage.googleapis.com:443 - name: Checkout code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - name: Setup Python uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 @@ -41,7 +41,7 @@ jobs: run: python -m pip install -e . 
- name: Setup Go - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 + uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5.0.1 with: go-version: '1.20' diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index a79373ca..29b80a11 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Harden Runner - uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0 + uses: step-security/harden-runner@a4aa98b93cab29d9b1101a6143fb8bce00e2eac4 # v2.7.1 with: disable-sudo: true egress-policy: block @@ -25,6 +25,6 @@ jobs: api.github.com:443 github.com:443 - name: 'Checkout Repository' - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - name: 'Dependency Review' - uses: actions/dependency-review-action@5bbc3ba658137598168acb2ab73b21c432dd411b # v4.2.5 + uses: actions/dependency-review-action@0c155c5e8556a497adf53f2c18edabf945ed8e70 # v4.3.2 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index d560d95f..efbce442 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Harden Runner - uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0 + uses: step-security/harden-runner@a4aa98b93cab29d9b1101a6143fb8bce00e2eac4 # v2.7.1 with: disable-sudo: true egress-policy: block @@ -21,7 +21,7 @@ jobs: github.com:443 pypi.org:443 - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - name: Setup Python uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 - name: Install tox diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 63b9f7f8..9c11e448 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,12 +13,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Harden Runner - uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0 + uses: step-security/harden-runner@a4aa98b93cab29d9b1101a6143fb8bce00e2eac4 # v2.7.1 with: egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs - name: Checkout - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 with: ref: ${{ github.event.release.tag_name }} - name: Install Python @@ -28,7 +28,7 @@ jobs: - name: Build distributions run: python -m build - name: Publish - uses: pypa/gh-action-pypi-publish@c12cc61414480c03e10ea76e2a0a1a17d6c764e2 # main + uses: pypa/gh-action-pypi-publish@699cd6103f50bf5c3b2f070c70712d109c168e6c # main with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 150d7e49..9f84e511 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -26,16 +26,18 @@ jobs: steps: - name: Harden Runner - uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0 + uses: step-security/harden-runner@a4aa98b93cab29d9b1101a6143fb8bce00e2eac4 # v2.7.1 with: disable-sudo: true egress-policy: block allowed-endpoints: > api.github.com:443 api.osv.dev:443 + api.scorecard.dev:443 
api.securityscorecards.dev:443 auth.docker.io:443 bestpractices.coreinfrastructure.org:443 + bestpractices.dev:443 github.com:443 index.docker.io:443 oss-fuzz-build-logs.storage.googleapis.com:443 @@ -44,12 +46,12 @@ jobs: - name: "Checkout code" - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 with: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1 + uses: ossf/scorecard-action@dc50aa9510b46c811795eb24b2f1ba02a914e534 # v2.3.3 with: results_file: results.sarif results_format: sarif @@ -61,6 +63,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@d39d31e687223d841ef683f52467bd88e9b21c14 # v3.25.3 + uses: github/codeql-action/upload-sarif@b7cec7526559c32f1616476ff32d17ba4c59b2d6 # v3.25.5 with: sarif_file: results.sarif diff --git a/.github/workflows/unit.yml b/.github/workflows/unit.yml index f396ca2c..7943f75a 100644 --- a/.github/workflows/unit.yml +++ b/.github/workflows/unit.yml @@ -32,7 +32,7 @@ jobs: runs-on: ${{ matrix.platform }} steps: - name: Harden Runner - uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0 + uses: step-security/harden-runner@a4aa98b93cab29d9b1101a6143fb8bce00e2eac4 # v2.7.1 with: disable-sudo: true egress-policy: block @@ -45,7 +45,7 @@ jobs: registry-1.docker.io:443 - name: Checkout - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 + uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - name: Use Python ${{ matrix.python }} uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: diff --git a/CHANGELOG.md b/CHANGELOG.md index 23665c2e..5f0e13a1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,22 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [3.7.0](https://github.com/GoogleCloudPlatform/functions-framework-python/compare/v3.6.0...v3.7.0) (2024-05-17) + + +### Features + +* (opt-in): terminate handling of work when the request has already timed out ([#328](https://github.com/GoogleCloudPlatform/functions-framework-python/issues/328)) ([2601975](https://github.com/GoogleCloudPlatform/functions-framework-python/commit/2601975285386fc573de8033381edc99527ef3c9)) +* Add execution id ([#320](https://github.com/GoogleCloudPlatform/functions-framework-python/issues/320)) ([662bf4c](https://github.com/GoogleCloudPlatform/functions-framework-python/commit/662bf4ced9aa52efe774662a0f0f496d3d3534fc)) +* restore gunicorn worker default configs from 3.5.0 ([#326](https://github.com/GoogleCloudPlatform/functions-framework-python/issues/326)) ([f08757a](https://github.com/GoogleCloudPlatform/functions-framework-python/commit/f08757a17267d768e4c3ca4c6979f2a7db25e83c)) +* support disabling execution id logging ([#325](https://github.com/GoogleCloudPlatform/functions-framework-python/issues/325)) ([2e7de92](https://github.com/GoogleCloudPlatform/functions-framework-python/commit/2e7de92e5f9cd83f01222eb06385d66fe0211777)) + + +### Bug Fixes + +* update scorecard.yml ([#327](https://github.com/GoogleCloudPlatform/functions-framework-python/issues/327)) ([fff38ae](https://github.com/GoogleCloudPlatform/functions-framework-python/commit/fff38ae6ecb1054bad676900216663050e6edf10)) +* Update scorecard.yml ([#329](https://github.com/GoogleCloudPlatform/functions-framework-python/issues/329)) ([04c1fdc](https://github.com/GoogleCloudPlatform/functions-framework-python/commit/04c1fdc4185b8a97eb46d72b6432d32d5d70dffc)) + ## [3.6.0](https://github.com/GoogleCloudPlatform/functions-framework-python/compare/v3.5.0...v3.6.0) (2024-04-29) diff --git a/examples/cloud_run_cloud_events/send_cloud_event.py b/examples/cloud_run_cloud_events/send_cloud_event.py index b523c31a..c08b8f93 100644 --- a/examples/cloud_run_cloud_events/send_cloud_event.py +++ b/examples/cloud_run_cloud_events/send_cloud_event.py @@ -13,9 +13,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from cloudevents.http import CloudEvent, to_structured import requests +from cloudevents.http import CloudEvent, to_structured # Create a cloudevent using https://github.com/cloudevents/sdk-python # Note we only need source and type because the cloudevents constructor by diff --git a/playground/main.py b/playground/main.py new file mode 100644 index 00000000..b974ddbc --- /dev/null +++ b/playground/main.py @@ -0,0 +1,16 @@ +import logging +import time + +import functions_framework + +logger = logging.getLogger(__name__) + + +@functions_framework.http +def main(request): + timeout = 2 + for _ in range(timeout * 10): + time.sleep(0.1) + logger.info("logging message after timeout elapsed") + return "Hello, world!" 
+ diff --git a/setup.py b/setup.py index 6ee9902f..5e529845 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ setup( name="functions-framework", - version="3.6.0", + version="3.7.0", description="An open source FaaS (Function as a service) framework for writing portable Python functions -- brought to you by the Google Cloud Functions team.", long_description=long_description, long_description_content_type="text/markdown", @@ -55,6 +55,7 @@ "watchdog>=1.0.0", "gunicorn>=19.2.0; platform_system!='Windows'", "cloudevents>=1.2.0,<2.0.0", + "Werkzeug>=0.14,<4.0.0", ], entry_points={ "console_scripts": [ diff --git a/src/functions_framework/__init__.py b/src/functions_framework/__init__.py index 8c23e5c0..df4683cb 100644 --- a/src/functions_framework/__init__.py +++ b/src/functions_framework/__init__.py @@ -17,6 +17,8 @@ import io import json import logging +import logging.config +import os import os.path import pathlib import sys @@ -32,7 +34,12 @@ from cloudevents.http import from_http, is_binary from cloudevents.http.event import CloudEvent -from functions_framework import _function_registry, _typed_event, event_conversion +from functions_framework import ( + _function_registry, + _typed_event, + event_conversion, + execution_id, +) from functions_framework.background_event import BackgroundEvent from functions_framework.exceptions import ( EventConversionException, @@ -129,6 +136,7 @@ def setup_logging(): def _http_view_func_wrapper(function, request): + @execution_id.set_execution_context(request, _enable_execution_id_logging()) @functools.wraps(function) def view_func(path): return function(request._get_current_object()) @@ -143,6 +151,7 @@ def _run_cloud_event(function, request): def _typed_event_func_wrapper(function, request, inputType: Type): + @execution_id.set_execution_context(request, _enable_execution_id_logging()) def view_func(path): try: data = request.get_json() @@ -163,6 +172,7 @@ def view_func(path): def _cloud_event_view_func_wrapper(function, request): + @execution_id.set_execution_context(request, _enable_execution_id_logging()) def view_func(path): ce_exception = None event = None @@ -198,6 +208,7 @@ def view_func(path): def _event_view_func_wrapper(function, request): + @execution_id.set_execution_context(request, _enable_execution_id_logging()) def view_func(path): if event_conversion.is_convertable_cloud_event(request): # Convert this CloudEvent to the equivalent background event data and context. 
@@ -332,6 +343,9 @@ def create_app(target=None, source=None, signature_type=None): source_module, spec = _function_registry.load_function_module(source) + if _enable_execution_id_logging(): + _configure_app_execution_id_logging() + # Create the application _app = flask.Flask(target, template_folder=template_folder) _app.register_error_handler(500, crash_handler) @@ -355,6 +369,7 @@ def handle_none(rv): sys.stderr = _LoggingHandler("ERROR", sys.stderr) setup_logging() + _app.wsgi_app = execution_id.WsgiMiddleware(_app.wsgi_app) # Execute the module, within the application context with _app.app_context(): try: @@ -411,6 +426,29 @@ def __call__(self, *args, **kwargs): return self.app(*args, **kwargs) +def _configure_app_execution_id_logging(): + # Logging needs to be configured before app logger is accessed + logging.config.dictConfig( + { + "version": 1, + "handlers": { + "wsgi": { + "class": "logging.StreamHandler", + "stream": "ext://functions_framework.execution_id.logging_stream", + }, + }, + "root": {"level": "INFO", "handlers": ["wsgi"]}, + } + ) + + +def _enable_execution_id_logging(): + # Based on distutils.util.strtobool + truthy_values = ("y", "yes", "t", "true", "on", "1") + env_var_value = os.environ.get("LOG_EXECUTION_ID") + return env_var_value in truthy_values + + app = LazyWSGIApp() diff --git a/src/functions_framework/_http/gunicorn.py b/src/functions_framework/_http/gunicorn.py index 009a06b7..92cad90e 100644 --- a/src/functions_framework/_http/gunicorn.py +++ b/src/functions_framework/_http/gunicorn.py @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,17 +16,43 @@ import gunicorn.app.base +from gunicorn.workers.gthread import ThreadWorker + +from ..request_timeout import ThreadingTimeout + +# global for use in our custom gthread worker; the gunicorn arbiter spawns these +# and it's not possible to inject (and self.timeout means something different to +# async workers!) 
+# set/managed in gunicorn application init for test-friendliness +TIMEOUT_SECONDS = None + class GunicornApplication(gunicorn.app.base.BaseApplication): def __init__(self, app, host, port, debug, **options): + threads = int(os.environ.get("THREADS", (os.cpu_count() or 1) * 4)) + + global TIMEOUT_SECONDS + TIMEOUT_SECONDS = int(os.environ.get("CLOUD_RUN_TIMEOUT_SECONDS", 0)) + self.options = { "bind": "%s:%s" % (host, port), - "workers": os.environ.get("WORKERS", (os.cpu_count() or 1) * 4), - "threads": os.environ.get("THREADS", 1), - "timeout": os.environ.get("CLOUD_RUN_TIMEOUT_SECONDS", 300), - "loglevel": "error", + "workers": int(os.environ.get("WORKERS", 1)), + "threads": threads, + "loglevel": os.environ.get("GUNICORN_LOG_LEVEL", "error"), "limit_request_line": 0, } + + if ( + TIMEOUT_SECONDS > 0 + and threads > 1 + and (os.environ.get("THREADED_TIMEOUT_ENABLED", "False").lower() == "true") + ): # pragma: no cover + self.options["worker_class"] = ( + "functions_framework._http.gunicorn.GThreadWorkerWithTimeoutSupport" + ) + else: + self.options["timeout"] = TIMEOUT_SECONDS + self.options.update(options) self.app = app @@ -38,3 +64,9 @@ def load_config(self): def load(self): return self.app + + +class GThreadWorkerWithTimeoutSupport(ThreadWorker): # pragma: no cover + def handle_request(self, req, conn): + with ThreadingTimeout(TIMEOUT_SECONDS): + super(GThreadWorkerWithTimeoutSupport, self).handle_request(req, conn) diff --git a/src/functions_framework/exceptions.py b/src/functions_framework/exceptions.py index 671a28a4..81b9f8f0 100644 --- a/src/functions_framework/exceptions.py +++ b/src/functions_framework/exceptions.py @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -35,3 +35,7 @@ class MissingTargetException(FunctionsFrameworkException): class EventConversionException(FunctionsFrameworkException): pass + + +class RequestTimeoutException(FunctionsFrameworkException): + pass diff --git a/src/functions_framework/execution_id.py b/src/functions_framework/execution_id.py new file mode 100644 index 00000000..2b106531 --- /dev/null +++ b/src/functions_framework/execution_id.py @@ -0,0 +1,156 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import contextlib
+import functools
+import io
+import json
+import logging
+import random
+import re
+import string
+import sys
+
+import flask
+
+from werkzeug.local import LocalProxy
+
+_EXECUTION_ID_LENGTH = 12
+_EXECUTION_ID_CHARSET = string.digits + string.ascii_letters
+_LOGGING_API_LABELS_FIELD = "logging.googleapis.com/labels"
+_LOGGING_API_SPAN_ID_FIELD = "logging.googleapis.com/spanId"
+_TRACE_CONTEXT_REGEX_PATTERN = re.compile(
+    r"^(?P<trace_id>[\w\d]+)/(?P<span_id>\d+);o=(?P<options>[01])$"
+)
+EXECUTION_ID_REQUEST_HEADER = "Function-Execution-Id"
+TRACE_CONTEXT_REQUEST_HEADER = "X-Cloud-Trace-Context"
+
+logger = logging.getLogger(__name__)
+
+
+class ExecutionContext:
+    def __init__(self, execution_id=None, span_id=None):
+        self.execution_id = execution_id
+        self.span_id = span_id
+
+
+def _get_current_context():
+    return (
+        flask.g.execution_id_context
+        if flask.has_request_context() and "execution_id_context" in flask.g
+        else None
+    )
+
+
+def _set_current_context(context):
+    if flask.has_request_context():
+        flask.g.execution_id_context = context
+
+
+def _generate_execution_id():
+    return "".join(
+        _EXECUTION_ID_CHARSET[random.randrange(len(_EXECUTION_ID_CHARSET))]
+        for _ in range(_EXECUTION_ID_LENGTH)
+    )
+
+
+# Middleware to add execution id to request header if one does not already exist
+class WsgiMiddleware:
+    def __init__(self, wsgi_app):
+        self.wsgi_app = wsgi_app
+
+    def __call__(self, environ, start_response):
+        execution_id = (
+            environ.get("HTTP_FUNCTION_EXECUTION_ID") or _generate_execution_id()
+        )
+        environ["HTTP_FUNCTION_EXECUTION_ID"] = execution_id
+        return self.wsgi_app(environ, start_response)
+
+
+# Sets execution id and span id for the request
+def set_execution_context(request, enable_id_logging=False):
+    if enable_id_logging:
+        stdout_redirect = contextlib.redirect_stdout(
+            LoggingHandlerAddExecutionId(sys.stdout)
+        )
+        stderr_redirect = contextlib.redirect_stderr(
+            LoggingHandlerAddExecutionId(sys.stderr)
+        )
+    else:
+        stdout_redirect = contextlib.nullcontext()
+        stderr_redirect = contextlib.nullcontext()
+
+    def decorator(view_function):
+        @functools.wraps(view_function)
+        def wrapper(*args, **kwargs):
+            trace_context = re.match(
+                _TRACE_CONTEXT_REGEX_PATTERN,
+                request.headers.get(TRACE_CONTEXT_REQUEST_HEADER, ""),
+            )
+            execution_id = request.headers.get(EXECUTION_ID_REQUEST_HEADER)
+            span_id = trace_context.group("span_id") if trace_context else None
+            _set_current_context(ExecutionContext(execution_id, span_id))
+
+            with stderr_redirect, stdout_redirect:
+                return view_function(*args, **kwargs)
+
+        return wrapper
+
+    return decorator
+
+
+@LocalProxy
+def logging_stream():
+    return LoggingHandlerAddExecutionId(stream=flask.logging.wsgi_errors_stream)
+
+
+class LoggingHandlerAddExecutionId(io.TextIOWrapper):
+    def __new__(cls, stream=sys.stdout):
+        if isinstance(stream, LoggingHandlerAddExecutionId):
+            return stream
+        else:
+            return super(LoggingHandlerAddExecutionId, cls).__new__(cls)
+
+    def __init__(self, stream=sys.stdout):
+        io.TextIOWrapper.__init__(self, io.StringIO())
+        self.stream = stream
+
+    def write(self, contents):
+        if contents == "\n":
+            return
+        current_context = _get_current_context()
+        if current_context is None:
+            self.stream.write(contents + "\n")
+            self.stream.flush()
+            return
+        try:
+            execution_id = current_context.execution_id
+            span_id = current_context.span_id
+            payload = json.loads(contents)
+            if not isinstance(payload, dict):
+                payload = {"message": contents}
+        except json.JSONDecodeError:
+            if len(contents) > 0 and contents[-1]
== "\n": + contents = contents[:-1] + payload = {"message": contents} + if execution_id: + payload[_LOGGING_API_LABELS_FIELD] = payload.get( + _LOGGING_API_LABELS_FIELD, {} + ) + payload[_LOGGING_API_LABELS_FIELD]["execution_id"] = execution_id + if span_id: + payload[_LOGGING_API_SPAN_ID_FIELD] = span_id + self.stream.write(json.dumps(payload)) + self.stream.write("\n") + self.stream.flush() diff --git a/src/functions_framework/request_timeout.py b/src/functions_framework/request_timeout.py new file mode 100644 index 00000000..ed34f66e --- /dev/null +++ b/src/functions_framework/request_timeout.py @@ -0,0 +1,42 @@ +import ctypes +import logging +import threading + +from .exceptions import RequestTimeoutException + +logger = logging.getLogger(__name__) + + +class ThreadingTimeout(object): # pragma: no cover + def __init__(self, seconds): + self.seconds = seconds + self.target_tid = threading.current_thread().ident + self.timer = None + + def __enter__(self): + self.timer = threading.Timer(self.seconds, self._raise_exc) + self.timer.start() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.timer.cancel() + if exc_type is RequestTimeoutException: + logger.warning( + "Request handling exceeded {0} seconds timeout; terminating request handling...".format( + self.seconds + ), + exc_info=(exc_type, exc_val, exc_tb), + ) + return False + + def _raise_exc(self): + ret = ctypes.pythonapi.PyThreadState_SetAsyncExc( + ctypes.c_long(self.target_tid), ctypes.py_object(RequestTimeoutException) + ) + if ret == 0: + raise ValueError("Invalid thread ID {}".format(self.target_tid)) + elif ret > 1: + ctypes.pythonapi.PyThreadState_SetAsyncExc( + ctypes.c_long(self.target_tid), None + ) + raise SystemError("PyThreadState_SetAsyncExc failed") diff --git a/tests/test_execution_id.py b/tests/test_execution_id.py new file mode 100644 index 00000000..a2601817 --- /dev/null +++ b/tests/test_execution_id.py @@ -0,0 +1,382 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import asyncio +import json +import pathlib +import re +import sys + +from functools import partial +from unittest.mock import Mock + +import pretend +import pytest + +from functions_framework import create_app, execution_id + +TEST_FUNCTIONS_DIR = pathlib.Path(__file__).resolve().parent / "test_functions" +TEST_EXECUTION_ID = "test_execution_id" +TEST_SPAN_ID = "123456" + + +def test_user_function_can_retrieve_execution_id_from_header(): + source = TEST_FUNCTIONS_DIR / "execution_id" / "main.py" + target = "function" + client = create_app(target, source).test_client() + resp = client.post( + "/", + headers={ + "Function-Execution-Id": TEST_EXECUTION_ID, + "Content-Type": "application/json", + }, + ) + + assert resp.get_json()["execution_id"] == TEST_EXECUTION_ID + + +def test_uncaught_exception_in_user_function_sets_execution_id(capsys, monkeypatch): + monkeypatch.setenv("LOG_EXECUTION_ID", "true") + source = TEST_FUNCTIONS_DIR / "execution_id" / "main.py" + target = "error" + app = create_app(target, source) + client = app.test_client() + resp = client.post( + "/", + headers={ + "Function-Execution-Id": TEST_EXECUTION_ID, + "Content-Type": "application/json", + }, + ) + assert resp.status_code == 500 + record = capsys.readouterr() + assert f'"execution_id": "{TEST_EXECUTION_ID}"' in record.err + + +def test_print_from_user_function_sets_execution_id(capsys, monkeypatch): + monkeypatch.setenv("LOG_EXECUTION_ID", "true") + source = TEST_FUNCTIONS_DIR / "execution_id" / "main.py" + target = "print_message" + app = create_app(target, source) + client = app.test_client() + client.post( + "/", + headers={ + "Function-Execution-Id": TEST_EXECUTION_ID, + "Content-Type": "application/json", + }, + json={"message": "some-message"}, + ) + record = capsys.readouterr() + assert f'"execution_id": "{TEST_EXECUTION_ID}"' in record.out + assert '"message": "some-message"' in record.out + + +def test_log_from_user_function_sets_execution_id(capsys, monkeypatch): + monkeypatch.setenv("LOG_EXECUTION_ID", "true") + source = TEST_FUNCTIONS_DIR / "execution_id" / "main.py" + target = "log_message" + app = create_app(target, source) + client = app.test_client() + client.post( + "/", + headers={ + "Function-Execution-Id": TEST_EXECUTION_ID, + "Content-Type": "application/json", + }, + json={"message": json.dumps({"custom-field": "some-message"})}, + ) + record = capsys.readouterr() + assert f'"execution_id": "{TEST_EXECUTION_ID}"' in record.err + assert '"custom-field": "some-message"' in record.err + + +def test_user_function_can_retrieve_generated_execution_id(monkeypatch): + monkeypatch.setattr( + execution_id, "_generate_execution_id", lambda: TEST_EXECUTION_ID + ) + source = TEST_FUNCTIONS_DIR / "execution_id" / "main.py" + target = "function" + client = create_app(target, source).test_client() + resp = client.post( + "/", + headers={ + "Content-Type": "application/json", + }, + ) + + assert resp.get_json()["execution_id"] == TEST_EXECUTION_ID + + +def test_does_not_set_execution_id_when_not_enabled(capsys): + source = TEST_FUNCTIONS_DIR / "execution_id" / "main.py" + target = "print_message" + app = create_app(target, source) + client = app.test_client() + client.post( + "/", + headers={ + "Function-Execution-Id": TEST_EXECUTION_ID, + "Content-Type": "application/json", + }, + json={"message": "some-message"}, + ) + record = capsys.readouterr() + assert f'"execution_id": "{TEST_EXECUTION_ID}"' not in record.out + assert "some-message" in record.out + + +def 
test_does_not_set_execution_id_when_env_var_is_false(capsys, monkeypatch): + monkeypatch.setenv("LOG_EXECUTION_ID", "false") + source = TEST_FUNCTIONS_DIR / "execution_id" / "main.py" + target = "print_message" + app = create_app(target, source) + client = app.test_client() + client.post( + "/", + headers={ + "Function-Execution-Id": TEST_EXECUTION_ID, + "Content-Type": "application/json", + }, + json={"message": "some-message"}, + ) + record = capsys.readouterr() + assert f'"execution_id": "{TEST_EXECUTION_ID}"' not in record.out + assert "some-message" in record.out + + +def test_does_not_set_execution_id_when_env_var_is_not_bool_like(capsys, monkeypatch): + monkeypatch.setenv("LOG_EXECUTION_ID", "maybe") + source = TEST_FUNCTIONS_DIR / "execution_id" / "main.py" + target = "print_message" + app = create_app(target, source) + client = app.test_client() + client.post( + "/", + headers={ + "Function-Execution-Id": TEST_EXECUTION_ID, + "Content-Type": "application/json", + }, + json={"message": "some-message"}, + ) + record = capsys.readouterr() + assert f'"execution_id": "{TEST_EXECUTION_ID}"' not in record.out + assert "some-message" in record.out + + +def test_generate_execution_id(): + expected_matching_regex = "^[0-9a-zA-Z]{12}$" + actual_execution_id = execution_id._generate_execution_id() + + match = re.match(expected_matching_regex, actual_execution_id).group(0) + assert match == actual_execution_id + + +@pytest.mark.parametrize( + "headers,expected_execution_id,expected_span_id", + [ + ( + { + "X-Cloud-Trace-Context": f"TRACE_ID/{TEST_SPAN_ID};o=1", + "Function-Execution-Id": TEST_EXECUTION_ID, + }, + TEST_EXECUTION_ID, + TEST_SPAN_ID, + ), + ( + { + "X-Cloud-Trace-Context": f"TRACE_ID/{TEST_SPAN_ID};o=1", + "Function-Execution-Id": TEST_EXECUTION_ID, + }, + TEST_EXECUTION_ID, + TEST_SPAN_ID, + ), + ({}, None, None), + ( + { + "X-Cloud-Trace-Context": "malformed trace context string", + "Function-Execution-Id": TEST_EXECUTION_ID, + }, + TEST_EXECUTION_ID, + None, + ), + ], +) +def test_set_execution_context( + headers, expected_execution_id, expected_span_id, monkeypatch +): + request = pretend.stub(headers=headers) + + def view_func(): + pass + + monkeypatch.setattr( + execution_id, "_generate_execution_id", lambda: TEST_EXECUTION_ID + ) + mock_g = Mock() + monkeypatch.setattr(execution_id.flask, "g", mock_g) + monkeypatch.setattr(execution_id.flask, "has_request_context", lambda: True) + execution_id.set_execution_context(request)(view_func)() + + assert mock_g.execution_id_context.span_id == expected_span_id + assert mock_g.execution_id_context.execution_id == expected_execution_id + + +@pytest.mark.parametrize( + "log_message,expected_log_json", + [ + ("text message", {"message": "text message"}), + ( + json.dumps({"custom-field1": "value1", "custom-field2": "value2"}), + {"custom-field1": "value1", "custom-field2": "value2"}, + ), + ("[]", {"message": "[]"}), + ], +) +def test_log_handler(monkeypatch, log_message, expected_log_json, capsys): + log_handler = execution_id.LoggingHandlerAddExecutionId(stream=sys.stdout) + monkeypatch.setattr( + execution_id, + "_get_current_context", + lambda: execution_id.ExecutionContext( + span_id=TEST_SPAN_ID, execution_id=TEST_EXECUTION_ID + ), + ) + expected_log_json.update( + { + "logging.googleapis.com/labels": { + "execution_id": TEST_EXECUTION_ID, + }, + "logging.googleapis.com/spanId": TEST_SPAN_ID, + } + ) + + log_handler.write(log_message) + record = capsys.readouterr() + assert json.loads(record.out) == expected_log_json + assert 
json.loads(record.out) == expected_log_json + + +def test_log_handler_without_context_logs_unmodified(monkeypatch, capsys): + log_handler = execution_id.LoggingHandlerAddExecutionId(stream=sys.stdout) + monkeypatch.setattr( + execution_id, + "_get_current_context", + lambda: None, + ) + expected_message = "log message\n" + + log_handler.write("log message") + record = capsys.readouterr() + assert record.out == expected_message + + +def test_log_handler_ignores_newlines(monkeypatch, capsys): + log_handler = execution_id.LoggingHandlerAddExecutionId(stream=sys.stdout) + monkeypatch.setattr( + execution_id, + "_get_current_context", + lambda: execution_id.ExecutionContext( + span_id=TEST_SPAN_ID, execution_id=TEST_EXECUTION_ID + ), + ) + + log_handler.write("\n") + record = capsys.readouterr() + assert record.out == "" + + +def test_log_handler_does_not_nest(): + log_handler_1 = execution_id.LoggingHandlerAddExecutionId(stream=sys.stdout) + log_handler_2 = execution_id.LoggingHandlerAddExecutionId(log_handler_1) + + assert log_handler_1 == log_handler_2 + + +def test_log_handler_omits_empty_execution_context(monkeypatch, capsys): + log_handler = execution_id.LoggingHandlerAddExecutionId(stream=sys.stdout) + monkeypatch.setattr( + execution_id, + "_get_current_context", + lambda: execution_id.ExecutionContext(span_id=None, execution_id=None), + ) + expected_json = { + "message": "some message", + } + + log_handler.write("some message") + record = capsys.readouterr() + assert json.loads(record.out) == expected_json + + +@pytest.mark.asyncio +async def test_maintains_execution_id_for_concurrent_requests(monkeypatch, capsys): + monkeypatch.setenv("LOG_EXECUTION_ID", "true") + monkeypatch.setattr( + execution_id, + "_generate_execution_id", + Mock(side_effect=("test-execution-id-1", "test-execution-id-2")), + ) + + expected_logs = ( + { + "message": "message1", + "logging.googleapis.com/labels": {"execution_id": "test-execution-id-1"}, + }, + { + "message": "message2", + "logging.googleapis.com/labels": {"execution_id": "test-execution-id-2"}, + }, + { + "message": "message1", + "logging.googleapis.com/labels": {"execution_id": "test-execution-id-1"}, + }, + { + "message": "message2", + "logging.googleapis.com/labels": {"execution_id": "test-execution-id-2"}, + }, + ) + + source = TEST_FUNCTIONS_DIR / "execution_id" / "main.py" + target = "sleep" + client = create_app(target, source).test_client() + loop = asyncio.get_event_loop() + response1 = loop.run_in_executor( + None, + partial( + client.post, + "/", + headers={ + "Content-Type": "application/json", + }, + json={"message": "message1"}, + ), + ) + response2 = loop.run_in_executor( + None, + partial( + client.post, + "/", + headers={ + "Content-Type": "application/json", + }, + json={"message": "message2"}, + ), + ) + await asyncio.wait((response1, response2)) + record = capsys.readouterr() + logs = record.err.strip().split("\n") + logs_as_json = tuple(json.loads(log) for log in logs) + + sort_key = lambda d: d["message"] + assert sorted(logs_as_json, key=sort_key) == sorted(expected_logs, key=sort_key) diff --git a/tests/test_functions/execution_id/main.py b/tests/test_functions/execution_id/main.py new file mode 100644 index 00000000..72d1eaff --- /dev/null +++ b/tests/test_functions/execution_id/main.py @@ -0,0 +1,33 @@ +import logging +import time + +logger = logging.getLogger(__name__) + + +def print_message(request): + json = request.get_json(silent=True) + print(json.get("message")) + return "success", 200 + + +def log_message(request): 
+ json = request.get_json(silent=True) + logger.info(json.get("message")) + return "success", 200 + + +def function(request): + return {"execution_id": request.headers.get("Function-Execution-Id")} + + +def error(request): + return 1 / 0 + + +def sleep(request): + json = request.get_json(silent=True) + message = json.get("message") + logger.info(message) + time.sleep(1) + logger.info(message) + return "success", 200 diff --git a/tests/test_functions/timeout/main.py b/tests/test_functions/timeout/main.py new file mode 100644 index 00000000..09efeb88 --- /dev/null +++ b/tests/test_functions/timeout/main.py @@ -0,0 +1,12 @@ +import logging +import time + +logger = logging.getLogger(__name__) + + +def function(request): + # sleep for 1200 total ms (1.2 sec) + for _ in range(12): + time.sleep(0.1) + logger.info("some extra logging message") + return "success", 200 diff --git a/tests/test_http.py b/tests/test_http.py index 0a46fbea..fbfac9d2 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -97,17 +97,17 @@ def test_gunicorn_application(debug): assert gunicorn_app.app == app assert gunicorn_app.options == { "bind": "%s:%s" % (host, port), - "workers": os.cpu_count() * 4, - "threads": 1, - "timeout": 300, + "workers": 1, + "threads": os.cpu_count() * 4, + "timeout": 0, "loglevel": "error", "limit_request_line": 0, } assert gunicorn_app.cfg.bind == ["1.2.3.4:1234"] - assert gunicorn_app.cfg.workers == os.cpu_count() * 4 - assert gunicorn_app.cfg.threads == 1 - assert gunicorn_app.cfg.timeout == 300 + assert gunicorn_app.cfg.workers == 1 + assert gunicorn_app.cfg.threads == os.cpu_count() * 4 + assert gunicorn_app.cfg.timeout == 0 assert gunicorn_app.load() == app diff --git a/tests/test_timeouts.py b/tests/test_timeouts.py new file mode 100644 index 00000000..9637de67 --- /dev/null +++ b/tests/test_timeouts.py @@ -0,0 +1,265 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import pathlib +import socket +import time + +from multiprocessing import Process + +import pytest +import requests + +ff_gunicorn = pytest.importorskip("functions_framework._http.gunicorn") + + +from functions_framework import create_app + +TEST_FUNCTIONS_DIR = pathlib.Path(__file__).resolve().parent / "test_functions" +TEST_HOST = "0.0.0.0" +TEST_PORT = "8080" + + +@pytest.fixture(autouse=True) +def run_around_tests(): + # the test samples test also listens on 8080, so let's be good stewards of + # the port and make sure it's free + _wait_for_no_listen(TEST_HOST, TEST_PORT) + yield + _wait_for_no_listen(TEST_HOST, TEST_PORT) + + +@pytest.mark.skipif("platform.system() == 'Windows'") +@pytest.mark.skipif("platform.system() == 'Darwin'") +@pytest.mark.slow_integration_test +def test_no_timeout_allows_request_processing_to_finish(): + source = TEST_FUNCTIONS_DIR / "timeout" / "main.py" + target = "function" + + app = create_app(target, source) + + options = {} + + gunicorn_app = ff_gunicorn.GunicornApplication( + app, TEST_HOST, TEST_PORT, False, **options + ) + + gunicorn_p = Process(target=gunicorn_app.run) + gunicorn_p.start() + + _wait_for_listen(TEST_HOST, TEST_PORT) + + result = requests.get("http://{}:{}/".format(TEST_HOST, TEST_PORT)) + + gunicorn_p.terminate() + gunicorn_p.join() + + assert result.status_code == 200 + + +@pytest.mark.skipif("platform.system() == 'Windows'") +@pytest.mark.skipif("platform.system() == 'Darwin'") +@pytest.mark.slow_integration_test +def test_timeout_but_not_threaded_timeout_enabled_does_not_kill(monkeypatch): + monkeypatch.setenv("CLOUD_RUN_TIMEOUT_SECONDS", "1") + monkeypatch.setenv("THREADED_TIMEOUT_ENABLED", "false") + source = TEST_FUNCTIONS_DIR / "timeout" / "main.py" + target = "function" + + app = create_app(target, source) + + options = {} + + gunicorn_app = ff_gunicorn.GunicornApplication( + app, TEST_HOST, TEST_PORT, False, **options + ) + + gunicorn_p = Process(target=gunicorn_app.run) + gunicorn_p.start() + + _wait_for_listen(TEST_HOST, TEST_PORT) + + result = requests.get("http://{}:{}/".format(TEST_HOST, TEST_PORT)) + + gunicorn_p.terminate() + gunicorn_p.join() + + assert result.status_code == 200 + + +@pytest.mark.skipif("platform.system() == 'Windows'") +@pytest.mark.skipif("platform.system() == 'Darwin'") +@pytest.mark.slow_integration_test +def test_timeout_and_threaded_timeout_enabled_kills(monkeypatch): + monkeypatch.setenv("CLOUD_RUN_TIMEOUT_SECONDS", "1") + monkeypatch.setenv("THREADED_TIMEOUT_ENABLED", "true") + source = TEST_FUNCTIONS_DIR / "timeout" / "main.py" + target = "function" + + app = create_app(target, source) + + options = {} + + gunicorn_app = ff_gunicorn.GunicornApplication( + app, TEST_HOST, TEST_PORT, False, **options + ) + + gunicorn_p = Process(target=gunicorn_app.run) + gunicorn_p.start() + + _wait_for_listen(TEST_HOST, TEST_PORT) + + result = requests.get("http://{}:{}/".format(TEST_HOST, TEST_PORT)) + + gunicorn_p.terminate() + gunicorn_p.join() + + # Any exception raised in execution is a 500 error. Cloud Functions 1st gen and + # 2nd gen/Cloud Run infrastructure doing the timeout will return a 408 (gen 1) + # or 504 (gen 2/CR) at the infrastructure layer when request timeouts happen, + # and this code will only be available to the user in logs. 
+ assert result.status_code == 500 + + +@pytest.mark.skipif("platform.system() == 'Windows'") +@pytest.mark.skipif("platform.system() == 'Darwin'") +@pytest.mark.slow_integration_test +def test_timeout_and_threaded_timeout_enabled_but_timeout_not_exceeded_doesnt_kill( + monkeypatch, +): + monkeypatch.setenv("CLOUD_RUN_TIMEOUT_SECONDS", "2") + monkeypatch.setenv("THREADED_TIMEOUT_ENABLED", "true") + source = TEST_FUNCTIONS_DIR / "timeout" / "main.py" + target = "function" + + app = create_app(target, source) + + options = {} + + gunicorn_app = ff_gunicorn.GunicornApplication( + app, TEST_HOST, TEST_PORT, False, **options + ) + + gunicorn_p = Process(target=gunicorn_app.run) + gunicorn_p.start() + + _wait_for_listen(TEST_HOST, TEST_PORT) + + result = requests.get("http://{}:{}/".format(TEST_HOST, TEST_PORT)) + + gunicorn_p.terminate() + gunicorn_p.join() + + assert result.status_code == 200 + + +@pytest.mark.skipif("platform.system() == 'Windows'") +@pytest.mark.skipif("platform.system() == 'Darwin'") +@pytest.mark.slow_integration_test +def test_timeout_sync_worker_kills_on_timeout( + monkeypatch, +): + monkeypatch.setenv("CLOUD_RUN_TIMEOUT_SECONDS", "1") + monkeypatch.setenv("WORKERS", 2) + monkeypatch.setenv("THREADS", 1) + source = TEST_FUNCTIONS_DIR / "timeout" / "main.py" + target = "function" + + app = create_app(target, source) + + options = {} + + gunicorn_app = ff_gunicorn.GunicornApplication( + app, TEST_HOST, TEST_PORT, False, **options + ) + + gunicorn_p = Process(target=gunicorn_app.run) + gunicorn_p.start() + + _wait_for_listen(TEST_HOST, TEST_PORT) + + result = requests.get("http://{}:{}/".format(TEST_HOST, TEST_PORT)) + + gunicorn_p.terminate() + gunicorn_p.join() + + assert result.status_code == 500 + + +@pytest.mark.skipif("platform.system() == 'Windows'") +@pytest.mark.skipif("platform.system() == 'Darwin'") +@pytest.mark.slow_integration_test +def test_timeout_sync_worker_does_not_kill_if_less_than_timeout( + monkeypatch, +): + monkeypatch.setenv("CLOUD_RUN_TIMEOUT_SECONDS", "2") + monkeypatch.setenv("WORKERS", 2) + monkeypatch.setenv("THREADS", 1) + source = TEST_FUNCTIONS_DIR / "timeout" / "main.py" + target = "function" + + app = create_app(target, source) + + options = {} + + gunicorn_app = ff_gunicorn.GunicornApplication( + app, TEST_HOST, TEST_PORT, False, **options + ) + + gunicorn_p = Process(target=gunicorn_app.run) + gunicorn_p.start() + + _wait_for_listen(TEST_HOST, TEST_PORT) + + result = requests.get("http://{}:{}/".format(TEST_HOST, TEST_PORT)) + + gunicorn_p.terminate() + gunicorn_p.join() + + assert result.status_code == 200 + + +@pytest.mark.skip +def _wait_for_listen(host, port, timeout=10): + # Used in tests to make sure that the gunicorn app has booted and is + # listening before sending a test request + start_time = time.perf_counter() + while True: + try: + with socket.create_connection((host, port), timeout=timeout): + break + except OSError as ex: + time.sleep(0.01) + if time.perf_counter() - start_time >= timeout: + raise TimeoutError( + "Waited too long for port {} on host {} to start accepting " + "connections.".format(port, host) + ) from ex + + +@pytest.mark.skip +def _wait_for_no_listen(host, port, timeout=10): + # Used in tests to make sure that the port is actually free after + # the process binding to it should have been killed + start_time = time.perf_counter() + while True: + try: + with socket.create_connection((host, port), timeout=timeout): + time.sleep(0.01) + if time.perf_counter() - start_time >= timeout: + raise TimeoutError( + 
"Waited too long for port {} on host {} to stop accepting " + "connections.".format(port, host) + ) + except OSError as ex: + break diff --git a/tests/test_view_functions.py b/tests/test_view_functions.py index f69b2155..a32fe9e4 100644 --- a/tests/test_view_functions.py +++ b/tests/test_view_functions.py @@ -25,7 +25,7 @@ def test_http_view_func_wrapper(): function = pretend.call_recorder(lambda request: "Hello") request_object = pretend.stub() - local_proxy = pretend.stub(_get_current_object=lambda: request_object) + local_proxy = pretend.stub(_get_current_object=lambda: request_object, headers={}) view_func = functions_framework._http_view_func_wrapper(function, local_proxy) view_func("/some/path") diff --git a/tox.ini b/tox.ini index e8c555b5..6ba6d3b4 100644 --- a/tox.ini +++ b/tox.ini @@ -5,6 +5,7 @@ envlist = py{35,36,37,38,39,310}-{ubuntu-latest,macos-latest,windows-latest},lin usedevelop = true deps = docker + pytest-asyncio pytest-cov pytest-integration pretend