diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index a23cc35bc..000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,54 +0,0 @@ -# Python CircleCI 2.0 configuration file -# -# Check https://circleci.com/docs/2.0/language-python/ for more details -# -version: 2 - -jobs: - build: - branches: - ignore: - - gh-pages - docker: - - image: googleapis/nox:0.18.2 - - image: mysql:5.7 - environment: - MYSQL_ROOT_HOST: "%" - MYSQL_ROOT_PASSWORD: 666666 - ports: - - 3306:3306 - - image: circleci/postgres:9.6 - environment: - POSTGRES_PASSWORD: 666666 - ports: - - 5432:5432 - - working_directory: ~/repo - - steps: - - checkout - - run: - name: Decrypt credentials - command: | - if [ -n "$GOOGLE_APPLICATION_CREDENTIALS" ]; then - openssl aes-256-cbc -d -a -k "$GOOGLE_CREDENTIALS_PASSPHRASE" \ - -in tests/system/credentials.json.enc \ - -out $GOOGLE_APPLICATION_CREDENTIALS - else - echo "No credentials. System tests will not run." - fi - - run: - name: Run tests - opencensus - command: | - pip install --upgrade nox - nox -f noxfile.py - - deploy: - name: Push to PyPI (if this is a release tag). - command: scripts/twine_upload.sh - -deployment: - tag_build_for_cci2: - branch: /v([0-9]+)\.([0-9]+)\.([0-9]+)/ - tag: /v([0-9]+)\.([0-9]+)\.([0-9]+)/ - commands: - - true diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index fa94a04c4..2e1dc6b60 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,4 +2,4 @@ # This file controls who is tagged for review for any given pull request. 
# For anything not explicitly taken by someone else: -* @census-instrumentation/global-owners @aabmass @hectorhdzg @lzchen @songy23 @victoraugustolls +* @census-instrumentation/global-owners @aabmass @hectorhdzg @jeremydvoss @lzchen diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 59f620086..aa8a05f51 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -10,11 +10,9 @@ on: jobs: build: - # 18.04 needed for python3.4 - runs-on: ubuntu-18.04 + runs-on: ubuntu-20.04 env: # We use these variables to convert between tox and GHA version literals - py27: 2.7 py35: 3.5 py36: 3.6 py37: 3.7 @@ -24,7 +22,7 @@ jobs: # ensures the entire test matrix is run, even if one permutation fails fail-fast: false matrix: - python-version: [py27, py35, py36, py37, py38, py39] + python-version: [py35, py36, py37, py38, py39] steps: - name: Checkout code uses: actions/checkout@v2 @@ -33,7 +31,8 @@ jobs: with: python-version: ${{ env[matrix.python-version] }} - name: Install tox - run: pip install -U tox-factor + # Pin tox 3 because of https://github.com/rpkilby/tox-factor/issues/18 + run: pip install -U tox==3.27.1 tox-factor - name: Cache tox environment uses: actions/cache@v2 with: @@ -42,3 +41,21 @@ jobs: key: v1-tox-${{ matrix.python-version }}-${{ hashFiles('tox.ini', '**/setup.py') }} - name: run tox run: tox -f ${{ matrix.python-version }} + build-27: + runs-on: ubuntu-20.04 + container: + image: python:2.7.18-buster + env: + py27: 2.7 + steps: + - name: Checkout code + uses: actions/checkout@v2 + - name: Install tox + run: pip install -U tox==3.27.1 tox-factor + - name: Cache tox environment + uses: actions/cache@v2 + with: + path: .tox + key: v1-tox-27-${{ hashFiles('tox.ini', '**/setup.py') }} + - name: Run tox for Python 2.7 + run: tox -f py27 diff --git a/CHANGELOG.md b/CHANGELOG.md index 77556de29..119d0f3e1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,43 @@ ## Unreleased +# 0.11.4 +Released 2024-01-03 +- Changed 
bit-mapping for `httpx` and `fastapi` integrations +([#1239](https://github.com/census-instrumentation/opencensus-python/pull/1239)) + +# 0.11.3 +Released 2023-09-18 + +- Updated `azure` modules + +# 0.11.2 +Released 2023-03-10 + +- Updated `azure`, `fastapi`,`flask` modules + +# 0.11.1 +Released 2023-01-18 + +- Updated `azure`, `httpx` modules + +# 0.11.0 +Released 2022-08-03 + +- Updated `azure`, `context`, `flask`, `requests` modules + +# 0.10.0 +Released 2022-07-05 + +- Add kwargs to derived gauge +([#1135](https://github.com/census-instrumentation/opencensus-python/pull/1135)) + +# 0.9.0 +Released 2022-04-20 + +- Make sure handler.flush() doesn't deadlock +([#1112](https://github.com/census-instrumentation/opencensus-python/pull/1112)) + # 0.8.0 Released 2021-10-05 diff --git a/README.rst b/README.rst index 4bc43845d..3430c557d 100644 --- a/README.rst +++ b/README.rst @@ -1,3 +1,19 @@ + **Warning** + + OpenCensus and OpenTracing have merged to form + `OpenTelemetry `__, which serves as the + next major version of OpenCensus and OpenTracing. + + OpenTelemetry has now reached feature parity with OpenCensus, with + tracing and metrics SDKs available in .NET, Golang, Java, NodeJS, and + Python. **All OpenCensus Github repositories will be archived**. We + encourage users to migrate to OpenTelemetry. + + To help you gradually migrate your instrumentation to OpenTelemetry, + bridges are available in Java, Go, Python (tracing only), and JS. `Read the + full blog post to learn more + `__. + OpenCensus - A stats collection and distributed tracing framework ================================================================= @@ -199,6 +215,7 @@ OpenCensus supports integration with popular web frameworks, client libraries an - `Google Cloud Client Libraries`_ - `gRPC`_ - `httplib`_ +- `httpx`_ - `logging`_ - `MySQL`_ - `PostgreSQL`_ @@ -240,10 +257,12 @@ Trace Exporter .. 
_Datadog: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-datadog .. _Django: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-django .. _Flask: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-flask +.. _FastAPI: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-fastapi .. _gevent: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-gevent .. _Google Cloud Client Libraries: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-google-cloud-clientlibs .. _gRPC: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-grpc .. _httplib: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-httplib +.. _httpx: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-httpx .. _Jaeger: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-jaeger .. _logging: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-logging .. 
_MySQL: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-mysql diff --git a/context/opencensus-context/CHANGELOG.md b/context/opencensus-context/CHANGELOG.md index 55518cfea..d33b890a6 100644 --- a/context/opencensus-context/CHANGELOG.md +++ b/context/opencensus-context/CHANGELOG.md @@ -2,6 +2,12 @@ ## Unreleased +## 0.1.3 +Released 2022-08-03 + +- Move `version.py` file into `runtime_context` folder +([#1143](https://github.com/census-instrumentation/opencensus-python/pull/1143)) + ## 0.1.2 Released 2020-06-29 diff --git a/context/opencensus-context/version.py b/context/opencensus-context/opencensus/common/runtime_context/version.py similarity index 100% rename from context/opencensus-context/version.py rename to context/opencensus-context/opencensus/common/runtime_context/version.py diff --git a/context/opencensus-context/setup.py b/context/opencensus-context/setup.py index 169e6fcfd..cba191aa0 100644 --- a/context/opencensus-context/setup.py +++ b/context/opencensus-context/setup.py @@ -12,13 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os + from setuptools import find_packages, setup -from version import __version__ +BASE_DIR = os.path.dirname(__file__) +VERSION_FILENAME = os.path.join( + BASE_DIR, "opencensus", "common", "runtime_context", "version.py" +) +PACKAGE_INFO = {} +with open(VERSION_FILENAME) as f: + exec(f.read(), PACKAGE_INFO) setup( name='opencensus-context', - version=__version__, # noqa + version=PACKAGE_INFO["__version__"], # noqa author='OpenCensus Authors', author_email='census-developers@googlegroups.com', classifiers=[ diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 07cb7b417..3ece097d9 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -2,7 +2,130 @@ ## Unreleased +## 1.1.15 + +Released 2025-06-03 + +- Switch ordering for Statsbeat Attach detection to prioritize Azure Functions + ([#1251](https://github.com/census-instrumentation/opencensus-python/pull/1251)) + +## 1.1.14 + +Released 2025-01-06 + +- Remove status code `206` from retry code + only count batch level for statsbeat +([#1247](https://github.com/census-instrumentation/opencensus-python/pull/1247)) + +## 1.1.13 + +Released 2024-01-03 + +- Changed bit-mapping for `httpx` and `fastapi` integrations +([#1239](https://github.com/census-instrumentation/opencensus-python/pull/1239)) + +## 1.1.12 + +Released 2023-11-28 + +- Fix missing/None fields in `ExceptionDetails` +([#1232](https://github.com/census-instrumentation/opencensus-python/pull/1232)) +- Fix missing/None typeName field in `ExceptionDetails` +([#1234](https://github.com/census-instrumentation/opencensus-python/pull/1234)) + +## 1.1.11 + +Released 2023-10-12 + +- Add str fallback to envelope serialization +([#1196](https://github.com/census-instrumentation/opencensus-python/pull/1196)) +- Remove outerId from exceptiondata +([#1221](https://github.com/census-instrumentation/opencensus-python/pull/1221)) + +## 1.1.10 + +Released 2023-09-18 + 
+- Add str fallback to envelope serialization +([#1196](https://github.com/census-instrumentation/opencensus-python/pull/1196)) + +## 1.1.9 + +Released 2023-03-10 + +- Fix export of exception information in traces +([#1187](https://github.com/census-instrumentation/opencensus-python/pull/1187)) +- Modify metrics exporter to include setting export interval to 60s +([#1193](https://github.com/census-instrumentation/opencensus-python/pull/1193)) + +## 1.1.8 + +Released 2023-01-18 + +- Disable storage for statsbeat if storage is disabled for exporter +([#1155](https://github.com/census-instrumentation/opencensus-python/pull/1155)) +- Add UK to eu statsbeats +([#1181](https://github.com/census-instrumentation/opencensus-python/pull/1181)) + +## 1.1.7 + +Released 2022-08-18 + +- Add storage existence checks to storing and transmitting in exporter +([#1150](https://github.com/census-instrumentation/opencensus-python/pull/1150)) +- Add 502 and 504 status codes as retriable +([#1153](https://github.com/census-instrumentation/opencensus-python/pull/1153)) +- Fix statsbeat bug - exporting zero values for network statsbeat +([#1155](https://github.com/census-instrumentation/opencensus-python/pull/1155)) + +## 1.1.6 + +Released 2022-08-03 + +- Add statusCode and exceptionType to network statsbeat +([#1138](https://github.com/census-instrumentation/opencensus-python/pull/1138)) + +## 1.1.5 + +Released 2022-07-05 + +- Allow specifying metrics (custom_measurements) for Azure custom events +([#1117](https://github.com/census-instrumentation/opencensus-python/pull/1117)) +- Shutdown Statsbeat when hitting error/exception threshold +([#1127](https://github.com/census-instrumentation/opencensus-python/pull/1127)) +- Fix failure counting statsbeat - refactor status code logic in transport +([#1132](https://github.com/census-instrumentation/opencensus-python/pull/1132)) +- Use logging handler close instead of custom atexit hook 
+([#1134](https://github.com/census-instrumentation/opencensus-python/pull/1134)) + +## 1.1.4 + +Released 2022-04-20 + +- Statsbeat bug fixes - status codes +([#1113](https://github.com/census-instrumentation/opencensus-python/pull/1113)) +- Statsbeat bug fixes - do not log if statsbeat +([#1116](https://github.com/census-instrumentation/opencensus-python/pull/1116)) +- Add deprecation warning for explicitly using instrumentation key +([#1118](https://github.com/census-instrumentation/opencensus-python/pull/1118)) + +## 1.1.3 + +Released 2022-03-03 + +- Hotfix for version number +([#1108](https://github.com/census-instrumentation/opencensus-python/pull/1108)) + +## 1.1.2 + +Released 2022-03-03 + +- Statsbeat bug fixes, shorten host in network stats +([#1100](https://github.com/census-instrumentation/opencensus-python/pull/1100)) +- Support statsbeat in EU regions +([#1105](https://github.com/census-instrumentation/opencensus-python/pull/1105)) + ## 1.1.1 + Released 2022-01-19 - Fix statsbeats metric names @@ -11,6 +134,7 @@ Released 2022-01-19 ([#1093](https://github.com/census-instrumentation/opencensus-python/pull/1093)) ## 1.1.0 + Released 2021-10-05 - Enable AAD authorization via TokenCredential @@ -27,6 +151,7 @@ Released 2021-10-05 ([#1078](https://github.com/census-instrumentation/opencensus-python/pull/1078)) ## 1.0.8 + Released 2021-05-13 - Fix `logger.exception` with no exception info throwing error @@ -35,12 +160,14 @@ Released 2021-05-13 ([#1016](https://github.com/census-instrumentation/opencensus-python/pull/1016)) ## 1.0.7 + Released 2021-01-25 - Hotfix ([#1004](https://github.com/census-instrumentation/opencensus-python/pull/1004)) ## 1.0.6 + Released 2021-01-14 - Disable heartbeat metrics in exporters @@ -49,6 +176,7 @@ Released 2021-01-14 ([#986](https://github.com/census-instrumentation/opencensus-python/pull/986)) ## 1.0.5 + Released 2020-10-13 - Attach rate metrics via Heartbeat for Web and Function apps @@ -63,6 +191,7 @@ Released 2020-10-13 
([#949](https://github.com/census-instrumentation/opencensus-python/pull/949)) ## 1.0.4 + Released 2020-06-29 - Remove dependency rate from standard metrics @@ -71,6 +200,7 @@ Released 2020-06-29 ([#925](https://github.com/census-instrumentation/opencensus-python/pull/925)) ## 1.0.3 + Released 2020-06-17 - Change default path of local storage @@ -79,6 +209,7 @@ Released 2020-06-17 ([#902](https://github.com/census-instrumentation/opencensus-python/pull/902)) ## 1.0.2 + Released 2020-02-04 - Add local storage and retry logic for Azure Metrics Exporter @@ -89,6 +220,7 @@ Released 2020-02-04 ([#851](https://github.com/census-instrumentation/opencensus-python/pull/851)) ## 1.0.1 + Released 2019-11-26 - Validate instrumentation key in Azure Exporters @@ -97,6 +229,7 @@ Released 2019-11-26 ([#822](https://github.com/census-instrumentation/opencensus-python/pull/822)) ## 1.0.0 + Released 2019-09-30 - Standard Metrics - Incoming requests execution time @@ -105,12 +238,14 @@ Released 2019-09-30 ([#767](https://github.com/census-instrumentation/opencensus-python/pull/767)) ## 0.7.1 + Released 2019-08-26 - Standard metrics incoming requests per second ([#758](https://github.com/census-instrumentation/opencensus-python/pull/758)) ## 0.7.0 + Released 2019-07-31 - Added standard metrics @@ -123,18 +258,21 @@ Released 2019-07-31 ([#735](https://github.com/census-instrumentation/opencensus-python/pull/735)) ## 0.3.1 + Released 2019-06-30 - Added metrics exporter ([#678](https://github.com/census-instrumentation/opencensus-python/pull/678)) ## 0.2.1 + Released 2019-06-13 - Support span attributes ([#682](https://github.com/census-instrumentation/opencensus-python/pull/682)) ## 0.2.0 + Released 2019-05-31 - Added log exporter @@ -146,6 +284,7 @@ Released 2019-05-31 ([#632](https://github.com/census-instrumentation/opencensus-python/pull/632)) ## 0.1.0 + Released 2019-04-24 - Initial release diff --git a/contrib/opencensus-ext-azure/README.rst 
b/contrib/opencensus-ext-azure/README.rst index 1332c824b..71fb73127 100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -1,6 +1,9 @@ OpenCensus Azure Monitor Exporters ================================== +OpenCensus Azure Monitor exporters are on the path to deprecation. They will be officially unsupported by September 2024. Please migrate to the `Azure Monitor OpenTelemetry Distro `_ for the recommended "one-stop-shop" solution or the `Azure Monitor OpenTelemetry exporters `_ for the more hand-on, configurable solution based on `OpenTelemetry `_. +Check out the `migration guide `_ on how to easily migrate Python code. + |pypi| .. |pypi| image:: https://badge.fury.io/py/opencensus-ext-azure.svg @@ -166,8 +169,11 @@ The **Azure Monitor Metrics Exporter** allows you to export metrics to `Azure Mo def main(): # Enable metrics - # Set the interval in seconds in which you want to send metrics - exporter = metrics_exporter.new_metrics_exporter(connection_string='InstrumentationKey=') + # Set the interval in seconds to 60s, which is the time interval application insights + # aggregates your metrics + exporter = metrics_exporter.new_metrics_exporter( + connection_string='InstrumentationKey=' + ) view_manager.register_exporter(exporter) view_manager.register_view(CARROTS_VIEW) @@ -176,7 +182,6 @@ The **Azure Monitor Metrics Exporter** allows you to export metrics to `Azure Mo mmap.measure_int_put(CARROTS_MEASURE, 1000) mmap.record(tmap) - # Default export interval is every 15.0s print("Done recording metrics") @@ -196,10 +201,13 @@ The exporter also includes a set of performance counters that are exported to Az from opencensus.ext.azure import metrics_exporter def main(): - # All you need is the next line. You can disable performance counters by + # Performance counters are sent by default. 
You can disable performance counters by # passing in enable_standard_metrics=False into the constructor of # new_metrics_exporter() - _exporter = metrics_exporter.new_metrics_exporter(connection_string='InstrumentationKey=') + _exporter = metrics_exporter.new_metrics_exporter( + connection_string='InstrumentationKey=', + export_interval=60, + ) for i in range(100): print(psutil.virtual_memory()) @@ -256,8 +264,12 @@ Modifying Metrics def main(): # Enable metrics - # Set the interval in seconds in which you want to send metrics - exporter = metrics_exporter.new_metrics_exporter(connection_string='InstrumentationKey=') + # Set the interval in seconds to 60s, which is the time interval application insights + # aggregates your metrics + exporter = metrics_exporter.new_metrics_exporter( + connection_string='InstrumentationKey=', + export_interval=60, + ) exporter.add_telemetry_processor(callback_function) view_manager.register_exporter(exporter) @@ -267,7 +279,6 @@ Modifying Metrics mmap.measure_int_put(CARROTS_MEASURE, 1000) mmap.record(tmap) - # Default export interval is every 15.0s print("Done recording metrics") @@ -367,6 +378,7 @@ References ---------- * `Azure Monitor `_ +* `Official Microsoft Docs `_ * `Examples `_ * `OpenCensus Project `_ diff --git a/contrib/opencensus-ext-azure/examples/logs/simple.py b/contrib/opencensus-ext-azure/examples/logs/simple.py index 04e268455..2978033b1 100644 --- a/contrib/opencensus-ext-azure/examples/logs/simple.py +++ b/contrib/opencensus-ext-azure/examples/logs/simple.py @@ -21,3 +21,5 @@ # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING # environment variable. 
logger.addHandler(AzureLogHandler()) + +logger.warning("Hello World!") diff --git a/contrib/opencensus-ext-azure/examples/metrics/simple.py b/contrib/opencensus-ext-azure/examples/metrics/simple.py index 611d3b146..353843486 100644 --- a/contrib/opencensus-ext-azure/examples/metrics/simple.py +++ b/contrib/opencensus-ext-azure/examples/metrics/simple.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from opencensus.ext.azure import metrics_exporter from opencensus.stats import aggregation as aggregation_module from opencensus.stats import measure as measure_module @@ -34,12 +36,12 @@ def main(): - # Enable metrics - # Set the interval in seconds in which you want to send metrics - # TODO: you need to specify the instrumentation key in a connection string - # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING - # environment variable. - exporter = metrics_exporter.new_metrics_exporter() + # Enable metrics. Set the interval in seconds to 60s, which is the time + # interval application insights aggregates your metrics + exporter = metrics_exporter.new_metrics_exporter( + connection_string=os.environ["APPLICATIONINSIGHTS_CONNECTION_STRING"], + export_interval=60, + ) view_manager.register_exporter(exporter) view_manager.register_view(CARROTS_VIEW) diff --git a/contrib/opencensus-ext-azure/examples/metrics/standard.py b/contrib/opencensus-ext-azure/examples/metrics/standard.py deleted file mode 100644 index 3d726385c..000000000 --- a/contrib/opencensus-ext-azure/examples/metrics/standard.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2019, OpenCensus Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time - -import psutil - -from opencensus.ext.azure import metrics_exporter - - -def main(): - # TODO: you need to specify the instrumentation key in a connection string - # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING - # environment variable. - # All you need is the next line. You can disable standard metrics by - # passing in enable_standard_metrics=False into the constructor of - # new_metrics_exporter() - _exporter = metrics_exporter.new_metrics_exporter() - - print(_exporter.max_batch_size) - for i in range(100): - print(psutil.virtual_memory()) - time.sleep(5) - - print("Done recording metrics") - - -if __name__ == "__main__": - main() diff --git a/contrib/opencensus-ext-azure/examples/metrics/sum.py b/contrib/opencensus-ext-azure/examples/metrics/sum.py index 355c72954..27db12949 100644 --- a/contrib/opencensus-ext-azure/examples/metrics/sum.py +++ b/contrib/opencensus-ext-azure/examples/metrics/sum.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import time from opencensus.ext.azure import metrics_exporter @@ -36,12 +37,12 @@ def main(): - # Enable metrics - # Set the interval in seconds in which you want to send metrics - # TODO: you need to specify the instrumentation key in a connection string - # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING - # environment variable. - exporter = metrics_exporter.new_metrics_exporter() + # Enable metrics. 
Set the interval in seconds to 60s, which is the time + # interval application insights aggregates your metrics + exporter = metrics_exporter.new_metrics_exporter( + connection_string=os.environ["APPLICATIONINSIGHTS_CONNECTION_STRING"], + export_interval=60, + ) view_manager.register_exporter(exporter) view_manager.register_view(NUM_REQUESTS_VIEW) diff --git a/contrib/opencensus-ext-azure/examples/traces/django/logfile b/contrib/opencensus-ext-azure/examples/traces/django/logfile new file mode 100644 index 000000000..c58b7d762 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/logfile @@ -0,0 +1,8 @@ +2022-10-26 15:48:57,700 INFO This is an INFO level log entry. +2022-10-26 15:48:57,700 WARNING This is a WARNING level log entry. +2022-10-26 15:48:57,701 ERROR This is an ERROR level log entry. +2022-10-26 15:48:57,702 CRITICAL This is a CRITICAL level log entry. +2022-10-26 16:10:22,849 INFO This is an INFO level log entry. +2022-10-26 16:10:22,850 WARNING This is a WARNING level log entry. +2022-10-26 16:10:22,850 ERROR This is an ERROR level log entry. +2022-10-26 16:10:22,850 CRITICAL This is a CRITICAL level log entry. diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/db.sqlite3 b/contrib/opencensus-ext-azure/examples/traces/django/mysite/db.sqlite3 new file mode 100644 index 000000000..e69de29bb diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/manage.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/manage.py new file mode 100644 index 000000000..538658793 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/manage.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python + +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings') + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/__init__.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/__init__.py new file mode 100644 index 000000000..296af4941 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/asgi.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/asgi.py new file mode 100644 index 000000000..4d82b9f8c --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/asgi.py @@ -0,0 +1,29 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +ASGI config for mysite project. + +It exposes the ASGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings') + +application = get_asgi_application() diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/settings.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/settings.py new file mode 100644 index 000000000..da26367f2 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/settings.py @@ -0,0 +1,184 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Django settings for mysite project. + +Generated by 'django-admin startproject' using Django 3.2.14. + +For more information on this file, see +https://docs.djangoproject.com/en/3.2/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/3.2/ref/settings/ +""" + +from pathlib import Path + +# Build paths inside the project like this: BASE_DIR / 'subdir'. +BASE_DIR = Path(__file__).resolve().parent.parent + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! 
+SECRET_KEY = 'secret_key_for_test' + +ALLOWED_HOSTS = ['*'] + +# Application definition + +INSTALLED_APPS = [ + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', +] + +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'opencensus.ext.django.middleware.OpencensusMiddleware', +] + +MY_CONNECTION_STRING = "''" + +OPENCENSUS = { + 'TRACE': { + 'SAMPLER': 'opencensus.trace.samplers.ProbabilitySampler(rate=1.0)', + 'EXPORTER': 'opencensus.ext.azure.trace_exporter.AzureExporter(connection_string=' + MY_CONNECTION_STRING + ')', # noqa: E501 + } +} + +ROOT_URLCONF = 'mysite.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'mysite.wsgi.application' + + +# Database +# https://docs.djangoproject.com/en/3.2/ref/settings/#databases + +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': BASE_DIR / 'db.sqlite3', + } +} + + +# Password validation +# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa: E501 + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 
# noqa: E501 + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa: E501 + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa: E501 + }, +] + + +# Internationalization +# https://docs.djangoproject.com/en/3.2/topics/i18n/ + +LANGUAGE_CODE = 'en-us' + +TIME_ZONE = 'UTC' + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/3.2/howto/static-files/ + +STATIC_URL = '/static/' + +# Default primary key field type +# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field + +DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = False +ALLOWED_HOSTS = ["*"] + +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'timestamp': { + 'format': '{asctime} {levelname} {message}', + 'style': '{', + }, + }, + 'handlers': { + 'console': { + 'level': 'DEBUG', + 'class': 'logging.StreamHandler', + 'formatter': 'timestamp', + }, + 'logfile': { + 'level': 'DEBUG', + 'class': 'logging.FileHandler', + 'formatter': 'timestamp', + 'filename': str(BASE_DIR) + "/../logfile", + }, + 'azure': { + 'level': "DEBUG", + 'class': "opencensus.ext.azure.log_exporter.AzureLogHandler", + 'connection_string': MY_CONNECTION_STRING, + 'formatter': 'timestamp', + }, + }, + 'loggers': { + 'custom': { + 'level': 'INFO', + 'handlers': ['console', 'logfile', 'azure'] + } + } +} diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/urls.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/urls.py new file mode 100644 index 000000000..407f10f8b --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/urls.py @@ -0,0 +1,35 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""mysite URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/3.2/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.contrib import admin +from django.urls import include, path + +urlpatterns = [ + path('admin/', admin.site.urls), + path('', include('polls.urls')), +] diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/wsgi.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/wsgi.py new file mode 100644 index 000000000..247a03089 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/mysite/wsgi.py @@ -0,0 +1,29 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +WSGI config for mysite project. + +It exposes the WSGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings') + +application = get_wsgi_application() diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/__init__.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/__init__.py new file mode 100644 index 000000000..296af4941 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/admin.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/admin.py new file mode 100644 index 000000000..296af4941 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/admin.py @@ -0,0 +1,13 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/apps.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/apps.py new file mode 100644 index 000000000..41e62e385 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/apps.py @@ -0,0 +1,19 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from django.apps import AppConfig + + +class PollsConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'polls' diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/migrations/__init__.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/migrations/__init__.py new file mode 100644 index 000000000..296af4941 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/migrations/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/models.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/models.py new file mode 100644 index 000000000..296af4941 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/models.py @@ -0,0 +1,13 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/tests.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/tests.py new file mode 100644 index 000000000..296af4941 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/tests.py @@ -0,0 +1,13 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/urls.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/urls.py new file mode 100644 index 000000000..580d73bef --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/urls.py @@ -0,0 +1,20 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from django.urls import path + +from . 
import views + +urlpatterns = [ + path('', views.index, name='index'), +] diff --git a/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/views.py b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/views.py new file mode 100644 index 000000000..833f77d5e --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/traces/django/mysite/polls/views.py @@ -0,0 +1,29 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging + +from django.http import HttpResponse + +# Logging configured through settings.LOGGING in settings.py +logger = logging.getLogger('custom') + + +# Distributed tracing configured through settings.OPENCENSUS in settings.py +def index(request): + logger.debug('This is a DEBUG level log entry.') + logger.info('This is an INFO level log entry.') + logger.warning('This is a WARNING level log entry.') + logger.error('This is an ERROR level log entry.') + logger.critical('This is a CRITICAL level log entry.') + return HttpResponse("Hello, world. 
You're at the polls index.") diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py index 9291730b2..96aa54ed3 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging import os import tempfile @@ -21,6 +22,8 @@ INSTRUMENTATION_KEY = 'instrumentationkey' TEMPDIR_PREFIX = "opencensus-python-" +_logger = logging.getLogger(__name__) + def process_options(options): # Connection string/ikey @@ -30,6 +33,13 @@ def process_options(options): os.getenv('APPLICATIONINSIGHTS_CONNECTION_STRING')) env_ikey = os.getenv('APPINSIGHTS_INSTRUMENTATIONKEY') + # Deprecation note about explicit instrumentation key usage + if (not code_cs and code_ikey) or (not env_cs and env_ikey): + _logger.warning( + "DeprecationWarning: Explicitly using instrumentation key is " + "deprecated. Please use a connection string instead." + ) + # The priority of which value takes on the instrumentation key is: # 1. Key from explicitly passed in connection string # 2. Key from explicitly passed in instrumentation key diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py index 905d86dd0..46958f384 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py @@ -196,7 +196,7 @@ def _check_storage_size(self): if size >= self.max_size: logger.warning( "Persistent storage max capacity has been " - "reached. Currently at %fKB. Telemetry will be " + "reached. Currently at %sKB. Telemetry will be " "lost. 
Please consider increasing the value of " "'storage_max_size' in exporter config.", format(size/1024) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py index b3fc75027..cf0cf622a 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py @@ -14,7 +14,6 @@ import json import logging -import os import threading import time @@ -22,6 +21,8 @@ from azure.core.exceptions import ClientAuthenticationError from azure.identity._exceptions import CredentialUnavailableError +from opencensus.ext.azure.statsbeat import state + try: from urllib.parse import urlparse except ImportError: @@ -30,16 +31,48 @@ logger = logging.getLogger(__name__) + _MAX_CONSECUTIVE_REDIRECTS = 10 _MONITOR_OAUTH_SCOPE = "https://monitor.azure.com//.default" _requests_lock = threading.Lock() _requests_map = {} +_REACHED_INGESTION_STATUS_CODES = (200, 206, 402, 408, 429, 439, 500) +REDIRECT_STATUS_CODES = (307, 308) +RETRYABLE_STATUS_CODES = ( + 401, # Unauthorized + 403, # Forbidden + 408, # Request Timeout + 429, # Too Many Requests - retry after + 500, # Internal server error + 502, # Bad Gateway + 503, # Service unavailable + 504, # Gateway timeout +) +THROTTLE_STATUS_CODES = ( + 402, # Quota, too Many Requests over extended time + 439, # Quota, too Many Requests over extended time (legacy) +) + + +class TransportStatusCode: + SUCCESS = 0 + RETRY = 1 + DROP = 2 + STATSBEAT_SHUTDOWN = 3 class TransportMixin(object): + # check to see whether its the case of stats collection def _check_stats_collection(self): - return not os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL") and (not hasattr(self, '_is_stats') or not self._is_stats) # noqa: E501 + return state.is_statsbeat_enabled() and \ + not state.get_statsbeat_shutdown() and \ + not self._is_stats_exporter() + + # check if the current 
exporter is a statsbeat metric exporter + # only applies to metrics exporter + def _is_stats_exporter(self): + return hasattr(self, '_is_stats') and self._is_stats def _transmit_from_storage(self): if self.storage: @@ -49,7 +82,7 @@ def _transmit_from_storage(self): if blob.lease(self.options.timeout + 5): envelopes = blob.get() result = self._transmit(envelopes) - if result > 0: + if result is TransportStatusCode.RETRY: blob.lease(result) else: blob.delete() @@ -64,6 +97,7 @@ def _transmit(self, envelopes): """ if not envelopes: return 0 + status = None exception = None try: start_time = time.time() @@ -76,149 +110,133 @@ def _transmit(self, envelopes): token = self.options.credential.get_token(_MONITOR_OAUTH_SCOPE) headers["Authorization"] = "Bearer {}".format(token.token) endpoint += '/v2.1/track' - if self._check_stats_collection(): - with _requests_lock: - _requests_map['count'] = _requests_map.get('count', 0) + 1 # noqa: E501 + proxies = json.loads(self.options.proxies) + allow_redirects = len(proxies) != 0 + response = requests.post( url=endpoint, - data=json.dumps(envelopes), + data=json.dumps(envelopes, default=str), headers=headers, timeout=self.options.timeout, - proxies=json.loads(self.options.proxies), - allow_redirects=False, + proxies=proxies, + allow_redirects=allow_redirects, ) - except requests.Timeout: - logger.warning( - 'Request time out. Ingestion may be backed up. Retrying.') - exception = self.options.minimum_retry_interval + except requests.Timeout as ex: + if not self._is_stats_exporter(): + logger.warning( + 'Request time out. Ingestion may be backed up. 
Retrying.') + status = TransportStatusCode.RETRY + exception = ex except requests.RequestException as ex: - logger.warning( - 'Retrying due to transient client side error %s.', ex) - if self._check_stats_collection(): - with _requests_lock: - _requests_map['retry'] = _requests_map.get('retry', 0) + 1 # noqa: E501 - # client side error (retryable) - exception = self.options.minimum_retry_interval + if not self._is_stats_exporter(): + logger.warning( + 'Retrying due to transient client side error %s.', ex) + status = TransportStatusCode.RETRY + exception = ex except CredentialUnavailableError as ex: - logger.warning('Credential error. %s. Dropping telemetry.', ex) - exception = -1 + if not self._is_stats_exporter(): + logger.warning('Credential error. %s. Dropping telemetry.', ex) + status = TransportStatusCode.DROP + exception = ex except ClientAuthenticationError as ex: - logger.warning('Authentication error %s', ex) - exception = self.options.minimum_retry_interval + if not self._is_stats_exporter(): + logger.warning('Authentication error %s', ex) + status = TransportStatusCode.RETRY + exception = ex except Exception as ex: - logger.warning( - 'Error when sending request %s. Dropping telemetry.', ex) - if self._check_stats_collection(): - with _requests_lock: - _requests_map['exception'] = _requests_map.get('exception', 0) + 1 # noqa: E501 - # Extraneous error (non-retryable) - exception = -1 + if not self._is_stats_exporter(): + logger.warning( + 'Error when sending request %s. 
Dropping telemetry.', ex) + status = TransportStatusCode.DROP + exception = ex finally: + if self._check_stats_collection(): + _update_requests_map('count') end_time = time.time() if self._check_stats_collection(): - with _requests_lock: - duration = _requests_map.get('duration', 0) - _requests_map['duration'] = duration + (end_time - start_time) # noqa: E501 - if exception is not None: - return exception + _update_requests_map('duration', value=end_time-start_time) + + if status is not None and exception is not None: + if self._check_stats_collection(): + _update_requests_map('exception', value=exception.__class__.__name__) # noqa: E501 + return status + if self._is_stats_exporter() and \ + not state.get_statsbeat_shutdown() and \ + not state.get_statsbeat_initial_success(): + # If ingestion threshold during statsbeat initialization is + # reached, return back code to shut it down + if _statsbeat_failure_reached_threshold(): + return TransportStatusCode.STATSBEAT_SHUTDOWN text = 'N/A' - data = None + status_code = 0 try: text = response.text + status_code = response.status_code except Exception as ex: - logger.warning('Error while reading response body %s.', ex) - else: - try: - data = json.loads(text) - except Exception: - pass - if response.status_code == 200: + if not self._is_stats_exporter(): + logger.warning('Error while reading response body %s.', ex) + if self._check_stats_collection(): + _update_requests_map('exception', value=ex.__class__.__name__) + return TransportStatusCode.DROP + + if self._is_stats_exporter() and \ + not state.get_statsbeat_shutdown() and \ + not state.get_statsbeat_initial_success(): + # If statsbeat exporter, record initialization as success if + # appropriate status code is returned + if _reached_ingestion_status_code(status_code): + state.set_statsbeat_initial_success(True) + elif _statsbeat_failure_reached_threshold(): + # If ingestion threshold during statsbeat initialization is + # reached, return back code to shut it down + 
return TransportStatusCode.STATSBEAT_SHUTDOWN + + if status_code == 200: # Success self._consecutive_redirects = 0 if self._check_stats_collection(): - with _requests_lock: - _requests_map['success'] = _requests_map.get('success', 0) + 1 # noqa: E501 - return 0 - # Status code not 200 counts as failure - if self._check_stats_collection(): - with _requests_lock: - _requests_map['failure'] = _requests_map.get('failure', 0) + 1 # noqa: E501 - if response.status_code == 206: # Partial Content + _update_requests_map('success') + return TransportStatusCode.SUCCESS + elif status_code == 206: # Partial Content + data = None + try: + data = json.loads(text) + except Exception as ex: + if not self._is_stats_exporter(): + logger.warning('Error while reading response body %s for partial content.', ex) # noqa: E501 + if self._check_stats_collection(): + _update_requests_map('exception', value=ex.__class__.__name__) # noqa: E501 + return TransportStatusCode.DROP if data: try: resend_envelopes = [] for error in data['errors']: - if error['statusCode'] in ( - 429, # Too Many Requests - 500, # Internal Server Error - 503, # Service Unavailable - ): + if _status_code_is_retryable(error['statusCode']): resend_envelopes.append(envelopes[error['index']]) else: - logger.error( - 'Data drop %s: %s %s.', - error['statusCode'], - error['message'], - envelopes[error['index']], - ) - if resend_envelopes: + if not self._is_stats_exporter(): + logger.error( + 'Data drop %s: %s %s.', + error['statusCode'], + error['message'], + envelopes[error['index']], + ) + if self.storage and resend_envelopes: self.storage.put(resend_envelopes) except Exception as ex: - logger.error( - 'Error while processing %s: %s %s.', - response.status_code, - text, - ex, - ) - if self._check_stats_collection(): - with _requests_lock: - _requests_map['retry'] = _requests_map.get('retry', 0) + 1 # noqa: E501 - return -response.status_code + if not self._is_stats_exporter(): + logger.error( + 'Error while processing %s: 
%s %s.', + status_code, + text, + ex, + ) + if self._check_stats_collection(): + _update_requests_map('exception', value=ex.__class__.__name__) # noqa: E501 + return TransportStatusCode.DROP # cannot parse response body, fallback to retry - if response.status_code in ( - 206, # Partial Content - 429, # Too Many Requests - 500, # Internal Server Error - 503, # Service Unavailable - ): - logger.warning( - 'Transient server side error %s: %s.', - response.status_code, - text, - ) - # server side error (retryable) - if self._check_stats_collection(): - with _requests_lock: - _requests_map['retry'] = _requests_map.get('retry', 0) + 1 # noqa: E501 - if response.status_code == 429: - _requests_map['throttle'] = _requests_map.get('throttle', 0) + 1 # noqa: E501 - return self.options.minimum_retry_interval - # Authentication error - if response.status_code == 401: - logger.warning( - 'Authentication error %s: %s.', - response.status_code, - text, - ) - if self._check_stats_collection(): - with _requests_lock: - _requests_map['retry'] = _requests_map.get('retry', 0) + 1 # noqa: E501 - return self.options.minimum_retry_interval - # Forbidden error - # Can occur when v2 endpoint is used while AI resource is configured - # with disableLocalAuth - if response.status_code == 403: - logger.warning( - 'Forbidden error %s: %s.', - response.status_code, - text, - ) - if self._check_stats_collection(): - with _requests_lock: - _requests_map['retry'] = _requests_map.get('retry', 0) + 1 # noqa: E501 - return self.options.minimum_retry_interval - # Redirect - if response.status_code in (307, 308): + elif _status_code_is_redirect(status_code): # Redirect + # for statsbeat, these are not tracked as success nor failures self._consecutive_redirects += 1 if self._consecutive_redirects < _MAX_CONSECUTIVE_REDIRECTS: if response.headers: @@ -230,17 +248,108 @@ def _transmit(self, envelopes): self.options.endpoint = "{}://{}".format(url.scheme, url.netloc) # noqa: E501 # Attempt to export again 
return self._transmit(envelopes) + if not self._is_stats_exporter(): + logger.error( + "Error parsing redirect information." + ) + else: + if not self._is_stats_exporter(): + logger.error( + "Error sending telemetry because of circular redirects." # noqa: E501 + " Please check the integrity of your connection string." # noqa: E501 + ) + # If redirect but did not return, exception occurred + if self._check_stats_collection(): + _update_requests_map('exception', value="Circular Redirect") + return TransportStatusCode.DROP + elif _status_code_is_throttle(status_code): # Throttle + if self._check_stats_collection(): + # 402: Monthly Quota Exceeded (new SDK) + # 439: Monthly Quota Exceeded (old SDK) <- Currently OC SDK + _update_requests_map('throttle', value=status_code) + if not self._is_stats_exporter(): + logger.warning( + 'Telemetry was throttled %s: %s.', + status_code, + text, + ) + return TransportStatusCode.DROP + elif _status_code_is_retryable(status_code): # Retry + if not self._is_stats_exporter(): + if status_code == 401: # Authentication error + logger.warning( + 'Authentication error %s: %s. Retrying.', + status_code, + text, + ) + elif status_code == 403: + # Forbidden error + # Can occur when v2 endpoint is used while AI resource is configured # noqa: E501 + # with disableLocalAuth + logger.warning( + 'Forbidden error %s: %s. Retrying.', + status_code, + text, + ) + else: + logger.warning( + 'Transient server side error %s: %s. 
Retrying.', + status_code, + text, + ) + if self._check_stats_collection(): + _update_requests_map('retry', value=status_code) + return TransportStatusCode.RETRY + else: + # 400 and 404 will be tracked as failure count + # 400 - Invalid - The server cannot or will not process the request due to the invalid telemetry (invalid data, iKey) # noqa: E501 + # 404 - Ingestion is allowed only from stamp specific endpoint - must update connection string # noqa: E501 + if self._check_stats_collection(): + _update_requests_map('failure', value=status_code) + # Other, server side error (non-retryable) + if not self._is_stats_exporter(): logger.error( - "Error parsing redirect information." + 'Non-retryable server side error %s: %s.', + status_code, + text, ) - logger.error( - "Error sending telemetry because of circular redirects." - " Please check the integrity of your connection string." - ) - logger.error( - 'Non-retryable server side error %s: %s.', - response.status_code, - text, - ) - # server side error (non-retryable) - return -response.status_code + return TransportStatusCode.DROP + + +def _status_code_is_redirect(status_code): + return status_code in REDIRECT_STATUS_CODES + + +def _status_code_is_throttle(status_code): + return status_code in THROTTLE_STATUS_CODES + + +def _status_code_is_retryable(status_code): + return status_code in RETRYABLE_STATUS_CODES + + +def _reached_ingestion_status_code(status_code): + return status_code in _REACHED_INGESTION_STATUS_CODES + + +def _statsbeat_failure_reached_threshold(): + # increment failure counter for sending statsbeat if in initialization + state.increment_statsbeat_initial_failure_count() + return state.get_statsbeat_initial_failure_count() >= 3 + + +def _update_requests_map(type_name, value=None): + # value is either None, duration, status_code or exc_name + with _requests_lock: + if type_name == "success" or type_name == "count": # success, count + _requests_map[type_name] = _requests_map.get(type_name, 0) + 1 + elif 
type_name == "duration": # value will be duration + _requests_map[type_name] = _requests_map.get(type_name, 0) + value # noqa: E501 + else: # exception, failure, retry, throttle + # value will be a key (status_code/exc_name) + prev = 0 + if _requests_map.get(type_name): + prev = _requests_map.get(type_name).get(value, 0) + else: + _requests_map[type_name] = {} + _requests_map[type_name][value] = prev + 1 diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py index c07ddbe9c..5ef12b3ba 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '1.2.dev0' +__version__ = '1.1.dev0' diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index 4dbc384da..14e228d06 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -12,9 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import atexit import logging -import os import random import threading import time @@ -31,8 +29,11 @@ Message, ) from opencensus.ext.azure.common.storage import LocalFileStorage -from opencensus.ext.azure.common.transport import TransportMixin -from opencensus.ext.azure.metrics_exporter import statsbeat_metrics +from opencensus.ext.azure.common.transport import ( + TransportMixin, + TransportStatusCode, +) +from opencensus.ext.azure.statsbeat import statsbeat from opencensus.trace import execution_context logger = logging.getLogger(__name__) @@ -64,10 +65,6 @@ def __init__(self, **options): self._queue = Queue(capacity=self.options.queue_capacity) self._worker = Worker(self._queue, self) self._worker.start() - atexit.register(self.close, self.options.grace_period) - # start statsbeat on exporter instantiation - if not os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL"): - statsbeat_metrics.collect_statsbeat_metrics(self.options) # For redirects self._consecutive_redirects = 0 # To prevent circular redirects @@ -78,23 +75,32 @@ def _export(self, batch, event=None): # pragma: NO COVER envelopes = self.apply_telemetry_processors(envelopes) result = self._transmit(envelopes) # Only store files if local storage enabled - if self.storage and result > 0: - self.storage.put(envelopes, result) - if event: - if isinstance(event, QueueExitEvent): - self._transmit_from_storage() # send files before exit - return - if len(batch) < self.options.max_batch_size: - self._transmit_from_storage() + if self.storage: + if result is TransportStatusCode.RETRY: + self.storage.put( + envelopes, + self.options.minimum_retry_interval, + ) + if result is TransportStatusCode.SUCCESS: + if len(batch) < self.options.max_batch_size: + self._transmit_from_storage() + if event: + if isinstance(event, QueueExitEvent): + # send files before exit + self._transmit_from_storage() finally: if event: event.set() + # Close is automatically called as part of logging shutdown def close(self, 
timeout=None): - if self.storage: + if not timeout and hasattr(self, "options"): + timeout = self.options.grace_period + if hasattr(self, "storage") and self.storage: self.storage.close() - if self._worker: + if hasattr(self, "_worker") and self._worker: self._worker.stop(timeout) + super(BaseLogHandler, self).close() def createLock(self): self.lock = None @@ -105,7 +111,22 @@ def emit(self, record): def log_record_to_envelope(self, record): raise NotImplementedError # pragma: NO COVER + # Flush is automatically called as part of logging shutdown def flush(self, timeout=None): + if not hasattr(self, "_queue") or self._queue.is_empty(): + return + + # We must check the worker thread is alive, because otherwise flush + # is useless. Also, it would deadlock if no timeout is given, and the + # queue isn't empty. + # This is a very possible scenario during process termination, when + # atexit first calls handler.close() and then logging.shutdown(), + # that in turn calls handler.flush() without arguments. + if not self._worker.is_alive(): + logger.warning("Can't flush %s, worker thread is dead. 
" + "Any pending messages will be lost.", self) + return + self._queue.flush(timeout=timeout) @@ -163,9 +184,15 @@ def filter(self, record): return random.random() < self.probability -class AzureLogHandler(TransportMixin, ProcessorMixin, BaseLogHandler): +class AzureLogHandler(BaseLogHandler, TransportMixin, ProcessorMixin): """Handler for logging to Microsoft Azure Monitor.""" + def __init__(self, **options): + super(AzureLogHandler, self).__init__(**options) + # start statsbeat on exporter instantiation + if self._check_stats_collection(): + statsbeat.collect_statsbeat_metrics(self.options) + def log_record_to_envelope(self, record): envelope = create_envelope(self.options.instrumentation_key, record) @@ -204,6 +231,11 @@ def log_record_to_envelope(self, record): if exctype is not None: exc_type = exctype.__name__ + if not exc_type: + exc_type = "Exception" + if not message: + message = "Exception" + envelope.name = 'Microsoft.ApplicationInsights.Exception' data = ExceptionData( @@ -233,6 +265,12 @@ def log_record_to_envelope(self, record): class AzureEventHandler(TransportMixin, ProcessorMixin, BaseLogHandler): """Handler for sending custom events to Microsoft Azure Monitor.""" + def __init__(self, **options): + super(AzureEventHandler, self).__init__(**options) + # start statsbeat on exporter instantiation + if self._check_stats_collection(): + statsbeat.collect_statsbeat_metrics(self.options) + def log_record_to_envelope(self, record): envelope = create_envelope(self.options.instrumentation_key, record) @@ -241,10 +279,16 @@ def log_record_to_envelope(self, record): isinstance(record.custom_dimensions, dict)): properties.update(record.custom_dimensions) + measurements = {} + if (hasattr(record, 'custom_measurements') and + isinstance(record.custom_measurements, dict)): + measurements.update(record.custom_measurements) + envelope.name = 'Microsoft.ApplicationInsights.Event' data = Event( name=self.format(record), properties=properties, + 
measurements=measurements, ) envelope.data = Data(baseData=data, baseType='EventData') diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py index 59c82b002..a92354138 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py @@ -13,8 +13,6 @@ # limitations under the License. import atexit -import logging -import os from opencensus.common import utils as common_utils from opencensus.ext.azure.common import Options, utils @@ -26,16 +24,20 @@ MetricData, ) from opencensus.ext.azure.common.storage import LocalFileStorage -from opencensus.ext.azure.common.transport import TransportMixin +from opencensus.ext.azure.common.transport import ( + TransportMixin, + TransportStatusCode, +) from opencensus.ext.azure.metrics_exporter import standard_metrics +from opencensus.ext.azure.statsbeat.statsbeat_metrics import ( + _NETWORK_STATSBEAT_NAMES, +) from opencensus.metrics import transport from opencensus.metrics.export.metric_descriptor import MetricDescriptorType from opencensus.stats import stats as stats_module __all__ = ['MetricsExporter', 'new_metrics_exporter'] -logger = logging.getLogger(__name__) - class MetricsExporter(TransportMixin, ProcessorMixin): """Metrics exporter for Microsoft Azure Monitor.""" @@ -74,14 +76,21 @@ def export_metrics(self, metrics): for batch in batched_envelopes: batch = self.apply_telemetry_processors(batch) result = self._transmit(batch) + # If statsbeat exporter and received signal to shutdown + if self._is_stats and result is \ + TransportStatusCode.STATSBEAT_SHUTDOWN: + from opencensus.ext.azure.statsbeat import statsbeat + statsbeat.shutdown_statsbeat_metrics() + return # Only store files if local storage enabled - if self.storage and result > 0: - self.storage.put(batch, result) - - # If there is still 
room to transmit envelopes, transmit from storage - # if available - if len(envelopes) < self.options.max_batch_size: - self._transmit_from_storage() + if self.storage: + if result is TransportStatusCode.RETRY: + self.storage.put(batch, self.options.minimum_retry_interval) # noqa: E501 + if result is TransportStatusCode.SUCCESS: + # If there is still room to transmit envelopes, + # transmit from storage if available + if len(envelopes) < self.options.max_batch_size: + self._transmit_from_storage() def metric_to_envelopes(self, metric): envelopes = [] @@ -92,41 +101,47 @@ def metric_to_envelopes(self, metric): # Each time series will be uniquely identified by its # label values for time_series in metric.time_series: - # Using stats, time_series should only have one - # point which contains the aggregated value - data_point = self._create_data_points( - time_series, md)[0] + # time_series should only have one point which + # contains the aggregated value + # time_series point list is never empty + point = time_series.points[0] + # we ignore None and 0 values for network statsbeats + if self._is_stats_exporter(): + if md.name in _NETWORK_STATSBEAT_NAMES: + if not point.value.value: + continue + data_point = DataPoint( + ns=md.name, + name=md.name, + value=point.value.value + ) # The timestamp is when the metric was recorded - timestamp = time_series.points[0].timestamp + timestamp = point.timestamp # Get the properties using label keys from metric # and label values of the time series - properties = self._create_properties(time_series, md) - envelopes.append(self._create_envelope(data_point, - timestamp, - properties)) + properties = self._create_properties( + time_series, + md.label_keys + ) + envelopes.append( + self._create_envelope( + data_point, + timestamp, + properties + ) + ) return envelopes - def _create_data_points(self, time_series, metric_descriptor): - """Convert a metric's OC time series to list of Azure data points.""" - data_points = [] - for point in 
time_series.points: - # TODO: Possibly encode namespace in name - data_point = DataPoint(ns=metric_descriptor.name, - name=metric_descriptor.name, - value=point.value.value) - data_points.append(data_point) - return data_points - - def _create_properties(self, time_series, metric_descriptor): + def _create_properties(self, time_series, label_keys): properties = {} # We construct a properties map from the label keys and values. We # assume the ordering is already correct - for i in range(len(metric_descriptor.label_keys)): + for i in range(len(label_keys)): if time_series.label_values[i].value is None: value = "null" else: value = time_series.label_values[i].value - properties[metric_descriptor.label_keys[i].key] = value + properties[label_keys[i].key] = value return properties def _create_envelope(self, data_point, timestamp, properties): @@ -147,10 +162,12 @@ def _create_envelope(self, data_point, timestamp, properties): return envelope def shutdown(self): - # Flush the exporter thread - # Do not flush if metrics exporter for stats - if self.exporter_thread and not self._is_stats: - self.exporter_thread.close() + if self.exporter_thread: + # flush if metrics exporter is not for stats + if not self._is_stats: + self.exporter_thread.close() + else: + self.exporter_thread.cancel() # Shutsdown storage worker if self.storage: self.storage.close() @@ -165,8 +182,9 @@ def new_metrics_exporter(**options): producers, exporter, interval=exporter.options.export_interval) - if not os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL"): - from opencensus.ext.azure.metrics_exporter import statsbeat_metrics - # Stats will track the user's ikey - statsbeat_metrics.collect_statsbeat_metrics(exporter.options) + # start statsbeat on exporter instantiation + if exporter._check_stats_collection(): + # Import here to avoid circular dependencies + from opencensus.ext.azure.statsbeat import statsbeat + statsbeat.collect_statsbeat_metrics(exporter.options) return exporter diff --git 
a/contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/__init__.py new file mode 100644 index 000000000..69e3be50d --- /dev/null +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/state.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/state.py new file mode 100644 index 000000000..84ab6c71a --- /dev/null +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/state.py @@ -0,0 +1,50 @@ +# Copyright 2020, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import threading + +_STATSBEAT_STATE = { + "INITIAL_FAILURE_COUNT": 0, + "INITIAL_SUCCESS": False, + "SHUTDOWN": False, +} +_STATSBEAT_STATE_LOCK = threading.Lock() + + +def is_statsbeat_enabled(): + disabled = os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL") + return disabled is None or disabled.lower() != "true" + + +def increment_statsbeat_initial_failure_count(): + with _STATSBEAT_STATE_LOCK: + _STATSBEAT_STATE["INITIAL_FAILURE_COUNT"] += 1 + + +def get_statsbeat_initial_failure_count(): + return _STATSBEAT_STATE["INITIAL_FAILURE_COUNT"] + + +def set_statsbeat_initial_success(success): + with _STATSBEAT_STATE_LOCK: + _STATSBEAT_STATE["INITIAL_SUCCESS"] = success + + +def get_statsbeat_initial_success(): + return _STATSBEAT_STATE["INITIAL_SUCCESS"] + + +def get_statsbeat_shutdown(): + return _STATSBEAT_STATE["SHUTDOWN"] diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/statsbeat_metrics/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/statsbeat.py similarity index 54% rename from contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/statsbeat_metrics/__init__.py rename to contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/statsbeat.py index 281138aea..f0cfeed6a 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/statsbeat_metrics/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/statsbeat.py @@ -11,30 +11,39 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- import threading from opencensus.ext.azure.metrics_exporter import MetricsExporter -from opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat import ( - _STATS_CONNECTION_STRING, +from opencensus.ext.azure.statsbeat.state import ( + _STATSBEAT_STATE, + _STATSBEAT_STATE_LOCK, +) +from opencensus.ext.azure.statsbeat.statsbeat_metrics import ( _STATS_SHORT_EXPORT_INTERVAL, + _get_stats_connection_string, _StatsbeatMetrics, ) from opencensus.metrics import transport from opencensus.metrics.export.metric_producer import MetricProducer +from opencensus.trace import execution_context _STATSBEAT_METRICS = None +_STATSBEAT_EXPORTER = None _STATSBEAT_LOCK = threading.Lock() def collect_statsbeat_metrics(options): - with _STATSBEAT_LOCK: - # Only start statsbeat if did not exist before - global _STATSBEAT_METRICS # pylint: disable=global-statement - if _STATSBEAT_METRICS is None: + # pylint: disable=global-statement + global _STATSBEAT_METRICS + global _STATSBEAT_EXPORTER + # Only start statsbeat if did not exist before + if _STATSBEAT_METRICS is None and _STATSBEAT_EXPORTER is None: + with _STATSBEAT_LOCK: + # Only start statsbeat if did not exist before exporter = MetricsExporter( is_stats=True, - connection_string=_STATS_CONNECTION_STRING, + connection_string=_get_stats_connection_string(options.endpoint), # noqa: E501 + enable_local_storage=options.enable_local_storage, enable_standard_metrics=False, export_interval=_STATS_SHORT_EXPORT_INTERVAL, # 15m by default ) @@ -42,11 +51,37 @@ def collect_statsbeat_metrics(options): producer = _AzureStatsbeatMetricsProducer(options) _STATSBEAT_METRICS = producer # Export some initial stats on program start + execution_context.set_is_exporter(True) exporter.export_metrics(_STATSBEAT_METRICS.get_initial_metrics()) + execution_context.set_is_exporter(False) exporter.exporter_thread = \ transport.get_exporter_thread([_STATSBEAT_METRICS], exporter, exporter.options.export_interval) + _STATSBEAT_EXPORTER = exporter + with 
_STATSBEAT_STATE_LOCK: + _STATSBEAT_STATE["INITIAL_FAILURE_COUNT"] = 0 + _STATSBEAT_STATE["INITIAL_SUCCESS"] = 0 + _STATSBEAT_STATE["SHUTDOWN"] = False + + +def shutdown_statsbeat_metrics(): + # pylint: disable=global-statement + global _STATSBEAT_METRICS + global _STATSBEAT_EXPORTER + shutdown_success = False + if _STATSBEAT_METRICS is not None and _STATSBEAT_EXPORTER is not None and not _STATSBEAT_STATE["SHUTDOWN"]: # noqa: E501 + with _STATSBEAT_LOCK: + try: + _STATSBEAT_EXPORTER.shutdown() + _STATSBEAT_EXPORTER = None + _STATSBEAT_METRICS = None + shutdown_success = True + except: # pylint: disable=broad-except # noqa: E722 + pass + if shutdown_success: + with _STATSBEAT_STATE_LOCK: + _STATSBEAT_STATE["SHUTDOWN"] = True class _AzureStatsbeatMetricsProducer(MetricProducer): diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/statsbeat_metrics/statsbeat.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/statsbeat_metrics.py similarity index 67% rename from contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/statsbeat_metrics/statsbeat.py rename to contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/statsbeat_metrics.py index 8dd13fc7e..712efecaf 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/statsbeat_metrics/statsbeat.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/statsbeat/statsbeat_metrics.py @@ -14,9 +14,9 @@ import datetime import json -import logging import os import platform +import re import threading import requests @@ -30,29 +30,53 @@ ) from opencensus.metrics.label_key import LabelKey from opencensus.metrics.label_value import LabelValue -from opencensus.trace.integrations import get_integrations +from opencensus.trace.integrations import _Integrations, get_integrations _AIMS_URI = "http://169.254.169.254/metadata/instance/compute" _AIMS_API_VERSION = "api-version=2017-12-01" _AIMS_FORMAT = "format=json" -_DEFAULT_STATS_CONNECTION_STRING = 
"InstrumentationKey=c4a29126-a7cb-47e5-b348-11414998b11e;IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/" # noqa: E501 +_DEFAULT_NON_EU_STATS_CONNECTION_STRING = "InstrumentationKey=c4a29126-a7cb-47e5-b348-11414998b11e;IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/" # noqa: E501 +_DEFAULT_EU_STATS_CONNECTION_STRING = "InstrumentationKey=7dc56bab-3c0c-4e9f-9ebb-d1acadee8d0f;IngestionEndpoint=https://westeurope-5.in.applicationinsights.azure.com/" # noqa: E501 _DEFAULT_STATS_SHORT_EXPORT_INTERVAL = 900 # 15 minutes _DEFAULT_STATS_LONG_EXPORT_INTERVAL = 86400 # 24 hours +_EU_ENDPOINTS = [ + "westeurope", + "northeurope", + "francecentral", + "francesouth", + "germanywestcentral", + "norwayeast", + "norwaywest", + "swedencentral", + "switzerlandnorth", + "switzerlandwest", + "uksouth", + "ukwest", +] _ATTACH_METRIC_NAME = "Attach" _FEATURE_METRIC_NAME = "Feature" -_REQ_SUC_COUNT_NAME = "Request Success Count" -_REQ_FAIL_COUNT_NAME = "Request Failure Count" +_REQ_SUCCESS_NAME = "Request Success Count" +_REQ_FAILURE_NAME = "Request Failure Count" _REQ_DURATION_NAME = "Request Duration" _REQ_RETRY_NAME = "Retry Count" _REQ_THROTTLE_NAME = "Throttle Count" _REQ_EXCEPTION_NAME = "Exception Count" +_NETWORK_STATSBEAT_NAMES = ( + _REQ_SUCCESS_NAME, + _REQ_FAILURE_NAME, + _REQ_DURATION_NAME, + _REQ_RETRY_NAME, + _REQ_THROTTLE_NAME, + _REQ_EXCEPTION_NAME, +) + _ENDPOINT_TYPES = ["breeze"] _RP_NAMES = ["appsvc", "functions", "vm", "unknown"] -_logger = logging.getLogger(__name__) +_HOST_PATTERN = re.compile('^https?://(?:www\\.)?([^/.]+)') class _FEATURE_TYPES: @@ -66,12 +90,16 @@ class _StatsbeatFeature: AAD = 2 -def _get_stats_connection_string(): +def _get_stats_connection_string(endpoint): cs_env = os.environ.get("APPLICATION_INSIGHTS_STATS_CONNECTION_STRING") if cs_env: return cs_env else: - return _DEFAULT_STATS_CONNECTION_STRING + for ep in _EU_ENDPOINTS: + if ep in endpoint: + # Use statsbeat EU endpoint if user is in EU region 
+ return _DEFAULT_EU_STATS_CONNECTION_STRING + return _DEFAULT_NON_EU_STATS_CONNECTION_STRING def _get_stats_short_export_interval(): @@ -90,7 +118,6 @@ def _get_stats_long_export_interval(): return _DEFAULT_STATS_LONG_EXPORT_INTERVAL -_STATS_CONNECTION_STRING = _get_stats_connection_string() _STATS_SHORT_EXPORT_INTERVAL = _get_stats_short_export_interval() _STATS_LONG_EXPORT_INTERVAL = _get_stats_long_export_interval() _STATS_LONG_INTERVAL_THRESHOLD = _STATS_LONG_EXPORT_INTERVAL / _STATS_SHORT_EXPORT_INTERVAL # noqa: E501 @@ -104,7 +131,7 @@ def _get_common_properties(): properties.append(LabelKey("cikey", 'customer ikey')) properties.append(LabelKey("runtimeVersion", 'Python version')) properties.append(LabelKey("os", 'os of application being instrumented')) - properties.append(LabelKey("language", 'Python')) + properties.append(LabelKey("language", 'python')) properties.append(LabelKey("version", 'sdkVersion - version of the ext')) return properties @@ -115,10 +142,14 @@ def _get_attach_properties(): return properties -def _get_network_properties(): +def _get_network_properties(value=None): properties = _get_common_properties() properties.append(LabelKey("endpoint", "ingestion endpoint type")) properties.append(LabelKey("host", "destination of ingestion endpoint")) + if value is None: + properties.append(LabelKey("statusCode", "ingestion service response code")) # noqa: E501 + elif value == "Exception": + properties.append(LabelKey("exceptionType", "language specific exception type")) # noqa: E501 return properties @@ -131,27 +162,27 @@ def _get_feature_properties(): def _get_success_count_value(): with _requests_lock: - interval_count = _requests_map.get('success', 0) \ - - _requests_map.get('last_success', 0) - _requests_map['last_success'] = _requests_map.get('success', 0) + interval_count = _requests_map.get('success', 0) + _requests_map['success'] = 0 return interval_count -def _get_failure_count_value(): - with _requests_lock: - interval_count = 
_requests_map.get('failure', 0) \ - - _requests_map.get('last_failure', 0) - _requests_map['last_failure'] = _requests_map.get('failure', 0) - return interval_count +def _get_failure_count_value(status_code): + interval_count = 0 + if status_code: + with _requests_lock: + if _requests_map.get('failure'): + interval_count = _requests_map.get('failure').get(status_code, 0) # noqa: E501 + _requests_map['failure'][status_code] = 0 + return interval_count def _get_average_duration_value(): with _requests_lock: - interval_duration = _requests_map.get('duration', 0) \ - - _requests_map.get('last_duration', 0) - interval_count = _requests_map.get('count', 0) \ - - _requests_map.get('last_count', 0) - _requests_map['last_duration'] = _requests_map.get('duration', 0) + interval_duration = _requests_map.get('duration', 0) + interval_count = _requests_map.get('count', 0) + _requests_map['duration'] = 0 + _requests_map['count'] = 0 if interval_duration > 0 and interval_count > 0: result = interval_duration / interval_count # Convert to milliseconds @@ -159,28 +190,43 @@ def _get_average_duration_value(): return 0 -def _get_retry_count_value(): - with _requests_lock: - interval_count = _requests_map.get('retry', 0) \ - - _requests_map.get('last_retry', 0) - _requests_map['last_retry'] = _requests_map.get('retry', 0) - return interval_count +def _get_retry_count_value(status_code): + interval_count = 0 + if status_code: + with _requests_lock: + if _requests_map.get('retry'): + interval_count = _requests_map.get('retry').get(status_code, 0) + _requests_map['retry'][status_code] = 0 + return interval_count -def _get_throttle_count_value(): - with _requests_lock: - interval_count = _requests_map.get('throttle', 0) \ - - _requests_map.get('last_throttle', 0) - _requests_map['last_throttle'] = _requests_map.get('throttle', 0) - return interval_count +def _get_throttle_count_value(status_code): + interval_count = 0 + if status_code: + with _requests_lock: + if 
_requests_map.get('throttle'): + interval_count = _requests_map.get('throttle').get(status_code, 0) # noqa: E501 + _requests_map['throttle'][status_code] = 0 + return interval_count -def _get_exception_count_value(): - with _requests_lock: - interval_count = _requests_map.get('exception', 0) \ - - _requests_map.get('last_exception', 0) - _requests_map['last_exception'] = _requests_map.get('exception', 0) - return interval_count +def _get_exception_count_value(exc_type): + interval_count = 0 + if exc_type: + with _requests_lock: + if _requests_map.get('exception'): + interval_count = _requests_map.get('exception').get(exc_type, 0) # noqa: E501 + _requests_map['exception'][exc_type] = 0 + return interval_count + + +def _shorten_host(host): + if not host: + host = "" + match = _HOST_PATTERN.match(host) + if match: + host = match.group(1) + return host class _StatsbeatMetrics: @@ -212,13 +258,13 @@ def __init__(self, options): # Map of gauge function -> metric # Gauge function is the callback used to populate the metric value self._network_metrics[_get_success_count_value] = DerivedLongGauge( - _REQ_SUC_COUNT_NAME, + _REQ_SUCCESS_NAME, 'Statsbeat metric tracking request success count', 'count', _get_network_properties(), ) self._network_metrics[_get_failure_count_value] = DerivedLongGauge( - _REQ_FAIL_COUNT_NAME, + _REQ_FAILURE_NAME, 'Statsbeat metric tracking request failure count', 'count', _get_network_properties(), @@ -226,8 +272,8 @@ def __init__(self, options): self._network_metrics[_get_average_duration_value] = DerivedDoubleGauge( # noqa: E501 _REQ_DURATION_NAME, 'Statsbeat metric tracking average request duration', - 'count', - _get_network_properties(), + 'avg', + _get_network_properties(value="Duration"), ) self._network_metrics[_get_retry_count_value] = DerivedLongGauge( _REQ_RETRY_NAME, @@ -245,7 +291,7 @@ def __init__(self, options): _REQ_EXCEPTION_NAME, 'Statsbeat metric tracking request exception count', 'count', - _get_network_properties(), + 
_get_network_properties(value="Exception"), ) # feature/instrumentation metrics # metrics related to what features and instrumentations are enabled @@ -293,28 +339,57 @@ def get_metrics(self): self._long_threshold_count = 0 network_metrics = self._get_network_metrics() metrics.extend(network_metrics) - except Exception as ex: - _logger.warning('Error while exporting stats metrics %s.', ex) + except Exception: + pass return metrics def _get_network_metrics(self): properties = self._get_common_properties() properties.append(LabelValue(_ENDPOINT_TYPES[0])) # endpoint - properties.append(LabelValue(self._options.endpoint)) # host + host = _shorten_host(self._options.endpoint) + properties.append(LabelValue(host)) # host metrics = [] for fn, metric in self._network_metrics.items(): - # NOTE: A time series is a set of unique label values - # If the label values ever change, a separate time series will be - # created, however, `_get_properties()` should never change - metric.create_time_series(properties, fn) + if metric.descriptor.name == _REQ_SUCCESS_NAME: + properties.append(LabelValue(200)) + metric.create_time_series(properties, fn) + properties.pop() + elif metric.descriptor.name == _REQ_FAILURE_NAME: + for code in _requests_map.get('failure', {}).keys(): + properties.append(LabelValue(code)) + metric.create_time_series(properties, fn, status_code=code) + properties.pop() + elif metric.descriptor.name == _REQ_DURATION_NAME: + metric.create_time_series(properties, fn) + elif metric.descriptor.name == _REQ_RETRY_NAME: + for code in _requests_map.get('retry', {}).keys(): + properties.append(LabelValue(code)) + metric.create_time_series(properties, fn, status_code=code) + properties.pop() + elif metric.descriptor.name == _REQ_THROTTLE_NAME: + for code in _requests_map.get('throttle', {}).keys(): + properties.append(LabelValue(code)) + metric.create_time_series(properties, fn, status_code=code) + properties.pop() + elif metric.descriptor.name == _REQ_EXCEPTION_NAME: + 
for exc_type in _requests_map.get('exception', {}).keys(): + properties.append(LabelValue(exc_type)) + metric.create_time_series(properties, fn, exc_type=exc_type) # noqa: E501 + properties.pop() + stats_metric = metric.get_metric(datetime.datetime.utcnow()) - # Don't export if value is 0 - if stats_metric.time_series[0].points[0].value.value != 0: + # metric will be None if status_code or exc_type is invalid + # for success count, this will never be None + if stats_metric is not None: + # we handle not exporting of None and 0 values in the exporter metrics.append(stats_metric) return metrics def _get_feature_metric(self): + # Don't export if feature list is None + if self._feature is _StatsbeatFeature.NONE: + return None properties = self._get_common_properties() properties.insert(4, LabelValue(self._feature)) # feature long properties.insert(4, LabelValue(_FEATURE_TYPES.FEATURE)) # type @@ -322,6 +397,10 @@ def _get_feature_metric(self): return self._feature_metric.get_metric(datetime.datetime.utcnow()) def _get_instrumentation_metric(self): + integrations = get_integrations() + # Don't export if instrumentation list is None + if integrations is _Integrations.NONE: + return None properties = self._get_common_properties() properties.insert(4, LabelValue(get_integrations())) # instr long properties.insert(4, LabelValue(_FEATURE_TYPES.INSTRUMENTATION)) # type # noqa: E501 @@ -333,17 +412,17 @@ def _get_attach_metric(self): rp = '' rpId = '' # rp, rpId - if os.environ.get("WEBSITE_SITE_NAME") is not None: + if os.environ.get("FUNCTIONS_WORKER_RUNTIME") is not None: + # Function apps + rp = _RP_NAMES[1] + rpId = os.environ.get("WEBSITE_HOSTNAME") + elif os.environ.get("WEBSITE_SITE_NAME") is not None: # Web apps rp = _RP_NAMES[0] rpId = '{}/{}'.format( os.environ.get("WEBSITE_SITE_NAME"), os.environ.get("WEBSITE_HOME_STAMPNAME", '') ) - elif os.environ.get("FUNCTIONS_WORKER_RUNTIME") is not None: - # Function apps - rp = _RP_NAMES[1] - rpId = 
os.environ.get("WEBSITE_HOSTNAME") elif self._vm_retry and self._get_azure_compute_metadata(): # VM rp = _RP_NAMES[2] diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py index c1373d3ea..627c1ed05 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py @@ -15,7 +15,6 @@ import atexit import json import logging -import os from opencensus.common.schedule import QueueExitEvent from opencensus.ext.azure.common import Options, utils @@ -29,8 +28,11 @@ Request, ) from opencensus.ext.azure.common.storage import LocalFileStorage -from opencensus.ext.azure.common.transport import TransportMixin -from opencensus.ext.azure.metrics_exporter import statsbeat_metrics +from opencensus.ext.azure.common.transport import ( + TransportMixin, + TransportStatusCode, +) +from opencensus.ext.azure.statsbeat import statsbeat from opencensus.trace import attributes_helper from opencensus.trace.span import SpanKind @@ -53,13 +55,14 @@ STACKTRACE = attributes_helper.COMMON_ATTRIBUTES['STACKTRACE'] -class AzureExporter(BaseExporter, ProcessorMixin, TransportMixin): +class AzureExporter(BaseExporter, TransportMixin, ProcessorMixin): """An exporter that sends traces to Microsoft Azure Monitor. :param options: Options for the exporter. 
""" def __init__(self, **options): + super(AzureExporter, self).__init__(**options) self.options = Options(**options) utils.validate_instrumentation_key(self.options.instrumentation_key) self.storage = None @@ -72,11 +75,10 @@ def __init__(self, **options): source=self.__class__.__name__, ) self._telemetry_processors = [] - super(AzureExporter, self).__init__(**options) atexit.register(self._stop, self.options.grace_period) # start statsbeat on exporter instantiation - if not os.environ.get("APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL"): - statsbeat_metrics.collect_statsbeat_metrics(self.options) + if self._check_stats_collection(): + statsbeat.collect_statsbeat_metrics(self.options) # For redirects self._consecutive_redirects = 0 # To prevent circular redirects @@ -94,19 +96,27 @@ def span_data_to_envelope(self, sd): ) if sd.span_kind == SpanKind.SERVER: if ERROR_MESSAGE in sd.attributes: - envelope.name = 'Microsoft.ApplicationInsights.Exception' + message = sd.attributes.get(ERROR_MESSAGE) + if not message: + message = "Exception" + stack_trace = sd.attributes.get(STACKTRACE, []) + if not hasattr(stack_trace, '__iter__'): + stack_trace = [] + type_name = sd.attributes.get(ERROR_NAME, 'Exception') + exc_env = Envelope(**envelope) + exc_env.name = 'Microsoft.ApplicationInsights.Exception' data = ExceptionData( exceptions=[{ 'id': 1, - 'outerId': '{}'.format(sd.span_id), - 'typeName': sd.attributes.get(ERROR_NAME, ''), - 'message': sd.attributes[ERROR_MESSAGE], + 'outerId': 0, + 'typeName': type_name, + 'message': message, 'hasFullStack': STACKTRACE in sd.attributes, - 'parsedStack': sd.attributes.get(STACKTRACE, None) + 'parsedStack': stack_trace }], ) - envelope.data = Data(baseData=data, baseType='ExceptionData') - yield envelope + exc_env.data = Data(baseData=data, baseType='ExceptionData') + yield exc_env envelope.name = 'Microsoft.ApplicationInsights.Request' data = Request( @@ -204,14 +214,17 @@ def emit(self, batch, event=None): envelopes = 
self.apply_telemetry_processors(envelopes) result = self._transmit(envelopes) # Only store files if local storage enabled - if self.storage and result > 0: - self.storage.put(envelopes, result) + if self.storage and result is TransportStatusCode.RETRY: + self.storage.put( + envelopes, + self.options.minimum_retry_interval + ) if event: - if isinstance(event, QueueExitEvent): + if self.storage and isinstance(event, QueueExitEvent): self._transmit_from_storage() # send files before exit event.set() return - if len(batch) < self.options.max_batch_size: + if self.storage and len(batch) < self.options.max_batch_size: self._transmit_from_storage() except Exception: logger.exception('Exception occurred while exporting the data.') diff --git a/contrib/opencensus-ext-azure/setup.py b/contrib/opencensus-ext-azure/setup.py index 9396ddde9..422effebc 100644 --- a/contrib/opencensus-ext-azure/setup.py +++ b/contrib/opencensus-ext-azure/setup.py @@ -43,7 +43,7 @@ install_requires=[ 'azure-core >= 1.12.0, < 2.0.0', 'azure-identity >= 1.5.0, < 2.0.0', - 'opencensus >= 0.9.dev0, < 1.0.0', + 'opencensus >= 0.11.4, < 1.0.0', 'psutil >= 5.6.3', 'requests >= 2.19.0', ], diff --git a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py index 60aff5f2d..beba8ffdb 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py @@ -20,8 +20,9 @@ import mock from opencensus.ext.azure import log_exporter +from opencensus.ext.azure.common.transport import TransportStatusCode -TEST_FOLDER = os.path.abspath('.test.logs') +TEST_FOLDER = os.path.abspath('.test.log.exporter') def setUpModule(): @@ -51,6 +52,13 @@ def export(self, batch): return self.callback(batch) +class MockResponse(object): + def __init__(self, status_code, text, headers=None): + self.status_code = status_code + self.text = text + self.headers = headers + + class 
TestBaseLogHandler(unittest.TestCase): def setUp(self): @@ -129,7 +137,7 @@ def test_init_handler_with_queue_capacity(self): 500 ) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_exception(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureLogHandler( @@ -146,7 +154,7 @@ def test_exception(self, requests_mock): post_body = requests_mock.call_args_list[0][1]['data'] self.assertTrue('ZeroDivisionError' in post_body) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_exception_with_custom_properties(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureLogHandler( @@ -172,7 +180,7 @@ def test_exception_with_custom_properties(self, requests_mock): self.assertTrue('key_1' in post_body) self.assertTrue('key_2' in post_body) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_export_empty(self, request_mock): handler = log_exporter.AzureLogHandler( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', @@ -184,7 +192,7 @@ def test_export_empty(self, request_mock): @mock.patch('opencensus.ext.azure.log_exporter' '.AzureLogHandler.log_record_to_envelope') - def test_export_failure(self, log_record_to_envelope_mock): + def test_export_retry(self, log_record_to_envelope_mock): log_record_to_envelope_mock.return_value = ['bar'] handler = log_exporter.AzureLogHandler( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', @@ -192,12 +200,28 @@ def test_export_failure(self, log_record_to_envelope_mock): ) with mock.patch('opencensus.ext.azure.log_exporter' '.AzureLogHandler._transmit') as transmit: - transmit.return_value = 10 + transmit.return_value = TransportStatusCode.RETRY handler._export(['foo']) 
self.assertEqual(len(os.listdir(handler.storage.path)), 1) self.assertIsNone(handler.storage.get()) handler.close() + @mock.patch('opencensus.ext.azure.log_exporter' + '.AzureLogHandler.log_record_to_envelope') + def test_export_success(self, log_record_to_envelope_mock): + log_record_to_envelope_mock.return_value = ['bar'] + handler = log_exporter.AzureLogHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + with mock.patch('opencensus.ext.azure.log_exporter' + '.AzureLogHandler._transmit') as transmit: + transmit.return_value = TransportStatusCode.SUCCESS + handler._export(['foo']) + self.assertEqual(len(os.listdir(handler.storage.path)), 0) + self.assertIsNone(handler.storage.get()) + handler.close() + def test_log_record_to_envelope(self): handler = log_exporter.AzureLogHandler( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', @@ -212,7 +236,7 @@ def test_log_record_to_envelope(self): '12345678-1234-5678-abcd-12345678abcd') handler.close() - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_log_record_with_custom_properties(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureLogHandler( @@ -233,7 +257,7 @@ def test_log_record_with_custom_properties(self, requests_mock): self.assertTrue('key_1' in post_body) self.assertTrue('key_2' in post_body) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_log_with_invalid_custom_properties(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureLogHandler( @@ -250,7 +274,6 @@ def test_log_with_invalid_custom_properties(self, requests_mock): }) handler.close() - self.assertEqual(len(os.listdir(handler.storage.path)), 0) post_body = requests_mock.call_args_list[0][1]['data'] self.assertTrue('action_1_' in 
post_body) self.assertTrue('action_2_arg' in post_body) @@ -259,7 +282,7 @@ def test_log_with_invalid_custom_properties(self, requests_mock): self.assertFalse('not_a_dict' in post_body) self.assertFalse('key_1' in post_body) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_log_record_sampled(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureLogHandler( @@ -278,7 +301,7 @@ def test_log_record_sampled(self, requests_mock): self.assertTrue('Hello_World3' in post_body) self.assertTrue('Hello_World4' in post_body) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_log_record_not_sampled(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureLogHandler( @@ -291,7 +314,7 @@ def test_log_record_not_sampled(self, requests_mock): logger.warning('Hello_World3') logger.warning('Hello_World4') handler.close() - self.assertFalse(requests_mock.called) + self.assertTrue(handler._queue.is_empty()) class TestAzureEventHandler(unittest.TestCase): @@ -340,7 +363,7 @@ def test_init_handler_with_queue_capacity(self): 500 ) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_exception(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureEventHandler( @@ -357,7 +380,7 @@ def test_exception(self, requests_mock): post_body = requests_mock.call_args_list[0][1]['data'] self.assertTrue('ZeroDivisionError' in post_body) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_exception_with_custom_properties(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureEventHandler( @@ -373,6 +396,11 @@ def 
test_exception_with_custom_properties(self, requests_mock): { 'key_1': 'value_1', 'key_2': 'value_2' + }, + 'custom_measurements': + { + 'measure_1': 1, + 'measure_2': 2 } } logger.exception('Captured an exception.', extra=properties) @@ -382,8 +410,10 @@ def test_exception_with_custom_properties(self, requests_mock): self.assertTrue('ZeroDivisionError' in post_body) self.assertTrue('key_1' in post_body) self.assertTrue('key_2' in post_body) + self.assertTrue('measure_1' in post_body) + self.assertTrue('measure_2' in post_body) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_export_empty(self, request_mock): handler = log_exporter.AzureEventHandler( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', @@ -395,7 +425,7 @@ def test_export_empty(self, request_mock): @mock.patch('opencensus.ext.azure.log_exporter' '.AzureEventHandler.log_record_to_envelope') - def test_export_failure(self, log_record_to_envelope_mock): + def test_export_retry(self, log_record_to_envelope_mock): log_record_to_envelope_mock.return_value = ['bar'] handler = log_exporter.AzureEventHandler( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', @@ -403,12 +433,28 @@ def test_export_failure(self, log_record_to_envelope_mock): ) with mock.patch('opencensus.ext.azure.log_exporter' '.AzureEventHandler._transmit') as transmit: - transmit.return_value = 10 + transmit.return_value = TransportStatusCode.RETRY handler._export(['foo']) self.assertEqual(len(os.listdir(handler.storage.path)), 1) self.assertIsNone(handler.storage.get()) handler.close() + @mock.patch('opencensus.ext.azure.log_exporter' + '.AzureEventHandler.log_record_to_envelope') + def test_export_success(self, log_record_to_envelope_mock): + log_record_to_envelope_mock.return_value = ['bar'] + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, 
self.id()), + ) + with mock.patch('opencensus.ext.azure.log_exporter' + '.AzureEventHandler._transmit') as transmit: + transmit.return_value = TransportStatusCode.SUCCESS + handler._export(['foo']) + self.assertEqual(len(os.listdir(handler.storage.path)), 0) + self.assertIsNone(handler.storage.get()) + handler.close() + def test_log_record_to_envelope(self): handler = log_exporter.AzureEventHandler( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', @@ -423,7 +469,7 @@ def test_log_record_to_envelope(self): '12345678-1234-5678-abcd-12345678abcd') handler.close() - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_log_record_with_custom_properties(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureEventHandler( @@ -436,6 +482,11 @@ def test_log_record_with_custom_properties(self, requests_mock): { 'key_1': 'value_1', 'key_2': 'value_2' + }, + 'custom_measurements': + { + 'measure_1': 1, + 'measure_2': 2 } }) handler.close() @@ -443,8 +494,10 @@ def test_log_record_with_custom_properties(self, requests_mock): self.assertTrue('action' in post_body) self.assertTrue('key_1' in post_body) self.assertTrue('key_2' in post_body) + self.assertTrue('measure_1' in post_body) + self.assertTrue('measure_2' in post_body) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_log_with_invalid_custom_properties(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureEventHandler( @@ -454,23 +507,24 @@ def test_log_with_invalid_custom_properties(self, requests_mock): logger.addHandler(handler) logger.warning('action_1_%s', None) logger.warning('action_2_%s', 'arg', extra={ - 'custom_dimensions': 'not_a_dict' + 'custom_dimensions': 'not_a_dict', + 'custom_measurements': 'also_not' }) logger.warning('action_3_%s', 'arg', extra={ 'notcustom_dimensions': 
{'key_1': 'value_1'} }) handler.close() - self.assertEqual(len(os.listdir(handler.storage.path)), 0) post_body = requests_mock.call_args_list[0][1]['data'] self.assertTrue('action_1_' in post_body) self.assertTrue('action_2_arg' in post_body) self.assertTrue('action_3_arg' in post_body) self.assertFalse('not_a_dict' in post_body) + self.assertFalse('also_not' in post_body) self.assertFalse('key_1' in post_body) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_log_record_sampled(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureEventHandler( @@ -489,7 +543,7 @@ def test_log_record_sampled(self, requests_mock): self.assertTrue('Hello_World3' in post_body) self.assertTrue('Hello_World4' in post_body) - @mock.patch('requests.post', return_value=mock.Mock()) + @mock.patch('requests.post', return_value=MockResponse(200, '')) def test_log_record_not_sampled(self, requests_mock): logger = logging.getLogger(self.id()) handler = log_exporter.AzureEventHandler( diff --git a/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py index a3cde2bdc..a91c5eff4 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py @@ -11,8 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- import os +import shutil import unittest from datetime import datetime @@ -20,11 +20,16 @@ from opencensus.common import utils from opencensus.ext.azure.common.protocol import DataPoint +from opencensus.ext.azure.common.transport import TransportStatusCode from opencensus.ext.azure.metrics_exporter import ( MetricsExporter, new_metrics_exporter, standard_metrics, ) +from opencensus.ext.azure.statsbeat.statsbeat_metrics import ( + _ATTACH_METRIC_NAME, + _REQ_SUCCESS_NAME, +) from opencensus.metrics import label_key, label_value from opencensus.metrics.export import ( metric, @@ -35,6 +40,16 @@ ) from opencensus.metrics.export.metric_descriptor import MetricDescriptorType +TEST_FOLDER = os.path.abspath('.test.metrics.exporter') + + +def setUpModule(): + os.makedirs(TEST_FOLDER) + + +def tearDownModule(): + shutil.rmtree(TEST_FOLDER) + def create_metric(): lv = label_value.LabelValue('val') @@ -59,6 +74,63 @@ def create_metric(): return mm +def create_metric_ts(): + lv = label_value.LabelValue('val') + lv2 = label_value.LabelValue('val2') + val = value.ValueLong(value=123) + dt = datetime(2019, 3, 20, 21, 34, 0, 537954) + pp = point.Point(value=val, timestamp=dt) + + ts = [ + time_series.TimeSeries( + label_values=[lv], + points=[pp], + start_timestamp=utils.to_iso_str(dt) + ), + time_series.TimeSeries( + label_values=[lv2], + points=[pp], + start_timestamp=utils.to_iso_str(dt) + ), + ] + + desc = metric_descriptor.MetricDescriptor( + name='name', + description='description', + unit='unit', + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + label_keys=[label_key.LabelKey('key', 'description')] + ) + + mm = metric.Metric(descriptor=desc, time_series=ts) + return mm + + +def create_stats_metric(name, num): + lv = label_value.LabelValue('val') + val = value.ValueLong(value=num) + dt = datetime(2019, 3, 20, 21, 34, 0, 537954) + pp = point.Point(value=val, timestamp=dt) + + ts = [ + time_series.TimeSeries( + label_values=[lv], + points=[pp], + 
start_timestamp=utils.to_iso_str(dt) + ) + ] + + desc = metric_descriptor.MetricDescriptor( + name=name, + description='description', + unit='unit', + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + label_keys=[label_key.LabelKey('key', 'description')] + ) + mm = metric.Metric(descriptor=desc, time_series=ts) + return mm + + class TestAzureMetricsExporter(unittest.TestCase): def setUp(self): @@ -107,42 +179,141 @@ def test_export_metrics_histogram(self): @mock.patch('requests.post', return_value=mock.Mock()) def test_export_metrics_empty(self, requests_mock): exporter = MetricsExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + max_batch_size=1, + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) exporter.export_metrics([]) self.assertEqual(len(requests_mock.call_args_list), 0) + self.assertEqual(len(os.listdir(exporter.storage.path)), 0) - @mock.patch('requests.post', return_value=mock.Mock()) - def test_export_metrics_full_batch(self, requests_mock): + def test_export_metrics_retry(self): metric = create_metric() exporter = MetricsExporter( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - max_batch_size=1) - requests_mock.return_value.status_code = 200 - requests_mock.return_value.text = '{"itemsReceived":1,'\ - '"itemsAccepted":1,'\ - '"errors":[]}' - exporter.export_metrics([metric]) + max_batch_size=1, + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + with mock.patch('opencensus.ext.azure.metrics_exporter' + '.MetricsExporter._transmit') as transmit: + transmit.return_value = TransportStatusCode.RETRY + exporter.export_metrics([metric]) - self.assertEqual(len(requests_mock.call_args_list), 1) - post_body = requests_mock.call_args_list[0][1]['data'] - self.assertTrue('metrics' in post_body) - self.assertTrue('properties' in post_body) + self.assertEqual(len(os.listdir(exporter.storage.path)), 1) + self.assertIsNone(exporter.storage.get()) + 
+ def test_export_metrics_success(self): + metric = create_metric() + exporter = MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + max_batch_size=1, + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + with mock.patch('opencensus.ext.azure.metrics_exporter' + '.MetricsExporter._transmit') as transmit: + transmit.return_value = TransportStatusCode.SUCCESS + exporter.export_metrics([metric]) + + self.assertEqual(len(os.listdir(exporter.storage.path)), 0) + self.assertIsNone(exporter.storage.get()) - def test_create_data_points(self): + def test_metric_to_envelopes(self): metric = create_metric() exporter = MetricsExporter( instrumentation_key='12345678-1234-5678-abcd-12345678abcd' ) - data_points = exporter._create_data_points(metric.time_series[0], - metric.descriptor) + property_mock = mock.Mock() + envelope_mock = mock.Mock() + exporter._create_properties = property_mock + exporter._create_envelope = envelope_mock + exporter.metric_to_envelopes(metric) + property_mock.assert_called_once() + envelope_mock.assert_called_once() + + def test_metric_to_envelopes_multi_time_series(self): + metric = create_metric_ts() + exporter = MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) + property_mock = mock.Mock() + envelope_mock = mock.Mock() + exporter._create_properties = property_mock + exporter._create_envelope = envelope_mock + exporter.metric_to_envelopes(metric) + property_mock.assert_any_call( + metric.time_series[0], + metric.descriptor.label_keys, + ) + property_mock.assert_any_call( + metric.time_series[1], + metric.descriptor.label_keys, + ) + envelope_mock.assert_called() - self.assertEqual(len(data_points), 1) - data_point = data_points[0] - self.assertEqual(data_point.ns, metric.descriptor.name) - self.assertEqual(data_point.name, metric.descriptor.name) - self.assertEqual(data_point.value, - metric.time_series[0].points[0].value.value) + def test_metric_to_envelopes_network_statsbeat(self): 
+ metric = create_stats_metric(_REQ_SUCCESS_NAME, 10) + exporter = MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) + statsbeat_mock = mock.Mock() + statsbeat_mock.return_value = True + property_mock = mock.Mock() + envelope_mock = mock.Mock() + exporter._create_properties = property_mock + exporter._create_envelope = envelope_mock + exporter._is_stats_exporter = statsbeat_mock + exporter.metric_to_envelopes(metric) + property_mock.assert_called_once() + envelope_mock.assert_called_once() + + def test_metric_to_envelopes_network_statsbeat_zero(self): + metric = create_stats_metric(_REQ_SUCCESS_NAME, 0) + exporter = MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) + statsbeat_mock = mock.Mock() + statsbeat_mock.return_value = True + property_mock = mock.Mock() + envelope_mock = mock.Mock() + exporter._create_properties = property_mock + exporter._create_envelope = envelope_mock + exporter._is_stats_exporter = statsbeat_mock + exporter.metric_to_envelopes(metric) + property_mock.assert_not_called() + envelope_mock.assert_not_called() + + def test_metric_to_envelopes_not_network_statsbeat(self): + metric = create_stats_metric(_ATTACH_METRIC_NAME, 10) + exporter = MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) + statsbeat_mock = mock.Mock() + statsbeat_mock.return_value = True + property_mock = mock.Mock() + envelope_mock = mock.Mock() + exporter._create_properties = property_mock + exporter._create_envelope = envelope_mock + exporter._is_stats_exporter = statsbeat_mock + exporter.metric_to_envelopes(metric) + property_mock.assert_called_once() + envelope_mock.assert_called_once() + + def test_metric_to_envelopes_not_network_statsbeat_zero(self): + metric = create_stats_metric(_ATTACH_METRIC_NAME, 0) + exporter = MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) + statsbeat_mock = mock.Mock() + statsbeat_mock.return_value = True + 
property_mock = mock.Mock() + envelope_mock = mock.Mock() + exporter._create_properties = property_mock + exporter._create_envelope = envelope_mock + exporter._is_stats_exporter = statsbeat_mock + exporter.metric_to_envelopes(metric) + property_mock.assert_called_once() + envelope_mock.assert_called_once() def test_create_properties(self): metric = create_metric() @@ -150,7 +321,7 @@ def test_create_properties(self): instrumentation_key='12345678-1234-5678-abcd-12345678abcd' ) properties = exporter._create_properties(metric.time_series[0], - metric.descriptor) + metric.descriptor.label_keys) self.assertEqual(len(properties), 1) self.assertEqual(properties['key'], 'val') @@ -162,7 +333,7 @@ def test_create_properties_none(self): ) metric.time_series[0].label_values[0]._value = None properties = exporter._create_properties(metric.time_series[0], - metric.descriptor) + metric.descriptor.label_keys) self.assertEqual(len(properties), 1) self.assertEqual(properties['key'], 'null') @@ -206,11 +377,24 @@ def test_shutdown(self): mock_thread.close.assert_called_once() mock_storage.close.assert_called_once() + def test_shutdown_statsbeat(self): + mock_thread = mock.Mock() + mock_storage = mock.Mock() + exporter = MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) + exporter.exporter_thread = mock_thread + exporter._is_stats = True + exporter.storage = mock_storage + exporter.shutdown() + mock_thread.cancel.assert_called_once() + mock_storage.close.assert_called_once() + @mock.patch('opencensus.ext.azure.metrics_exporter' '.transport.get_exporter_thread') def test_new_metrics_exporter(self, exporter_mock): - with mock.patch('opencensus.ext.azure.metrics_exporter' - '.statsbeat_metrics.collect_statsbeat_metrics') as hb: + with mock.patch('opencensus.ext.azure.statsbeat' + '.statsbeat.collect_statsbeat_metrics') as hb: hb.return_value = None iKey = '12345678-1234-5678-abcd-12345678abcd' exporter = new_metrics_exporter(instrumentation_key=iKey) @@ 
-227,8 +411,8 @@ def test_new_metrics_exporter(self, exporter_mock): @mock.patch('opencensus.ext.azure.metrics_exporter' '.transport.get_exporter_thread') def test_new_metrics_exporter_no_standard_metrics(self, exporter_mock): - with mock.patch('opencensus.ext.azure.metrics_exporter' - '.statsbeat_metrics.collect_statsbeat_metrics') as hb: + with mock.patch('opencensus.ext.azure.statsbeat' + '.statsbeat.collect_statsbeat_metrics') as hb: hb.return_value = None iKey = '12345678-1234-5678-abcd-12345678abcd' exporter = new_metrics_exporter( @@ -240,18 +424,3 @@ def test_new_metrics_exporter_no_standard_metrics(self, exporter_mock): producer_class = standard_metrics.AzureStandardMetricsProducer self.assertFalse(isinstance(exporter_mock.call_args[0][0][0], producer_class)) - - @unittest.skip("Skip because disabling heartbeat metrics") - @mock.patch('opencensus.ext.azure.metrics_exporter' - '.transport.get_exporter_thread') - def test_new_metrics_exporter_heartbeat(self, exporter_mock): - with mock.patch('opencensus.ext.azure.metrics_exporter' - '.statsbeat_metrics.collect_statsbeat_metrics') as hb: - iKey = '12345678-1234-5678-abcd-12345678abcd' - exporter = new_metrics_exporter(instrumentation_key=iKey) - - self.assertEqual(exporter.options.instrumentation_key, iKey) - self.assertEqual(len(hb.call_args_list), 1) - self.assertEqual(len(hb.call_args[0]), 2) - self.assertEqual(hb.call_args[0][0], None) - self.assertEqual(hb.call_args[0][1], iKey) diff --git a/contrib/opencensus-ext-azure/tests/test_azure_statsbeat_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_statsbeat_metrics.py index 9160b98b3..f5e4b6519 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_statsbeat_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_statsbeat_metrics.py @@ -23,10 +23,19 @@ from opencensus.ext.azure.common import Options from opencensus.ext.azure.common.transport import _requests_map from opencensus.ext.azure.common.version import __version__ as ext_version 
-from opencensus.ext.azure.metrics_exporter import statsbeat_metrics -from opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat import ( +from opencensus.ext.azure.metrics_exporter import MetricsExporter +from opencensus.ext.azure.statsbeat import statsbeat +from opencensus.ext.azure.statsbeat.statsbeat_metrics import ( + _DEFAULT_EU_STATS_CONNECTION_STRING, + _DEFAULT_NON_EU_STATS_CONNECTION_STRING, _ENDPOINT_TYPES, _FEATURE_TYPES, + _REQ_DURATION_NAME, + _REQ_EXCEPTION_NAME, + _REQ_FAILURE_NAME, + _REQ_RETRY_NAME, + _REQ_SUCCESS_NAME, + _REQ_THROTTLE_NAME, _RP_NAMES, _STATS_LONG_INTERVAL_THRESHOLD, _get_attach_properties, @@ -37,8 +46,10 @@ _get_feature_properties, _get_network_properties, _get_retry_count_value, + _get_stats_connection_string, _get_success_count_value, _get_throttle_count_value, + _shorten_host, _StatsbeatMetrics, ) from opencensus.metrics.export.gauge import ( @@ -51,7 +62,7 @@ _OPTIONS = Options( instrumentation_key="ikey", enable_local_storage=True, - endpoint="test-endpoint", + endpoint="https://eastus-1.in.applicationinsights.azure.com/", credential=None, ) @@ -76,11 +87,17 @@ def func(*_args, **_kwargs): class TestStatsbeatMetrics(unittest.TestCase): def setUp(self): # pylint: disable=protected-access - statsbeat_metrics._STATSBEAT_METRICS = None + statsbeat._STATSBEAT_METRICS = None + statsbeat._STATSBEAT_EXPORTER = None + _STATSBEAT_STATE = { # noqa: F841 + "INITIAL_FAILURE_COUNT": 0, + "INITIAL_SUCCESS": False, + "SHUTDOWN": False, + } def test_producer_ctor(self): # pylint: disable=protected-access - producer = statsbeat_metrics._AzureStatsbeatMetricsProducer(_OPTIONS) + producer = statsbeat._AzureStatsbeatMetricsProducer(_OPTIONS) metrics = producer._statsbeat self.assertTrue( isinstance( @@ -92,7 +109,7 @@ def test_producer_ctor(self): def test_producer_get_metrics(self): # pylint: disable=protected-access - producer = statsbeat_metrics._AzureStatsbeatMetricsProducer(_OPTIONS) + producer = 
statsbeat._AzureStatsbeatMetricsProducer(_OPTIONS) mock_stats = mock.Mock() producer._statsbeat = mock_stats producer.get_metrics() @@ -101,7 +118,7 @@ def test_producer_get_metrics(self): def test_producer_get_initial_metrics(self): # pylint: disable=protected-access - producer = statsbeat_metrics._AzureStatsbeatMetricsProducer(_OPTIONS) + producer = statsbeat._AzureStatsbeatMetricsProducer(_OPTIONS) mock_stats = mock.Mock() producer._statsbeat = mock_stats producer.get_initial_metrics() @@ -112,16 +129,22 @@ def test_producer_get_initial_metrics(self): @mock.patch('opencensus.metrics.transport.get_exporter_thread') def test_collect_statsbeat_metrics(self, thread_mock, stats_mock): # pylint: disable=protected-access - self.assertIsNone(statsbeat_metrics._STATSBEAT_METRICS) - statsbeat_metrics.collect_statsbeat_metrics(_OPTIONS) + self.assertIsNone(statsbeat._STATSBEAT_METRICS) + statsbeat.collect_statsbeat_metrics(_OPTIONS) + self.assertTrue( + isinstance( + statsbeat._STATSBEAT_METRICS, + statsbeat._AzureStatsbeatMetricsProducer + ) + ) self.assertTrue( isinstance( - statsbeat_metrics._STATSBEAT_METRICS, - statsbeat_metrics._AzureStatsbeatMetricsProducer + statsbeat._STATSBEAT_EXPORTER, + MetricsExporter, ) ) self.assertEqual( - statsbeat_metrics._STATSBEAT_METRICS._statsbeat._instrumentation_key, "ikey") # noqa: E501 + statsbeat._STATSBEAT_METRICS._statsbeat._instrumentation_key, "ikey") # noqa: E501 thread_mock.assert_called_once() stats_mock.assert_called_once() @@ -129,25 +152,119 @@ def test_collect_statsbeat_metrics(self, thread_mock, stats_mock): @mock.patch('opencensus.metrics.transport.get_exporter_thread') def test_collect_statsbeat_metrics_exists(self, thread_mock, stats_mock): # pylint: disable=protected-access - producer = statsbeat_metrics._AzureStatsbeatMetricsProducer(_OPTIONS) - statsbeat_metrics._STATSBEAT_METRICS = producer - statsbeat_metrics.collect_statsbeat_metrics(None) - self.assertEqual(statsbeat_metrics._STATSBEAT_METRICS, producer) + 
self.assertIsNone(statsbeat._STATSBEAT_METRICS) + producer = statsbeat._AzureStatsbeatMetricsProducer(_OPTIONS) + statsbeat._STATSBEAT_METRICS = producer + statsbeat.collect_statsbeat_metrics(None) + self.assertEqual(statsbeat._STATSBEAT_METRICS, producer) thread_mock.assert_not_called() stats_mock.assert_not_called() + @mock.patch.object(_StatsbeatMetrics, 'get_initial_metrics') + @mock.patch('opencensus.metrics.transport.get_exporter_thread') + def test_collect_statsbeat_metrics_non_eu(self, thread_mock, stats_mock): + # pylint: disable=protected-access + cs = "InstrumentationKey=1aa11111-bbbb-1ccc-8ddd-eeeeffff3333;IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/" # noqa: E501 + non_eu = Options( + connection_string=cs + ) + self.assertIsNone(statsbeat._STATSBEAT_METRICS) + with mock.patch.dict( + os.environ, { + "APPLICATION_INSIGHTS_STATS_CONNECTION_STRING": "", + }): + statsbeat.collect_statsbeat_metrics(non_eu) + self.assertTrue( + isinstance( + statsbeat._STATSBEAT_METRICS, + statsbeat._AzureStatsbeatMetricsProducer + ) + ) + self.assertTrue( + isinstance( + statsbeat._STATSBEAT_EXPORTER, + MetricsExporter, + ) + ) + self.assertEqual( + statsbeat._STATSBEAT_EXPORTER.options.instrumentation_key, # noqa: E501 + _DEFAULT_NON_EU_STATS_CONNECTION_STRING.split(";")[0].split("=")[1] # noqa: E501 + ) + self.assertEqual( + statsbeat._STATSBEAT_EXPORTER.options.endpoint, + _DEFAULT_NON_EU_STATS_CONNECTION_STRING.split(";")[1].split("=")[1] # noqa: E501 + ) + + @mock.patch.object(_StatsbeatMetrics, 'get_initial_metrics') + @mock.patch('opencensus.metrics.transport.get_exporter_thread') + def test_collect_statsbeat_metrics_eu(self, thread_mock, stats_mock): + # pylint: disable=protected-access + cs = "InstrumentationKey=1aa11111-bbbb-1ccc-8ddd-eeeeffff3333;IngestionEndpoint=https://northeurope-0.in.applicationinsights.azure.com/" # noqa: E501 + eu = Options( + connection_string=cs + ) + with mock.patch.dict( + os.environ, { + 
"APPLICATION_INSIGHTS_STATS_CONNECTION_STRING": "", + }): + statsbeat.collect_statsbeat_metrics(eu) + self.assertTrue( + isinstance( + statsbeat._STATSBEAT_METRICS, + statsbeat._AzureStatsbeatMetricsProducer + ) + ) + self.assertTrue( + isinstance( + statsbeat._STATSBEAT_EXPORTER, + MetricsExporter, + ) + ) + self.assertEqual( + statsbeat._STATSBEAT_EXPORTER.options.instrumentation_key, # noqa: E501 + _DEFAULT_EU_STATS_CONNECTION_STRING.split(";")[0].split("=")[1] # noqa: E501 + ) + self.assertEqual( + statsbeat._STATSBEAT_EXPORTER.options.endpoint, + _DEFAULT_EU_STATS_CONNECTION_STRING.split(";")[1].split("=")[1] # noqa: E501 + ) + + def test_shutdown_statsbeat_metrics(self): + # pylint: disable=protected-access + producer_mock = mock.Mock() + exporter_mock = mock.Mock() + statsbeat._STATSBEAT_METRICS = producer_mock + statsbeat._STATSBEAT_EXPORTER = exporter_mock + statsbeat.shutdown_statsbeat_metrics() + exporter_mock.shutdown.assert_called_once() + self.assertIsNone(statsbeat._STATSBEAT_METRICS) + self.assertIsNone(statsbeat._STATSBEAT_EXPORTER) + + def test_shutdown_statsbeat_metrics_already_shutdown(self): + # pylint: disable=protected-access + producer_mock = mock.Mock() + exporter_mock = mock.Mock() + statsbeat._STATSBEAT_METRICS = producer_mock + statsbeat._STATSBEAT_EXPORTER = exporter_mock + statsbeat._STATSBEAT_STATE["SHUTDOWN"] = True + statsbeat.shutdown_statsbeat_metrics() + exporter_mock.shutdown.assert_not_called() + self.assertIsNotNone(statsbeat._STATSBEAT_METRICS) + self.assertIsNotNone(statsbeat._STATSBEAT_EXPORTER) + @mock.patch( - 'opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat._get_feature_properties') # noqa: E501 + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_feature_properties') # noqa: E501 @mock.patch( - 'opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat._get_network_properties') # noqa: E501 + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_network_properties') # noqa: E501 
@mock.patch( - 'opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat._get_attach_properties') # noqa: E501 + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_attach_properties') # noqa: E501 def test_statsbeat_metric_init(self, attach_mock, network_mock, feature_mock): # noqa: E501 # pylint: disable=protected-access metric = _StatsbeatMetrics(_OPTIONS) self.assertEqual(len(metric._vm_data), 0) self.assertTrue(metric._vm_retry) self.assertEqual(metric._instrumentation_key, "ikey") + self.assertEqual(metric._feature, 1) self.assertTrue( isinstance( metric._attach_metric, @@ -254,15 +371,55 @@ def test_get_common_properties(self): def test_get_success_count_value(self): _requests_map.clear() - _requests_map['last_success'] = 5 _requests_map['success'] = 10 - self.assertEqual(_get_success_count_value(), 5) - self.assertEqual(_requests_map['last_success'], 10) + self.assertEqual(_get_success_count_value(), 10) + self.assertEqual(_requests_map['success'], 0) + _requests_map.clear() + + def test_get_failure_count_value(self): + _requests_map.clear() + _requests_map['failure'] = {} + _requests_map['failure'][400] = 10 + self.assertEqual(_get_failure_count_value(400), 10) + self.assertEqual(_requests_map['failure'][400], 0) + _requests_map.clear() + + def test_get_average_duration_value(self): + _requests_map.clear() + _requests_map['duration'] = 10 + _requests_map['count'] = 2 + self.assertEqual(_get_average_duration_value(), 5000.0) + self.assertEqual(_requests_map['duration'], 0) + self.assertEqual(_requests_map['count'], 0) + _requests_map.clear() + + def test_get_retry_count_value(self): + _requests_map.clear() + _requests_map['retry'] = {} + _requests_map['retry'][401] = 10 + self.assertEqual(_get_retry_count_value(401), 10) + self.assertEqual(_requests_map['retry'][401], 0) + _requests_map.clear() + + def test_get_throttle_count_value(self): + _requests_map.clear() + _requests_map['throttle'] = {} + _requests_map['throttle'][402] = 10 + 
self.assertEqual(_get_throttle_count_value(402), 10) + self.assertEqual(_requests_map['throttle'][402], 0) + _requests_map.clear() + + def test_get_exception_count_value(self): + _requests_map.clear() + _requests_map['exception'] = {} + _requests_map['exception']['Timeout'] = 10 + self.assertEqual(_get_exception_count_value('Timeout'), 10) + self.assertEqual(_requests_map['exception']['Timeout'], 0) _requests_map.clear() def test_statsbeat_metric_get_initial_metrics(self): # pylint: disable=protected-access - metric = statsbeat_metrics._StatsbeatMetrics(_OPTIONS) + metric = _StatsbeatMetrics(_OPTIONS) attach_metric_mock = mock.Mock() attach_metric_mock.return_value = "attach" feature_metric_mock = mock.Mock() @@ -280,7 +437,7 @@ def test_statsbeat_metric_get_initial_metrics(self): def test_statsbeat_metric_get_metrics(self): # pylint: disable=protected-access - metric = statsbeat_metrics._StatsbeatMetrics(_OPTIONS) + metric = _StatsbeatMetrics(_OPTIONS) metric._long_threshold_count = _STATS_LONG_INTERVAL_THRESHOLD initial_metric_mock = mock.Mock() network_metric_mock = mock.Mock() @@ -296,7 +453,7 @@ def test_statsbeat_metric_get_metrics(self): def test_statsbeat_metric_get_metrics_short(self): # pylint: disable=protected-access - metric = statsbeat_metrics._StatsbeatMetrics(_OPTIONS) + metric = _StatsbeatMetrics(_OPTIONS) metric._long_threshold_count = 1 initial_metric_mock = mock.Mock() network_metric_mock = mock.Mock() @@ -326,7 +483,7 @@ def test_get_feature_metric(self): self.assertEqual( properties[8].value, ext_version) # noqa: E501 - def test_get_feature_metric_wtih_aad(self): + def test_get_feature_metric_with_aad(self): aad_options = Options( instrumentation_key="ikey", enable_local_storage=True, @@ -348,6 +505,17 @@ def test_get_feature_metric_wtih_aad(self): self.assertEqual( properties[8].value, ext_version) # noqa: E501 + def test_get_feature_metric_zero(self): + # pylint: disable=protected-access + options = Options( + instrumentation_key="ikey", + 
enable_local_storage=False, + credential=None, + ) + stats = _StatsbeatMetrics(options) + metric = stats._get_feature_metric() + self.assertIsNone(metric) + def test_get_instrumentation_metric(self): original_integrations = integrations._INTEGRATIONS_BIT_MASK integrations._INTEGRATIONS_BIT_MASK = 1024 @@ -367,20 +535,42 @@ def test_get_instrumentation_metric(self): properties[8].value, ext_version) # noqa: E501 integrations._INTEGRATIONS_BIT_MASK = original_integrations + def test_get_instrumentation_metrics_zero(self): + # pylint: disable=protected-access + original_integrations = integrations._INTEGRATIONS_BIT_MASK + integrations._INTEGRATIONS_BIT_MASK = 0 + stats = _StatsbeatMetrics(_OPTIONS) + metric = stats._get_instrumentation_metric() + self.assertIsNone(metric) + integrations._INTEGRATIONS_BIT_MASK = original_integrations + @mock.patch( - 'opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat._get_exception_count_value') # noqa: E501 + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_exception_count_value') # noqa: E501 @mock.patch( - 'opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat._get_throttle_count_value') # noqa: E501 + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_throttle_count_value') # noqa: E501 @mock.patch( - 'opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat._get_retry_count_value') # noqa: E501 + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_retry_count_value') # noqa: E501 @mock.patch( - 'opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat._get_average_duration_value') # noqa: E501 + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_average_duration_value') # noqa: E501 @mock.patch( - 'opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat._get_failure_count_value') # noqa: E501 + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_failure_count_value') # noqa: E501 @mock.patch( - 
'opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat._get_success_count_value') # noqa: E501 + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_success_count_value') # noqa: E501 def test_get_network_metrics(self, mock1, mock2, mock3, mock4, mock5, mock6): # noqa: E501 # pylint: disable=protected-access + _requests_map.clear() + _requests_map['exception'] = {} + _requests_map['throttle'] = {} + _requests_map['retry'] = {} + _requests_map['failure'] = {} + _requests_map['exception']['Timeout'] = 5 + _requests_map['exception']['RequestException'] = 5 + _requests_map['throttle'][402] = 5 + _requests_map['throttle'][439] = 5 + _requests_map['retry'][401] = 5 + _requests_map['retry'][403] = 5 + _requests_map['failure'][400] = 5 + _requests_map['failure'][404] = 5 stats = _StatsbeatMetrics(_OPTIONS) mock1.return_value = 5 mock2.return_value = 5 @@ -389,45 +579,63 @@ def test_get_network_metrics(self, mock1, mock2, mock3, mock4, mock5, mock6): # mock5.return_value = 5 mock6.return_value = 5 metrics = stats._get_network_metrics() + mock1.assert_called_once() + self.assertEqual(mock2.call_count, 2) + mock3.assert_called_once() + self.assertEqual(mock4.call_count, 2) + self.assertEqual(mock5.call_count, 2) + self.assertEqual(mock6.call_count, 2) self.assertEqual(len(metrics), 6) - self.assertEqual(metrics[0]._time_series[0].points[0].value.value, 5) - self.assertEqual(metrics[1]._time_series[0].points[0].value.value, 5) - self.assertEqual(metrics[2]._time_series[0].points[0].value.value, 5) - self.assertEqual(metrics[3]._time_series[0].points[0].value.value, 5) - self.assertEqual(metrics[4]._time_series[0].points[0].value.value, 5) - self.assertEqual(metrics[5]._time_series[0].points[0].value.value, 5) for metric in metrics: - properties = metric._time_series[0]._label_values - self.assertEqual(len(properties), 9) - self.assertEqual(properties[0].value, _RP_NAMES[3]) - self.assertEqual(properties[1].value, "sdk") - self.assertEqual(properties[2].value, 
"ikey") - self.assertEqual(properties[3].value, platform.python_version()) - self.assertEqual(properties[4].value, platform.system()) - self.assertEqual(properties[5].value, "python") - self.assertEqual(properties[6].value, ext_version) - self.assertEqual(properties[7].value, _ENDPOINT_TYPES[0]) - self.assertEqual(properties[8].value, _OPTIONS.endpoint) + for ts in metric._time_series: + properties = ts._label_values + if metric.descriptor.name == _REQ_DURATION_NAME: + self.assertEqual(len(properties), 9) + else: + self.assertEqual(len(properties), 10) + if metric.descriptor.name == _REQ_SUCCESS_NAME: + self.assertEqual(ts.points[0].value.value, 5) + self.assertEqual(properties[9].value, 200) + if metric.descriptor.name == _REQ_DURATION_NAME: + self.assertEqual(ts.points[0].value.value, 5) + if metric.descriptor.name == _REQ_FAILURE_NAME: + self.assertEqual(ts.points[0].value.value, 5) + self.assertTrue(properties[9].value in (400, 404)) + if metric.descriptor.name == _REQ_RETRY_NAME: + self.assertEqual(ts.points[0].value.value, 5) + self.assertTrue(properties[9].value in (401, 403)) + if metric.descriptor.name == _REQ_THROTTLE_NAME: + self.assertEqual(ts.points[0].value.value, 5) + self.assertTrue(properties[9].value in (402, 439)) + if metric.descriptor.name == _REQ_EXCEPTION_NAME: + self.assertEqual(ts.points[0].value.value, 5) + self.assertTrue(properties[9].value in ('Timeout', 'RequestException')) # noqa: E501 + self.assertEqual(properties[0].value, _RP_NAMES[3]) + self.assertEqual(properties[1].value, "sdk") + self.assertEqual(properties[2].value, "ikey") + self.assertEqual(properties[3].value, platform.python_version()) # noqa: E501 + self.assertEqual(properties[4].value, platform.system()) + self.assertEqual(properties[5].value, "python") + self.assertEqual(properties[6].value, ext_version) + self.assertEqual(properties[7].value, _ENDPOINT_TYPES[0]) + short_host = _shorten_host(_OPTIONS.endpoint) + self.assertEqual(properties[8].value, short_host) + 
_requests_map.clear() @mock.patch( - 'opencensus.ext.azure.metrics_exporter.statsbeat_metrics.statsbeat._get_success_count_value') # noqa: E501 - def test_get_network_metrics_zero(self, suc_mock): + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_success_count_value') # noqa: E501 + @mock.patch( + 'opencensus.ext.azure.statsbeat.statsbeat_metrics._get_average_duration_value') # noqa: E501 + def test_get_network_metrics_zero(self, suc_mock, dur_mock): # pylint: disable=protected-access + _requests_map.clear() stats = _StatsbeatMetrics(_OPTIONS) suc_mock.return_value = 0 + dur_mock.return_value = 0 metrics = stats._get_network_metrics() - self.assertEqual(len(metrics), 0) - for metric in metrics: - properties = metric._time_series[0]._label_values - self.assertEqual(len(properties), 7) - self.assertEqual(properties[0].value, _RP_NAMES[3]) - self.assertEqual(properties[1].value, "sdk") - self.assertEqual(properties[2].value, "ikey") - self.assertEqual(properties[3].value, platform.python_version()) - self.assertEqual(properties[4].value, platform.system()) - self.assertEqual(properties[5].value, "python") - self.assertEqual( - properties[6].value, ext_version) + self.assertEqual(len(metrics), 2) + self.assertEqual(metrics[0]._time_series[0].points[0].value.value, 0) + self.assertEqual(metrics[1]._time_series[0].points[0].value.value, 0) @mock.patch.dict( os.environ, @@ -457,6 +665,7 @@ def test_get_attach_metric_appsvc(self): { "FUNCTIONS_WORKER_RUNTIME": "runtime", "WEBSITE_HOSTNAME": "host_name", + "WEBSITE_SITE_NAME": "site_name", } ) def test_get_attach_metric_functions(self): @@ -482,7 +691,7 @@ def test_get_attach_metric_vm(self): _vm_data["subscriptionId"] = "sub123" _vm_data["osType"] = "linux" stats._vm_data = _vm_data - self._vm_retry = True + stats._vm_retry = True metadata_mock = mock.Mock() metadata_mock.return_value = True stats._get_azure_compute_metadata = metadata_mock @@ -506,7 +715,7 @@ def test_get_attach_metric_vm_no_os(self): 
_vm_data["subscriptionId"] = "sub123" _vm_data["osType"] = None stats._vm_data = _vm_data - self._vm_retry = True + stats._vm_retry = True metadata_mock = mock.Mock() metadata_mock.return_value = True stats._get_azure_compute_metadata = metadata_mock @@ -573,7 +782,7 @@ def test_get_azure_compute_metadata_not_vm_timeout(self): self.assertEqual(len(stats._vm_data), 0) self.assertFalse(stats._vm_retry) - def test_get_azure_compute_metadata__vm_retry(self): + def test_get_azure_compute_metadata_vm_retry(self): with mock.patch( 'requests.get', throw(requests.exceptions.RequestException) @@ -583,3 +792,57 @@ def test_get_azure_compute_metadata__vm_retry(self): self.assertFalse(vm_result) self.assertEqual(len(stats._vm_data), 0) self.assertTrue(stats._vm_retry) + + def test_shorten_host(self): + url = "https://fakehost-1.example.com/" + self.assertEqual(_shorten_host(url), "fakehost-1") + url = "https://fakehost-2.example.com/" + self.assertEqual(_shorten_host(url), "fakehost-2") + url = "http://www.fakehost-3.example.com/" + self.assertEqual(_shorten_host(url), "fakehost-3") + url = "http://www.fakehost.com/v2/track" + self.assertEqual(_shorten_host(url), "fakehost") + url = "https://www.fakehost0-4.com/" + self.assertEqual(_shorten_host(url), "fakehost0-4") + url = "https://www.fakehost-5.com" + self.assertEqual(_shorten_host(url), "fakehost-5") + url = "https://fakehost.com" + self.assertEqual(_shorten_host(url), "fakehost") + url = "http://fakehost-5/" + self.assertEqual(_shorten_host(url), "fakehost-5") + + def test_get_stats_connection_string_env(self): + cs = "InstrumentationKey=1aa11111-bbbb-1ccc-8ddd-eeeeffff3333;IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/" # noqa: E501 + with mock.patch.dict( + os.environ, { + "APPLICATION_INSIGHTS_STATS_CONNECTION_STRING": cs + } + ): + stats_cs = _get_stats_connection_string(_OPTIONS.endpoint) + self.assertEqual(stats_cs, cs) + + def test_get_stats_connection_string_non_eu(self): + with 
mock.patch.dict( + os.environ, { + "APPLICATION_INSIGHTS_STATS_CONNECTION_STRING": "" + } + ): + cs = "InstrumentationKey=1aa11111-bbbb-1ccc-8ddd-eeeeffff3333;IngestionEndpoint=https://westus-0.in.applicationinsights.azure.com/" # noqa: E501 + non_eu = Options( + connection_string=cs, + ) + stats_cs = _get_stats_connection_string(non_eu.endpoint) + self.assertEqual(stats_cs, _DEFAULT_NON_EU_STATS_CONNECTION_STRING) + + def test_get_stats_connection_string_eu(self): + with mock.patch.dict( + os.environ, { + "APPLICATION_INSIGHTS_STATS_CONNECTION_STRING": "" + } + ): + cs = "InstrumentationKey=1aa11111-bbbb-1ccc-8ddd-eeeeffff3333;IngestionEndpoint=https://northeurope-0.in.applicationinsights.azure.com/" # noqa: E501 + eu = Options( + connection_string=cs, + ) + stats_cs = _get_stats_connection_string(eu.endpoint) + self.assertEqual(stats_cs, _DEFAULT_EU_STATS_CONNECTION_STRING) diff --git a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py index 43df5ec3e..c8bbd9185 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py @@ -20,9 +20,10 @@ import mock from opencensus.ext.azure import trace_exporter +from opencensus.ext.azure.common.transport import TransportStatusCode from opencensus.trace.link import Link -TEST_FOLDER = os.path.abspath('.test.exporter') +TEST_FOLDER = os.path.abspath('.test.trace.exporter') def setUpModule(): @@ -100,14 +101,14 @@ def test_emit_exception(self, mock_logger): exporter._stop() @mock.patch('opencensus.ext.azure.trace_exporter.AzureExporter.span_data_to_envelope') # noqa: E501 - def test_emit_failure(self, span_data_to_envelope_mock): + def test_emit_retry(self, span_data_to_envelope_mock): span_data_to_envelope_mock.return_value = ['bar'] exporter = trace_exporter.AzureExporter( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', 
storage_path=os.path.join(TEST_FOLDER, self.id()), ) with mock.patch('opencensus.ext.azure.trace_exporter.AzureExporter._transmit') as transmit: # noqa: E501 - transmit.return_value = 10 + transmit.return_value = TransportStatusCode.RETRY exporter.emit(['foo']) self.assertEqual(len(os.listdir(exporter.storage.path)), 1) self.assertIsNone(exporter.storage.get()) @@ -122,7 +123,7 @@ def test_emit_success(self, span_data_to_envelope_mock): storage_path=os.path.join(TEST_FOLDER, self.id()), ) with mock.patch('opencensus.ext.azure.trace_exporter.AzureExporter._transmit') as transmit: # noqa: E501 - transmit.return_value = 0 + transmit.return_value = TransportStatusCode.SUCCESS exporter.emit([]) exporter.emit(['foo']) self.assertEqual(len(os.listdir(exporter.storage.path)), 0) @@ -497,7 +498,7 @@ def test_span_data_to_envelope(self): 'RequestData') # SpanKind.SERVER HTTP - with exceptions - envelopes = exporter.span_data_to_envelope(SpanData( + envelopes = list(exporter.span_data_to_envelope(SpanData( name='test', context=SpanContext( trace_id='6e0c63257de34c90bf9efcd03927272e', @@ -528,9 +529,10 @@ def test_span_data_to_envelope(self): same_process_as_parent_span=None, child_span_count=None, span_kind=SpanKind.SERVER, - )) + ))) + self.assertEqual(len(envelopes), 2) - envelope = next(envelopes) + envelope = envelopes[0] self.assertEqual( envelope.iKey, '12345678-1234-5678-abcd-12345678abcd') @@ -550,7 +552,7 @@ def test_span_data_to_envelope(self): envelope.data.baseType, 'ExceptionData') - envelope = next(envelopes) + envelope = envelopes[1] self.assertEqual( envelope.iKey, '12345678-1234-5678-abcd-12345678abcd') diff --git a/contrib/opencensus-ext-azure/tests/test_storage.py b/contrib/opencensus-ext-azure/tests/test_storage.py index 666f68647..62b12dea3 100644 --- a/contrib/opencensus-ext-azure/tests/test_storage.py +++ b/contrib/opencensus-ext-azure/tests/test_storage.py @@ -151,6 +151,16 @@ def test_check_storage_size_error(self): os_mock.return_value = True 
self.assertTrue(stor._check_storage_size()) + def test_check_storage_size_above_max_limit(self): + input = (1, 2, 3) + with LocalFileStorage(os.path.join(TEST_FOLDER, 'asd5'), 1) as stor: + with mock.patch('os.path.getsize') as os_mock: + os_mock.return_value = 52000000 + stor.put(input) + with mock.patch('os.path.islink') as os_mock: + os_mock.return_value = True + self.assertFalse(stor._check_storage_size()) + def test_maintenance_routine(self): with mock.patch('os.makedirs') as m: LocalFileStorage(os.path.join(TEST_FOLDER, 'baz')) diff --git a/contrib/opencensus-ext-azure/tests/test_transport_mixin.py b/contrib/opencensus-ext-azure/tests/test_transport_mixin.py index 1099b1ac9..f955adfc5 100644 --- a/contrib/opencensus-ext-azure/tests/test_transport_mixin.py +++ b/contrib/opencensus-ext-azure/tests/test_transport_mixin.py @@ -27,8 +27,12 @@ from opencensus.ext.azure.common.transport import ( _MAX_CONSECUTIVE_REDIRECTS, _MONITOR_OAUTH_SCOPE, + _REACHED_INGESTION_STATUS_CODES, TransportMixin, + TransportStatusCode, + _requests_map, ) +from opencensus.ext.azure.statsbeat import state TEST_FOLDER = os.path.abspath('.test.transport') @@ -56,18 +60,121 @@ def __init__(self, status_code, text, headers=None): # pylint: disable=W0212 class TestTransportMixin(unittest.TestCase): + def setUp(self): + # pylint: disable=protected-access + _requests_map.clear() + state._STATSBEAT_STATE = { + "INITIAL_FAILURE_COUNT": 0, + "INITIAL_SUCCESS": False, + "SHUTDOWN": False, + } + def test_check_stats_collection(self): mixin = TransportMixin() - mixin.options = Options() - mixin.options.enable_stats_metrics = True - self.assertTrue(mixin._check_stats_collection()) + with mock.patch.dict( + os.environ, { + "APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL": "", + }): + self.assertTrue(mixin._check_stats_collection()) + with mock.patch.dict( + os.environ, { + "APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL": "True", + }): + self.assertFalse(mixin._check_stats_collection()) mixin._is_stats = 
False self.assertTrue(mixin._check_stats_collection()) mixin._is_stats = True self.assertFalse(mixin._check_stats_collection()) - mixin.options.enable_stats_metrics = False + mixin._is_stats = False + state._STATSBEAT_STATE["SHUTDOWN"] = False + self.assertTrue(mixin._check_stats_collection()) + state._STATSBEAT_STATE["SHUTDOWN"] = True self.assertFalse(mixin._check_stats_collection()) + def test_initial_statsbeat_success(self): + self.assertFalse(state._STATSBEAT_STATE["INITIAL_SUCCESS"]) + mixin = TransportMixin() + mixin.options = Options() + mixin._is_stats = True + with mock.patch('requests.post') as post: + for code in _REACHED_INGESTION_STATUS_CODES: + post.return_value = MockResponse(code, 'unknown') + mixin._transmit([1]) + self.assertTrue(state._STATSBEAT_STATE["INITIAL_SUCCESS"]) + state._STATSBEAT_STATE["INITIAL_SUCCESS"] = False + + def test_exception_statsbeat_shutdown_increment(self): + mixin = TransportMixin() + mixin.options = Options() + mixin._is_stats = True + state._STATSBEAT_STATE["INITIAL_SUCCESS"] = False + state._STATSBEAT_STATE["INITIAL_FAILURE_COUNT"] = 0 + state._STATSBEAT_STATE["SHUTDOWN"] = False + with mock.patch.dict( + os.environ, { + "APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL": "", + }): + with mock.patch('requests.post', throw(Exception)): + result = mixin._transmit([1, 2, 3]) + self.assertEqual(state._STATSBEAT_STATE["INITIAL_FAILURE_COUNT"], 1) # noqa: E501 + self.assertEqual(result, TransportStatusCode.DROP) + + def test_exception_statsbeat_shutdown(self): + mixin = TransportMixin() + mixin.options = Options() + mixin._is_stats = True + state._STATSBEAT_STATE["INITIAL_SUCCESS"] = False + state._STATSBEAT_STATE["INITIAL_FAILURE_COUNT"] = 2 + state._STATSBEAT_STATE["SHUTDOWN"] = False + with mock.patch.dict( + os.environ, { + "APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL": "", + }): + with mock.patch('requests.post', throw(Exception)): + result = mixin._transmit([1, 2, 3]) + 
self.assertEqual(state._STATSBEAT_STATE["INITIAL_FAILURE_COUNT"], 3) # noqa: E501 + self.assertEqual(result, TransportStatusCode.STATSBEAT_SHUTDOWN) # noqa: E501 + + def test_status_code_statsbeat_shutdown_increment(self): + mixin = TransportMixin() + mixin.options = Options() + mixin._is_stats = True + state._STATSBEAT_STATE["INITIAL_SUCCESS"] = False + state._STATSBEAT_STATE["INITIAL_FAILURE_COUNT"] = 0 + state._STATSBEAT_STATE["SHUTDOWN"] = False + with mock.patch.dict( + os.environ, { + "APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL": "", + }): + with mock.patch('requests.post') as post: + post.return_value = MockResponse(403, 'unknown') + mixin._transmit([1, 2, 3]) + self.assertEqual(state._STATSBEAT_STATE["INITIAL_FAILURE_COUNT"], 1) # noqa: E501 + self.assertFalse(state._STATSBEAT_STATE["INITIAL_SUCCESS"]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(200, 'unknown') + mixin._transmit([1, 2, 3]) + self.assertEqual(state._STATSBEAT_STATE["INITIAL_FAILURE_COUNT"], 1) # noqa: E501 + self.assertTrue(state._STATSBEAT_STATE["INITIAL_SUCCESS"]) + + def test_status_code_statsbeat_shutdown(self): + mixin = TransportMixin() + mixin.options = Options() + mixin._is_stats = True + state._STATSBEAT_STATE["INITIAL_SUCCESS"] = False + state._STATSBEAT_STATE["INITIAL_FAILURE_COUNT"] = 2 + state._STATSBEAT_STATE["SHUTDOWN"] = False + with mock.patch.dict( + os.environ, { + "APPLICATIONINSIGHTS_STATSBEAT_DISABLED_ALL": "", + }): + with mock.patch('requests.post') as post: + post.return_value = MockResponse(403, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(state._STATSBEAT_STATE["INITIAL_FAILURE_COUNT"], 3) # noqa: E501 + self.assertFalse(state._STATSBEAT_STATE["INITIAL_SUCCESS"]) + self.assertEqual(result, TransportStatusCode.STATSBEAT_SHUTDOWN) # noqa: E501 + def test_transmission_nothing(self): mixin = TransportMixin() with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: @@ -76,7 +183,7 @@ def 
test_transmission_nothing(self): post.return_value = None mixin._transmit_from_storage() - def test_transmission_pre_timeout(self): + def test_transmission_timeout(self): mixin = TransportMixin() mixin.options = Options() with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: @@ -87,7 +194,18 @@ def test_transmission_pre_timeout(self): self.assertIsNone(mixin.storage.get()) self.assertEqual(len(os.listdir(mixin.storage.path)), 1) - def test_transmission_pre_req_exception(self): + def test_statsbeat_timeout(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post', throw(requests.Timeout)): + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(_requests_map['exception']['Timeout'], 1) + self.assertEqual(result, TransportStatusCode.RETRY) + + def test_transmission_req_exception(self): mixin = TransportMixin() mixin.options = Options() with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: @@ -98,6 +216,17 @@ def test_transmission_pre_req_exception(self): self.assertIsNone(mixin.storage.get()) self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + def test_statsbeat_req_exception(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post', throw(requests.RequestException)): + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(_requests_map['exception']['RequestException'], 1) + self.assertEqual(result, TransportStatusCode.RETRY) + def test_transmission_cred_exception(self): mixin = TransportMixin() mixin.options = Options() @@ -109,6 +238,17 @@ def test_transmission_cred_exception(self): self.assertIsNone(mixin.storage.get()) self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + 
def test_statsbeat_cred_exception(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post', throw(CredentialUnavailableError)): + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['exception']['CredentialUnavailableError'], 1) # noqa: E501 + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.DROP) + def test_transmission_client_exception(self): mixin = TransportMixin() mixin.options = Options() @@ -120,7 +260,18 @@ def test_transmission_client_exception(self): self.assertIsNone(mixin.storage.get()) self.assertEqual(len(os.listdir(mixin.storage.path)), 1) - def test_transmission_pre_exception(self): + def test_statsbeat_client_exception(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post', throw(ClientAuthenticationError)): + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['exception']['ClientAuthenticationError'], 1) # noqa: E501 + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.RETRY) + + def test_transmission_exception(self): mixin = TransportMixin() mixin.options = Options() with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: @@ -131,6 +282,17 @@ def test_transmission_pre_exception(self): self.assertIsNone(mixin.storage.get()) self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + def test_statsbeat_exception(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post', throw(Exception)): + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['exception']['Exception'], 1) + self.assertEqual(_requests_map['count'], 1) + 
self.assertEqual(result, TransportStatusCode.DROP) + @mock.patch('requests.post', return_value=mock.Mock()) def test_transmission_lease_failure(self, requests_mock): requests_mock.return_value = MockResponse(200, 'unknown') @@ -146,7 +308,7 @@ def test_transmission_lease_failure(self, requests_mock): mixin._transmit_from_storage() self.assertTrue(mixin.storage.get()) - def test_transmission_exception(self): + def test_transmission_text_exception(self): mixin = TransportMixin() mixin.options = Options() with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: @@ -172,6 +334,18 @@ def test_transmission_200(self): self.assertIsNone(mixin.storage.get()) self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + def test_statsbeat_200(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post') as post: + post.return_value = MockResponse(200, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['success'], 1) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.SUCCESS) + def test_transmission_auth(self): mixin = TransportMixin() mixin.options = Options() @@ -207,7 +381,7 @@ def test_transmission_auth(self): self.assertEqual(len(os.listdir(mixin.storage.path)), 0) credential.get_token.assert_called_once() - def test_transmission_206(self): + def test_transmission_206_invalid_data(self): mixin = TransportMixin() mixin.options = Options() with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: @@ -217,9 +391,20 @@ def test_transmission_206(self): post.return_value = MockResponse(206, 'unknown') mixin._transmit_from_storage() self.assertIsNone(mixin.storage.get()) - self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + self.assertEqual(len(os.listdir(mixin.storage.path)), 0) - def test_transmission_206_500(self): + def 
test_statsbeat_206_invalid_data(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post') as post: + post.return_value = MockResponse(206, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.DROP) + + def test_transmission_206_partial_retry(self): mixin = TransportMixin() mixin.options = Options() with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: @@ -232,12 +417,12 @@ def test_transmission_206_500(self): 'errors': [ { 'index': 0, - 'statusCode': 400, + 'statusCode': 400, # dropped 'message': '', }, { 'index': 2, - 'statusCode': 500, + 'statusCode': 500, # retry 'message': 'Internal Server Error', }, ], @@ -246,6 +431,37 @@ def test_transmission_206_500(self): self.assertEqual(len(os.listdir(mixin.storage.path)), 1) self.assertEqual(mixin.storage.get().get(), (3,)) + def test_statsbeat_206_partial_retry(self): + mixin = TransportMixin() + mixin.options = Options() + storage_mock = mock.Mock() + mixin.storage = storage_mock + with mock.patch('requests.post') as post: + post.return_value = MockResponse(206, json.dumps({ + 'itemsReceived': 5, + 'itemsAccepted': 3, + 'errors': [ + { + 'index': 0, + 'statusCode': 400, # dropped + 'message': '', + }, + { + 'index': 2, + 'statusCode': 500, # retry + 'message': 'Internal Server Error', + }, + ], + })) + result = mixin._transmit([1, 2, 3]) + # We do not record any network statsbeat for 206 status code + self.assertEqual(len(_requests_map), 2) + self.assertIsNotNone(_requests_map['duration']) + self.assertIsNone(_requests_map.get('retry')) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.DROP) + storage_mock.put.assert_called_once() + def test_transmission_206_no_retry(self): mixin = TransportMixin() mixin.options = Options() @@ -259,7 +475,7 @@ 
def test_transmission_206_no_retry(self): 'errors': [ { 'index': 0, - 'statusCode': 400, + 'statusCode': 400, # dropped 'message': '', }, ], @@ -267,6 +483,30 @@ def test_transmission_206_no_retry(self): mixin._transmit_from_storage() self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + def test_statsbeat_206_no_retry(self): + mixin = TransportMixin() + mixin.options = Options() + storage_mock = mock.Mock() + mixin.storage = storage_mock + with mock.patch('requests.post') as post: + post.return_value = MockResponse(206, json.dumps({ + 'itemsReceived': 3, + 'itemsAccepted': 2, + 'errors': [ + { + 'index': 0, + 'statusCode': 400, # dropped + 'message': '', + }, + ], + })) + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 2) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.DROP) + storage_mock.put.assert_not_called() + def test_transmission_206_bogus(self): mixin = TransportMixin() mixin.options = Options() @@ -288,17 +528,54 @@ def test_transmission_206_bogus(self): self.assertIsNone(mixin.storage.get()) self.assertEqual(len(os.listdir(mixin.storage.path)), 0) - def test_transmission_401(self): + def test_statsbeat_206_bogus(self): + mixin = TransportMixin() + mixin.options = Options() + storage_mock = mock.Mock() + mixin.storage = storage_mock + with mock.patch('requests.post') as post: + post.return_value = MockResponse(206, json.dumps({ + 'itemsReceived': 5, + 'itemsAccepted': 3, + 'errors': [ + { + 'foo': 0, + 'bar': 1, + }, + ], + })) + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(_requests_map['exception']['KeyError'], 1) + self.assertEqual(result, TransportStatusCode.DROP) + storage_mock.put.assert_not_called() + + def test_transmission_429(self): mixin = TransportMixin() mixin.options = 
Options() with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: mixin.storage = stor mixin.storage.put([1, 2, 3]) with mock.patch('requests.post') as post: - post.return_value = MockResponse(401, '{}') + post.return_value = MockResponse(429, 'unknown') mixin._transmit_from_storage() + self.assertIsNone(mixin.storage.get()) self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + def test_statsbeat_429(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post') as post: + post.return_value = MockResponse(429, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['retry'][429], 1) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.RETRY) + def test_transmission_500(self): mixin = TransportMixin() mixin.options = Options() @@ -306,21 +583,140 @@ def test_transmission_500(self): mixin.storage = stor mixin.storage.put([1, 2, 3]) with mock.patch('requests.post') as post: - post.return_value = MockResponse(500, '{}') + post.return_value = MockResponse(500, 'unknown') mixin._transmit_from_storage() self.assertIsNone(mixin.storage.get()) self.assertEqual(len(os.listdir(mixin.storage.path)), 1) - def test_transmission_400(self): + def test_statsbeat_500(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post') as post: + post.return_value = MockResponse(500, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['retry'][500], 1) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.RETRY) + + def test_transmission_502(self): mixin = TransportMixin() mixin.options = Options() with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: mixin.storage = stor 
mixin.storage.put([1, 2, 3]) with mock.patch('requests.post') as post: - post.return_value = MockResponse(400, '{}') + post.return_value = MockResponse(502, 'unknown') mixin._transmit_from_storage() - self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + self.assertIsNone(mixin.storage.get()) + self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + + def test_statsbeat_502(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post') as post: + post.return_value = MockResponse(502, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['retry'][502], 1) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.RETRY) + + def test_transmission_503(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(503, 'unknown') + mixin._transmit_from_storage() + self.assertIsNone(mixin.storage.get()) + self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + + def test_statsbeat_503(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post') as post: + post.return_value = MockResponse(503, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['retry'][503], 1) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.RETRY) + + def test_transmission_504(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + 
post.return_value = MockResponse(504, 'unknown') + mixin._transmit_from_storage() + self.assertIsNone(mixin.storage.get()) + self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + + def test_statsbeat_504(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post') as post: + post.return_value = MockResponse(504, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['retry'][504], 1) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.RETRY) + + def test_transmission_401(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(401, '{}') + mixin._transmit_from_storage() + self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + + def test_statsbeat_401(self): + mixin = TransportMixin() + mixin.options = Options() + with mock.patch('requests.post') as post: + post.return_value = MockResponse(401, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['retry'][401], 1) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.RETRY) + + def test_transmission_403(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(403, '{}') + mixin._transmit_from_storage() + self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + + def test_statsbeat_403(self): + mixin = TransportMixin() + mixin.options = Options() 
+ with mock.patch('requests.post') as post: + post.return_value = MockResponse(403, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['retry'][403], 1) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.RETRY) def test_transmission_307(self): mixin = TransportMixin() @@ -345,6 +741,43 @@ def test_transmission_307_circular_reference(self): with mock.patch('requests.post') as post: post.return_value = MockResponse(307, '{}', {"location": "https://example.com"}) # noqa: E501 result = mixin._transmit([1, 2, 3]) - self.assertEqual(result, -307) + self.assertEqual(result, TransportStatusCode.DROP) self.assertEqual(post.call_count, _MAX_CONSECUTIVE_REDIRECTS) self.assertEqual(mixin.options.endpoint, "https://example.com") + + def test_statsbeat_307(self): + mixin = TransportMixin() + mixin.options = Options() + mixin._consecutive_redirects = 0 + mixin.options.endpoint = "test.endpoint" + with mock.patch('requests.post') as post: + post.return_value = MockResponse(307, '{}', {"location": "https://example.com"}) # noqa: E501 + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['exception']['Circular Redirect'], 1) # noqa: E501 + self.assertEqual(_requests_map['count'], 10) + self.assertEqual(result, TransportStatusCode.DROP) + + def test_transmission_439(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(439, '{}') + mixin._transmit_from_storage() + self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + + def test_statsbeat_439(self): + mixin = TransportMixin() + mixin.options = Options() 
+ with mock.patch('requests.post') as post: + post.return_value = MockResponse(439, 'unknown') + result = mixin._transmit([1, 2, 3]) + self.assertEqual(len(_requests_map), 3) + self.assertIsNotNone(_requests_map['duration']) + self.assertEqual(_requests_map['throttle'][439], 1) + self.assertEqual(_requests_map['count'], 1) + self.assertEqual(result, TransportStatusCode.DROP) diff --git a/contrib/opencensus-ext-django/CHANGELOG.md b/contrib/opencensus-ext-django/CHANGELOG.md index 1228e692a..54ff9668e 100644 --- a/contrib/opencensus-ext-django/CHANGELOG.md +++ b/contrib/opencensus-ext-django/CHANGELOG.md @@ -2,6 +2,12 @@ ## Unreleased +## 0.8.0 +Released 2022-10-17 + +- Fixed support for Django 4.1 +- ([#1159](https://github.com/census-instrumentation/opencensus-python/pull/1159)) + ## 0.7.5 Released 2021-05-13 diff --git a/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py b/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py index 80436f1e9..e9c116983 100644 --- a/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py +++ b/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py @@ -146,8 +146,8 @@ def _trace_db_call(execute, sql, params, many, context): class OpencensusMiddleware(MiddlewareMixin): """Saves the request in thread local""" - def __init__(self, get_response=None): - self.get_response = get_response + def __init__(self, get_response): + super(OpencensusMiddleware, self).__init__(get_response) settings = getattr(django.conf.settings, 'OPENCENSUS', {}) settings = settings.get('TRACE', {}) diff --git a/contrib/opencensus-ext-django/tests/test_django_db_middleware.py b/contrib/opencensus-ext-django/tests/test_django_db_middleware.py index 18bf385c9..4c6969ae4 100644 --- a/contrib/opencensus-ext-django/tests/test_django_db_middleware.py +++ b/contrib/opencensus-ext-django/tests/test_django_db_middleware.py @@ -18,11 +18,16 @@ import django import mock import pytest +from django.http import HttpResponse from 
django.test.utils import teardown_test_environment from opencensus.trace import execution_context +def get_response(request): + return HttpResponse() + + class TestOpencensusDatabaseMiddleware(unittest.TestCase): def setUp(self): from django.conf import settings as django_settings @@ -50,7 +55,7 @@ def test_process_request(self): mock_execute = mock.Mock() mock_execute.return_value = "Mock result" - middleware.OpencensusMiddleware() + middleware.OpencensusMiddleware(get_response) patch_no_tracer = mock.patch( 'opencensus.ext.django.middleware._get_current_tracer', diff --git a/contrib/opencensus-ext-django/tests/test_django_middleware.py b/contrib/opencensus-ext-django/tests/test_django_middleware.py index 522ea458c..e502e0be0 100644 --- a/contrib/opencensus-ext-django/tests/test_django_middleware.py +++ b/contrib/opencensus-ext-django/tests/test_django_middleware.py @@ -17,6 +17,7 @@ import unittest import mock +from django.http import HttpResponse from django.test import RequestFactory from django.test.utils import teardown_test_environment @@ -27,6 +28,10 @@ from opencensus.trace.propagation import trace_context_http_header_format +def get_response(request): + return HttpResponse() + + class TestOpencensusMiddleware(unittest.TestCase): def setUp(self): @@ -44,7 +49,7 @@ def tearDown(self): def test_constructor_default(self): from opencensus.ext.django import middleware - middleware = middleware.OpencensusMiddleware() + middleware = middleware.OpencensusMiddleware(get_response) assert isinstance(middleware.sampler, samplers.ProbabilitySampler) assert isinstance(middleware.exporter, print_exporter.PrintExporter) @@ -69,7 +74,7 @@ def test_configuration(self): settings) with patch_settings: - middleware = middleware.OpencensusMiddleware() + middleware = middleware.OpencensusMiddleware(get_response) assert isinstance(middleware.sampler, samplers.AlwaysOnSampler) assert isinstance(middleware.exporter, print_exporter.PrintExporter) @@ -100,7 +105,7 @@ def 
test_process_request(self): settings) with patch_settings: - middleware_obj = middleware.OpencensusMiddleware() + middleware_obj = middleware.OpencensusMiddleware(get_response) # test process_request middleware_obj.process_request(django_request) @@ -148,7 +153,7 @@ def test_excludelist_path(self): settings) with patch_settings: - middleware_obj = middleware.OpencensusMiddleware() + middleware_obj = middleware.OpencensusMiddleware(get_response) django_request = RequestFactory().get('/test_excludelist_path') disabled = utils.disable_tracing_url(django_request.path, @@ -204,7 +209,7 @@ def test_process_response(self): settings) with patch_settings: - middleware_obj = middleware.OpencensusMiddleware() + middleware_obj = middleware.OpencensusMiddleware(get_response) middleware_obj.process_request(django_request) tracer = middleware._get_current_tracer() @@ -259,7 +264,7 @@ def test_process_response_unfinished_child_span(self): settings) with patch_settings: - middleware_obj = middleware.OpencensusMiddleware() + middleware_obj = middleware.OpencensusMiddleware(get_response) middleware_obj.process_request(django_request) tracer = middleware._get_current_tracer() @@ -316,7 +321,7 @@ def test_process_exception(self): settings) with patch_settings: - middleware_obj = middleware.OpencensusMiddleware() + middleware_obj = middleware.OpencensusMiddleware(get_response) tb = None try: diff --git a/contrib/opencensus-ext-django/version.py b/contrib/opencensus-ext-django/version.py index dffc606db..671fc3d04 100644 --- a/contrib/opencensus-ext-django/version.py +++ b/contrib/opencensus-ext-django/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.8.dev0' +__version__ = '0.9.dev0' diff --git a/contrib/opencensus-ext-fastapi/CHANGELOG.md b/contrib/opencensus-ext-fastapi/CHANGELOG.md new file mode 100644 index 000000000..f4c2570de --- /dev/null +++ b/contrib/opencensus-ext-fastapi/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## Unreleased + +## 0.1.0 + +Released 2023-03-10 + +- Initial version +([#1124](https://github.com/census-instrumentation/opencensus-python/pull/1124)) diff --git a/contrib/opencensus-ext-fastapi/README.rst b/contrib/opencensus-ext-fastapi/README.rst new file mode 100644 index 000000000..7946d56ed --- /dev/null +++ b/contrib/opencensus-ext-fastapi/README.rst @@ -0,0 +1,50 @@ +OpenCensus FastAPI Integration +============================================================================ + +|pypi| + +.. |pypi| image:: https://badge.fury.io/py/opencensus-ext-fastapi.svg + :target: https://pypi.org/project/opencensus-ext-fastapi/ + +Installation +------------ + +:: + + pip install opencensus-ext-fastapi + +Usage +----- + +.. code:: python + + from fastapi import FastAPI + from opencensus.ext.fastapi.fastapi_middleware import FastAPIMiddleware + + app = FastAPI(__name__) + app.add_middleware(FastAPIMiddleware) + + @app.get('/') + def hello(): + return 'Hello World!' + +Additional configuration can be provided, please read +`Customization `_ +for a complete reference. + +.. 
code:: python + + app.add_middleware( + FastAPIMiddleware, + excludelist_paths=["paths"], + excludelist_hostnames=["hostnames"], + sampler=sampler, + exporter=exporter, + propagator=propagator, + ) + + +References +---------- + +* `OpenCensus Project `_ diff --git a/contrib/opencensus-ext-fastapi/opencensus/__init__.py b/contrib/opencensus-ext-fastapi/opencensus/__init__.py new file mode 100644 index 000000000..69e3be50d --- /dev/null +++ b/contrib/opencensus-ext-fastapi/opencensus/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/contrib/opencensus-ext-fastapi/opencensus/ext/__init__.py b/contrib/opencensus-ext-fastapi/opencensus/ext/__init__.py new file mode 100644 index 000000000..69e3be50d --- /dev/null +++ b/contrib/opencensus-ext-fastapi/opencensus/ext/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/contrib/opencensus-ext-fastapi/opencensus/ext/fastapi/__init__.py b/contrib/opencensus-ext-fastapi/opencensus/ext/fastapi/__init__.py new file mode 100644 index 000000000..69e3be50d --- /dev/null +++ b/contrib/opencensus-ext-fastapi/opencensus/ext/fastapi/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/contrib/opencensus-ext-fastapi/opencensus/ext/fastapi/fastapi_middleware.py b/contrib/opencensus-ext-fastapi/opencensus/ext/fastapi/fastapi_middleware.py new file mode 100644 index 000000000..6dfd1a812 --- /dev/null +++ b/contrib/opencensus-ext-fastapi/opencensus/ext/fastapi/fastapi_middleware.py @@ -0,0 +1,182 @@ +# Copyright 2022, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import traceback +from typing import Union + +from starlette.middleware.base import ( + BaseHTTPMiddleware, + RequestResponseEndpoint, +) +from starlette.requests import Request +from starlette.responses import Response +from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR +from starlette.types import ASGIApp + +from opencensus.trace import ( + attributes_helper, + execution_context, + integrations, + print_exporter, + samplers, +) +from opencensus.trace import span as span_module +from opencensus.trace import tracer as tracer_module +from opencensus.trace import utils +from opencensus.trace.blank_span import BlankSpan +from opencensus.trace.propagation import trace_context_http_header_format +from opencensus.trace.span import Span + +HTTP_HOST = attributes_helper.COMMON_ATTRIBUTES["HTTP_HOST"] +HTTP_METHOD = attributes_helper.COMMON_ATTRIBUTES["HTTP_METHOD"] +HTTP_PATH = attributes_helper.COMMON_ATTRIBUTES["HTTP_PATH"] +HTTP_ROUTE = attributes_helper.COMMON_ATTRIBUTES["HTTP_ROUTE"] +HTTP_URL = attributes_helper.COMMON_ATTRIBUTES["HTTP_URL"] +HTTP_STATUS_CODE = attributes_helper.COMMON_ATTRIBUTES["HTTP_STATUS_CODE"] +ERROR_MESSAGE = attributes_helper.COMMON_ATTRIBUTES['ERROR_MESSAGE'] +ERROR_NAME = attributes_helper.COMMON_ATTRIBUTES['ERROR_NAME'] +STACKTRACE = attributes_helper.COMMON_ATTRIBUTES["STACKTRACE"] + +module_logger = logging.getLogger(__name__) + + +class FastAPIMiddleware(BaseHTTPMiddleware): + """FastAPI middleware to automatically trace requests. 
+ + :type app: :class: `~fastapi.FastAPI` + :param app: A fastapi application. + + :type excludelist_paths: list + :param excludelist_paths: Paths that do not trace. + + :type excludelist_hostnames: list + :param excludelist_hostnames: Hostnames that do not trace. + + :type sampler: :class:`~opencensus.trace.samplers.base.Sampler` + :param sampler: A sampler. It should extend from the base + :class:`.Sampler` type and implement + :meth:`.Sampler.should_sample`. Defaults to + :class:`.ProbabilitySampler`. Other options include + :class:`.AlwaysOnSampler` and :class:`.AlwaysOffSampler`. + + :type exporter: :class:`~opencensus.trace.base_exporter.exporter` + :param exporter: An exporter. Default to + :class:`.PrintExporter`. The rest options are + :class:`.FileExporter`, :class:`.LoggingExporter` and + trace exporter extensions. + + :type propagator: :class: 'object' + :param propagator: A propagator. Default to + :class:`.TraceContextPropagator`. The rest options + are :class:`.BinaryFormatPropagator`, + :class:`.GoogleCloudFormatPropagator` and + :class:`.TextFormatPropagator`. 
+ """ + + def __init__( + self, + app: ASGIApp, + excludelist_paths=None, + excludelist_hostnames=None, + sampler=None, + exporter=None, + propagator=None, + ) -> None: + super().__init__(app) + self.excludelist_paths = excludelist_paths + self.excludelist_hostnames = excludelist_hostnames + self.sampler = sampler or samplers.AlwaysOnSampler() + self.exporter = exporter or print_exporter.PrintExporter() + self.propagator = ( + propagator or + trace_context_http_header_format.TraceContextPropagator() + ) + + # pylint: disable=protected-access + integrations.add_integration(integrations._Integrations.FASTAPI) + + def _prepare_tracer(self, request: Request) -> tracer_module.Tracer: + span_context = self.propagator.from_headers(request.headers) + tracer = tracer_module.Tracer( + span_context=span_context, + sampler=self.sampler, + exporter=self.exporter, + propagator=self.propagator, + ) + return tracer + + def _before_request(self, span: Union[Span, BlankSpan], request: Request): + span.span_kind = span_module.SpanKind.SERVER + span.name = "[{}]{}".format(request.method, request.url) + span.add_attribute(HTTP_HOST, request.url.hostname) + span.add_attribute(HTTP_METHOD, request.method) + span.add_attribute(HTTP_PATH, request.url.path) + span.add_attribute(HTTP_URL, str(request.url)) + span.add_attribute(HTTP_ROUTE, request.url.path) + execution_context.set_opencensus_attr( + "excludelist_hostnames", self.excludelist_hostnames + ) + + def _after_request(self, span: Union[Span, BlankSpan], response: Response): + span.add_attribute(HTTP_STATUS_CODE, response.status_code) + + def _handle_exception(self, + span: Union[Span, BlankSpan], exception: Exception): + span.add_attribute(ERROR_NAME, exception.__class__.__name__) + span.add_attribute(ERROR_MESSAGE, str(exception)) + span.add_attribute( + STACKTRACE, + "\n".join(traceback.format_tb(exception.__traceback__))) + span.add_attribute(HTTP_STATUS_CODE, HTTP_500_INTERNAL_SERVER_ERROR) + + async def dispatch( + self, 
request: Request, call_next: RequestResponseEndpoint + ) -> Response: + + # Do not trace if the url is in the exclude list + if utils.disable_tracing_url(str(request.url), self.excludelist_paths): + return await call_next(request) + + try: + tracer = self._prepare_tracer(request) + span = tracer.start_span() + except Exception: # pragma: NO COVER + module_logger.error("Failed to trace request", exc_info=True) + return await call_next(request) + + try: + self._before_request(span, request) + except Exception: # pragma: NO COVER + module_logger.error("Failed to trace request", exc_info=True) + + try: + response = await call_next(request) + except Exception as err: # pragma: NO COVER + try: + self._handle_exception(span, err) + tracer.end_span() + tracer.finish() + except Exception: # pragma: NO COVER + module_logger.error("Failed to trace response", exc_info=True) + raise err + + try: + self._after_request(span, response) + tracer.end_span() + tracer.finish() + except Exception: # pragma: NO COVER + module_logger.error("Failed to trace response", exc_info=True) + + return response diff --git a/contrib/opencensus-ext-fastapi/setup.cfg b/contrib/opencensus-ext-fastapi/setup.cfg new file mode 100644 index 000000000..2a9acf13d --- /dev/null +++ b/contrib/opencensus-ext-fastapi/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 diff --git a/contrib/opencensus-ext-fastapi/setup.py b/contrib/opencensus-ext-fastapi/setup.py new file mode 100644 index 000000000..f4ade731e --- /dev/null +++ b/contrib/opencensus-ext-fastapi/setup.py @@ -0,0 +1,49 @@ +# Copyright 2022, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from setuptools import find_packages, setup + +from version import __version__ + +setup( + name='opencensus-ext-fastapi', + version=__version__, # noqa + author='OpenCensus Authors', + author_email='census-developers@googlegroups.com', + classifiers=[ + 'Intended Audience :: Developers', + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + ], + description='OpenCensus FastAPI Integration', + include_package_data=True, + long_description=open('README.rst').read(), + install_requires=[ + 'fastapi >= 0.75.2', + 'opencensus >= 0.9.dev0, < 1.0.0', + ], + extras_require={}, + license='Apache-2.0', + packages=find_packages(exclude=('tests',)), + namespace_packages=[], + url='https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-fastapi', # noqa: E501 + zip_safe=False, +) diff --git a/contrib/opencensus-ext-fastapi/tests/test_fastapi_middleware.py b/contrib/opencensus-ext-fastapi/tests/test_fastapi_middleware.py new file mode 100644 index 000000000..e2d8f113f --- /dev/null +++ b/contrib/opencensus-ext-fastapi/tests/test_fastapi_middleware.py @@ -0,0 +1,197 @@ +# Copyright 2022, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys +import traceback +import unittest +from unittest.mock import ANY + +import mock +from fastapi import FastAPI +from starlette.testclient import TestClient + +from opencensus.ext.fastapi.fastapi_middleware import FastAPIMiddleware +from opencensus.trace import print_exporter, samplers +from opencensus.trace import span as span_module +from opencensus.trace import tracer as tracer_module +from opencensus.trace.propagation import trace_context_http_header_format + + +class FastAPITestException(Exception): + pass + + +class TestFastAPIMiddleware(unittest.TestCase): + + def tearDown(self) -> None: + from opencensus.trace import execution_context + execution_context.clear() + + return super().tearDown() + + def create_app(self): + app = FastAPI() + + @app.get('/') + def index(): + return 'test fastapi trace' # pragma: NO COVER + + @app.get('/wiki/{entry}') + def wiki(entry): + return 'test fastapi trace' # pragma: NO COVER + + @app.get('/health') + def health_check(): + return 'test health check' # pragma: NO COVER + + @app.get('/error') + def error(): + raise FastAPITestException('test error') + + return app + + def test_constructor_default(self): + app = self.create_app() + middleware = FastAPIMiddleware(app) + + self.assertIs(middleware.app, app) + self.assertIsNone(middleware.excludelist_paths) + self.assertIsNone(middleware.excludelist_hostnames) + self.assertIsInstance(middleware.sampler, samplers.AlwaysOnSampler) + self.assertIsInstance(middleware.exporter, + print_exporter.PrintExporter) + 
self.assertIsInstance( + middleware.propagator, + trace_context_http_header_format.TraceContextPropagator) + + def test_constructor_explicit(self): + excludelist_paths = mock.Mock() + excludelist_hostnames = mock.Mock() + sampler = mock.Mock() + exporter = mock.Mock() + propagator = mock.Mock() + + app = self.create_app() + middleware = FastAPIMiddleware( + app=app, + excludelist_paths=excludelist_paths, + excludelist_hostnames=excludelist_hostnames, + sampler=sampler, + exporter=exporter, + propagator=propagator) + + self.assertEqual(middleware.app, app) + self.assertEqual(middleware.excludelist_paths, excludelist_paths) + self.assertEqual( + middleware.excludelist_hostnames, excludelist_hostnames) + self.assertEqual(middleware.sampler, sampler) + self.assertEqual(middleware.exporter, exporter) + self.assertEqual(middleware.propagator, propagator) + + @mock.patch.object(tracer_module.Tracer, "finish") + @mock.patch.object(tracer_module.Tracer, "end_span") + @mock.patch.object(tracer_module.Tracer, "start_span") + def test_request(self, mock_m1, mock_m2, mock_m3): + app = self.create_app() + app.add_middleware( + FastAPIMiddleware, sampler=samplers.AlwaysOnSampler()) + + test_client = TestClient(app) + test_client.get("/wiki/Rabbit") + + mock_span = mock_m1.return_value + self.assertEqual(mock_span.add_attribute.call_count, 6) + mock_span.add_attribute.assert_has_calls([ + mock.call("http.host", "testserver"), + mock.call("http.method", "GET"), + mock.call("http.path", "/wiki/Rabbit"), + mock.call("http.url", "http://testserver/wiki/Rabbit"), + mock.call("http.route", "/wiki/Rabbit"), + mock.call("http.status_code", 200) + ]) + mock_m2.assert_called_once() + mock_m3.assert_called_once() + + self.assertEqual( + mock_span.span_kind, + span_module.SpanKind.SERVER) + self.assertEqual( + mock_span.name, + "[{}]{}".format("GET", "http://testserver/wiki/Rabbit")) + + @mock.patch.object(FastAPIMiddleware, "_prepare_tracer") + def test_request_excludelist(self, mock_m): + 
app = self.create_app() + app.add_middleware( + FastAPIMiddleware, + excludelist_paths=["health"], + sampler=samplers.AlwaysOnSampler()) + + test_client = TestClient(app) + test_client.get("/health") + + mock_m.assert_not_called() + + @mock.patch.object(tracer_module.Tracer, "finish") + @mock.patch.object(tracer_module.Tracer, "end_span") + @mock.patch.object(tracer_module.Tracer, "start_span") + def test_request_exception(self, mock_m1, mock_m2, mock_m3): + app = self.create_app() + app.add_middleware(FastAPIMiddleware) + + test_client = TestClient(app) + + with self.assertRaises(FastAPITestException): + test_client.get("/error") + + mock_span = mock_m1.return_value + self.assertEqual(mock_span.add_attribute.call_count, 9) + mock_span.add_attribute.assert_has_calls([ + mock.call("http.host", "testserver"), + mock.call("http.method", "GET"), + mock.call("http.path", "/error"), + mock.call("http.url", "http://testserver/error"), + mock.call("http.route", "/error"), + mock.call("error.name", "FastAPITestException"), + mock.call("error.message", "test error"), + mock.call("stacktrace", ANY), + mock.call("http.status_code", 500) + ]) + mock_m2.assert_called_once() + mock_m3.assert_called_once() + + def test_request_exception_stacktrace(self): + tb = None + try: + raise RuntimeError("bork bork bork") + except Exception as exc: + test_exception = exc + if hasattr(exc, "__traceback__"): + tb = exc.__traceback__ + else: + _, _, tb = sys.exc_info() + + app = self.create_app() + middleware = FastAPIMiddleware(app) + + mock_span = mock.Mock() + mock_span.add_attribute = mock.Mock() + middleware._handle_exception(mock_span, test_exception) + + mock_span.add_attribute.assert_has_calls([ + mock.call("error.name", "RuntimeError"), + mock.call("error.message", "bork bork bork"), + mock.call("stacktrace", "\n".join(traceback.format_tb(tb))), + mock.call("http.status_code", 500) + ]) diff --git a/contrib/opencensus-ext-fastapi/version.py b/contrib/opencensus-ext-fastapi/version.py 
new file mode 100644 index 000000000..50e4d191e --- /dev/null +++ b/contrib/opencensus-ext-fastapi/version.py @@ -0,0 +1,15 @@ +# Copyright 2022, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-flask/CHANGELOG.md b/contrib/opencensus-ext-flask/CHANGELOG.md index 3b301655f..1218b195d 100644 --- a/contrib/opencensus-ext-flask/CHANGELOG.md +++ b/contrib/opencensus-ext-flask/CHANGELOG.md @@ -2,6 +2,24 @@ ## Unreleased +## 0.8.2 +Released 2023-03-10 + +- Add exception information to span attributes +([#1188](https://github.com/census-instrumentation/opencensus-python/pull/1188)) + +## 0.8.1 +Released 2022-08-03 + +- Move `version.py` file into `common` folder +([#1143](https://github.com/census-instrumentation/opencensus-python/pull/1143)) + +## 0.8.0 +Released 2022-04-20 + +- Add support for Flask 2 +([#1032](https://github.com/census-instrumentation/opencensus-python/pull/1032)) + ## 0.7.5 Released 2021-05-13 diff --git a/contrib/opencensus-ext-flask/opencensus/ext/flask/common/__init__.py b/contrib/opencensus-ext-flask/opencensus/ext/flask/common/__init__.py new file mode 100644 index 000000000..69e3be50d --- /dev/null +++ b/contrib/opencensus-ext-flask/opencensus/ext/flask/common/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/contrib/opencensus-ext-flask/version.py 
b/contrib/opencensus-ext-flask/opencensus/ext/flask/common/version.py similarity index 95% rename from contrib/opencensus-ext-flask/version.py rename to contrib/opencensus-ext-flask/opencensus/ext/flask/common/version.py index dffc606db..671fc3d04 100644 --- a/contrib/opencensus-ext-flask/version.py +++ b/contrib/opencensus-ext-flask/opencensus/ext/flask/common/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.8.dev0' +__version__ = '0.9.dev0' diff --git a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py index 3900c4d61..08ccc10ab 100644 --- a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py +++ b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py @@ -16,6 +16,7 @@ import logging import sys +import traceback import flask from google.rpc import code_pb2 @@ -40,6 +41,9 @@ HTTP_ROUTE = attributes_helper.COMMON_ATTRIBUTES['HTTP_ROUTE'] HTTP_URL = attributes_helper.COMMON_ATTRIBUTES['HTTP_URL'] HTTP_STATUS_CODE = attributes_helper.COMMON_ATTRIBUTES['HTTP_STATUS_CODE'] +ERROR_MESSAGE = attributes_helper.COMMON_ATTRIBUTES['ERROR_MESSAGE'] +ERROR_NAME = attributes_helper.COMMON_ATTRIBUTES['ERROR_NAME'] +STACKTRACE = attributes_helper.COMMON_ATTRIBUTES['STACKTRACE'] EXCLUDELIST_PATHS = 'EXCLUDELIST_PATHS' EXCLUDELIST_HOSTNAMES = 'EXCLUDELIST_HOSTNAMES' @@ -217,16 +221,28 @@ def _teardown_request(self, exception): code=code_pb2.UNKNOWN, message=str(exception) ) - # try attaching the stack trace to the span, only populated - # if the app has 'PROPAGATE_EXCEPTIONS', 'DEBUG', or - # 'TESTING' enabled - exc_type, _, exc_traceback = sys.exc_info() + span.add_attribute( + attribute_key=ERROR_NAME, + attribute_value=exception.__class__.__name__) + span.add_attribute( + attribute_key=ERROR_MESSAGE, + attribute_value=str(exception)) + + if 
hasattr(exception, '__traceback__'): + exc_traceback = exception.__traceback__ + else: + exc_type, _, exc_traceback = sys.exc_info() if exc_traceback is not None: span.stack_trace = ( stack_trace.StackTrace.from_traceback( exc_traceback ) ) + span.add_attribute( + attribute_key=STACKTRACE, + attribute_value='\n'.join( + traceback.format_tb(exc_traceback)) + ) tracer.end_span() tracer.finish() diff --git a/contrib/opencensus-ext-flask/setup.py b/contrib/opencensus-ext-flask/setup.py index 75616980c..63aa51329 100644 --- a/contrib/opencensus-ext-flask/setup.py +++ b/contrib/opencensus-ext-flask/setup.py @@ -12,13 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os + from setuptools import find_packages, setup -from version import __version__ +BASE_DIR = os.path.dirname(__file__) +VERSION_FILENAME = os.path.join( + BASE_DIR, "opencensus", "ext", "flask", "common", "version.py" +) +PACKAGE_INFO = {} +with open(VERSION_FILENAME) as f: + exec(f.read(), PACKAGE_INFO) setup( name='opencensus-ext-flask', - version=__version__, # noqa + version=PACKAGE_INFO["__version__"], # noqa author='OpenCensus Authors', author_email='census-developers@googlegroups.com', classifiers=[ @@ -41,8 +49,8 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'flask >= 0.12.3, < 2.0.0, != 1.1.3', - 'opencensus >= 0.9.dev0, < 1.0.0', + 'flask >= 0.12.3, < 3.0.0, != 1.1.3', + 'opencensus >= 0.12.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py index 4ef5a9982..218acab34 100644 --- a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py +++ b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py @@ -383,6 +383,13 @@ def test_teardown_include_exception(self): exported_spandata.status.canonical_code, code_pb2.UNKNOWN ) 
self.assertEqual(exported_spandata.status.description, 'error') + self.assertEqual( + exported_spandata.attributes["error.name"], 'FlaskTestException' + ) + self.assertEqual( + exported_spandata.attributes["error.message"], 'error' + ) + self.assertIsNotNone(exported_spandata.attributes["error.message"]) def test_teardown_include_exception_and_traceback(self): mock_exporter = mock.MagicMock() diff --git a/contrib/opencensus-ext-grpc/opencensus/ext/grpc/server_interceptor.py b/contrib/opencensus-ext-grpc/opencensus/ext/grpc/server_interceptor.py index 7b82773f2..e8ffd6b91 100644 --- a/contrib/opencensus-ext-grpc/opencensus/ext/grpc/server_interceptor.py +++ b/contrib/opencensus-ext-grpc/opencensus/ext/grpc/server_interceptor.py @@ -25,11 +25,40 @@ from opencensus.trace import tracer as tracer_module from opencensus.trace.propagation import binary_format -ATTRIBUTE_COMPONENT = 'COMPONENT' -ATTRIBUTE_ERROR_NAME = 'ERROR_NAME' -ATTRIBUTE_ERROR_MESSAGE = 'ERROR_MESSAGE' +COMPONENT = attributes_helper.COMMON_ATTRIBUTES['COMPONENT'] +ERROR_NAME = attributes_helper.COMMON_ATTRIBUTES['ERROR_NAME'] +ERROR_MESSAGE = attributes_helper.COMMON_ATTRIBUTES['ERROR_MESSAGE'] + +HTTP_HOST = attributes_helper.COMMON_ATTRIBUTES['HTTP_HOST'] +HTTP_METHOD = attributes_helper.COMMON_ATTRIBUTES['HTTP_METHOD'] +HTTP_PATH = attributes_helper.COMMON_ATTRIBUTES['HTTP_PATH'] +HTTP_ROUTE = attributes_helper.COMMON_ATTRIBUTES['HTTP_ROUTE'] +HTTP_URL = attributes_helper.COMMON_ATTRIBUTES['HTTP_URL'] +HTTP_STATUS_CODE = attributes_helper.COMMON_ATTRIBUTES['HTTP_STATUS_CODE'] +GRPC_METHOD = attributes_helper.GRPC_ATTRIBUTES['GRPC_METHOD'] + RECV_PREFIX = 'Recv' +GRPC_HTTP_STATUS_MAPPING = { + grpc.StatusCode.OK: 200, + grpc.StatusCode.FAILED_PRECONDITION: 400, + grpc.StatusCode.INVALID_ARGUMENT: 400, + grpc.StatusCode.OUT_OF_RANGE: 400, + grpc.StatusCode.UNAUTHENTICATED: 401, + grpc.StatusCode.PERMISSION_DENIED: 403, + grpc.StatusCode.NOT_FOUND: 404, + grpc.StatusCode.ABORTED: 409, + 
grpc.StatusCode.ALREADY_EXISTS: 409, + grpc.StatusCode.RESOURCE_EXHAUSTED: 429, + grpc.StatusCode.CANCELLED: 499, + grpc.StatusCode.UNKNOWN: 500, + grpc.StatusCode.INTERNAL: 500, + grpc.StatusCode.DATA_LOSS: 500, + grpc.StatusCode.UNIMPLEMENTED: 501, + grpc.StatusCode.UNAVAILABLE: 503, + grpc.StatusCode.DEADLINE_EXCEEDED: 504 +} + class OpenCensusServerInterceptor(grpc.ServerInterceptor): def __init__(self, sampler=None, exporter=None): @@ -56,6 +85,12 @@ def new_behavior(request_or_iterator, servicer_context): # invoke the original rpc behavior response_or_iterator = behavior(request_or_iterator, servicer_context) + + http_status_code = _convert_grpc_code_to_http_status_code( + servicer_context._state.code + ) + span.add_attribute(HTTP_STATUS_CODE, http_status_code) + if response_streaming: response_or_iterator = grpc_utils.wrap_iter_with_message_events( # noqa: E501 request_or_response_iter=response_or_iterator, @@ -107,28 +142,60 @@ def _start_server_span(self, servicer_context): ) span.span_kind = span_module.SpanKind.SERVER + + grpc_call_details = servicer_context._rpc_event.call_details + grpc_host = grpc_call_details.host.decode('utf-8') + grpc_method = grpc_call_details.method.decode('utf-8') + + tracer.add_attribute_to_current_span( + COMPONENT, 'grpc' + ) + tracer.add_attribute_to_current_span( + GRPC_METHOD, grpc_method + ) + + tracer.add_attribute_to_current_span( + HTTP_HOST, grpc_host + ) + tracer.add_attribute_to_current_span( + HTTP_METHOD, 'POST' + ) + tracer.add_attribute_to_current_span( + HTTP_ROUTE, grpc_method + ) + tracer.add_attribute_to_current_span( + HTTP_PATH, grpc_method + ) tracer.add_attribute_to_current_span( - attribute_key=attributes_helper.COMMON_ATTRIBUTES.get( - ATTRIBUTE_COMPONENT), - attribute_value='grpc') + HTTP_URL, 'grpc://' + grpc_host + grpc_method + ) execution_context.set_opencensus_tracer(tracer) execution_context.set_current_span(span) return span +def _convert_grpc_code_to_http_status_code(grpc_state_code): + """ + 
Converts a gRPC state code into the corresponding HTTP response status. + See: + https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto + """ + if grpc_state_code is None: + return 200 + else: + return GRPC_HTTP_STATUS_MAPPING.get(grpc_state_code, 500) + + def _add_exc_info(span): exc_type, exc_value, tb = sys.exc_info() - span.add_attribute( - attributes_helper.COMMON_ATTRIBUTES.get( - ATTRIBUTE_ERROR_MESSAGE), - str(exc_value) - ) + span.add_attribute(ERROR_MESSAGE, str(exc_value)) span.stack_trace = stack_trace.StackTrace.from_traceback(tb) span.status = status.Status( code=code_pb2.UNKNOWN, message=str(exc_value) ) + span.add_attribute(HTTP_STATUS_CODE, 500) def _wrap_rpc_behavior(handler, fn): diff --git a/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py b/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py index 850c036fa..d74f3003b 100644 --- a/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py +++ b/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py @@ -14,6 +14,7 @@ import unittest +import grpc import mock from google.rpc import code_pb2 @@ -22,6 +23,9 @@ from opencensus.trace import execution_context from opencensus.trace import span as span_module +MOCK_HOST = b'localhost:5000' +MOCK_METHOD = b'/helloworld.Greeter/SayHello' + class TestOpenCensusServerInterceptor(unittest.TestCase): def test_constructor(self): @@ -38,7 +42,10 @@ def test_intercept_service_no_metadata(self): '.tracer_module.Tracer', MockTracer) mock_context = mock.Mock() mock_context.invocation_metadata = mock.Mock(return_value=None) - mock_context._rpc_event.call_details.method = 'hello' + + mock_context._rpc_event.call_details.host = MOCK_HOST + mock_context._rpc_event.call_details.method = MOCK_METHOD + mock_context._state.code = grpc.StatusCode.OK interceptor = server_interceptor.OpenCensusServerInterceptor( None, None) mock_handler = mock.Mock() @@ -53,6 +60,13 @@ def test_intercept_service_no_metadata(self): 
expected_attributes = { 'component': 'grpc', + 'grpc.method': '/helloworld.Greeter/SayHello', + 'http.host': 'localhost:5000', + 'http.method': 'POST', + 'http.route': '/helloworld.Greeter/SayHello', + 'http.path': '/helloworld.Greeter/SayHello', + 'http.url': 'grpc://localhost:5000/helloworld.Greeter/SayHello', + 'http.status_code': 200 } self.assertEqual( @@ -78,7 +92,9 @@ def test_intercept_service(self): mock_handler.response_streaming = rsp_streaming mock_continuation = mock.Mock(return_value=mock_handler) - mock_context._rpc_event.call_details.method = 'hello' + mock_context._rpc_event.call_details.host = MOCK_HOST + mock_context._rpc_event.call_details.method = MOCK_METHOD + mock_context._state.code = grpc.StatusCode.OK interceptor = server_interceptor.OpenCensusServerInterceptor( None, None) @@ -89,6 +105,13 @@ def test_intercept_service(self): expected_attributes = { 'component': 'grpc', + 'grpc.method': '/helloworld.Greeter/SayHello', + 'http.host': 'localhost:5000', + 'http.method': 'POST', + 'http.route': '/helloworld.Greeter/SayHello', + 'http.path': '/helloworld.Greeter/SayHello', + 'http.url': 'grpc://localhost:5000/helloworld.Greeter/SayHello', # noqa: E501 + 'http.status_code': 200 } self.assertEqual( @@ -110,7 +133,9 @@ def test_intercept_handler_exception(self): None, None) mock_context = mock.Mock() mock_context.invocation_metadata = mock.Mock(return_value=None) - mock_context._rpc_event.call_details.method = 'hello' + + mock_context._rpc_event.call_details.host = MOCK_HOST + mock_context._rpc_event.call_details.method = MOCK_METHOD mock_handler = mock.Mock() mock_handler.request_streaming = req_streaming mock_handler.response_streaming = rsp_streaming @@ -128,6 +153,13 @@ def test_intercept_handler_exception(self): expected_attributes = { 'component': 'grpc', + 'grpc.method': '/helloworld.Greeter/SayHello', + 'http.host': 'localhost:5000', + 'http.method': 'POST', + 'http.route': '/helloworld.Greeter/SayHello', + 'http.path': 
'/helloworld.Greeter/SayHello', + 'http.url': 'grpc://localhost:5000/helloworld.Greeter/SayHello', # noqa: E501 + 'http.status_code': 500, 'error.message': 'Test' } diff --git a/contrib/opencensus-ext-httpx/CHANGELOG.md b/contrib/opencensus-ext-httpx/CHANGELOG.md new file mode 100644 index 000000000..755e63048 --- /dev/null +++ b/contrib/opencensus-ext-httpx/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## Unreleased + +## 0.1.0 + +Released 2023-01-18 + +- Initial release +([#1098](https://github.com/census-instrumentation/opencensus-python/pull/1098)) diff --git a/contrib/opencensus-ext-httpx/README.rst b/contrib/opencensus-ext-httpx/README.rst new file mode 100644 index 000000000..6e4d5b87a --- /dev/null +++ b/contrib/opencensus-ext-httpx/README.rst @@ -0,0 +1,42 @@ +OpenCensus httpx Integration +============================================================================ + +|pypi| + +.. |pypi| image:: https://badge.fury.io/py/opencensus-ext-httpx.svg + :target: https://pypi.org/project/opencensus-ext-httpx/ + +OpenCensus can trace HTTP requests made with the `httpx package `_. The request URL, +method, and status will be collected. + +You can enable httpx integration by specifying ``'httpx'`` to ``trace_integrations``. + +Only the hostname must be specified if only the hostname is specified in the URL request. + + +Installation +------------ + +:: + + pip install opencensus-ext-httpx + +Usage +----- + +.. 
code:: python + + import httpx + from opencensus.trace import config_integration + from opencensus.trace.tracer import Tracer + + if __name__ == '__main__': + config_integration.trace_integrations(['httpx']) + tracer = Tracer() + with tracer.span(name='parent'): + response = httpx.get(url='https://www.example.org') + +References +---------- + +* `OpenCensus Project `_ diff --git a/contrib/opencensus-ext-httpx/opencensus/__init__.py b/contrib/opencensus-ext-httpx/opencensus/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/contrib/opencensus-ext-httpx/opencensus/ext/__init__.py b/contrib/opencensus-ext-httpx/opencensus/ext/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/contrib/opencensus-ext-httpx/opencensus/ext/httpx/__init__.py b/contrib/opencensus-ext-httpx/opencensus/ext/httpx/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/contrib/opencensus-ext-httpx/opencensus/ext/httpx/trace.py b/contrib/opencensus-ext-httpx/opencensus/ext/httpx/trace.py new file mode 100644 index 000000000..b77f29630 --- /dev/null +++ b/contrib/opencensus-ext-httpx/opencensus/ext/httpx/trace.py @@ -0,0 +1,118 @@ +import logging + +import httpx +import wrapt + +from opencensus.trace import ( + attributes_helper, + exceptions_status, + execution_context, + integrations, +) +from opencensus.trace import span as span_module +from opencensus.trace import utils + +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse + + +log = logging.getLogger(__name__) + +MODULE_NAME = "httpx" + +HTTP_HOST = attributes_helper.COMMON_ATTRIBUTES["HTTP_HOST"] +HTTP_METHOD = attributes_helper.COMMON_ATTRIBUTES["HTTP_METHOD"] +HTTP_PATH = attributes_helper.COMMON_ATTRIBUTES["HTTP_PATH"] +HTTP_ROUTE = attributes_helper.COMMON_ATTRIBUTES["HTTP_ROUTE"] +HTTP_STATUS_CODE = attributes_helper.COMMON_ATTRIBUTES["HTTP_STATUS_CODE"] +HTTP_URL = attributes_helper.COMMON_ATTRIBUTES["HTTP_URL"] + + +def 
trace_integration(tracer=None): + """Wrap the requests library to trace it.""" + log.info("Integrated module: {}".format(MODULE_NAME)) + + if tracer is not None: + # The execution_context tracer should never be None - if it has not + # been set it returns a no-op tracer. Most code in this library does + # not handle None being used in the execution context. + execution_context.set_opencensus_tracer(tracer) + + wrapt.wrap_function_wrapper( + MODULE_NAME, "Client.request", wrap_client_request + ) + # pylint: disable=protected-access + integrations.add_integration(integrations._Integrations.HTTPX) + + +def wrap_client_request(wrapped, instance, args, kwargs): + """Wrap the session function to trace it.""" + # Check if request was sent from an exporter. If so, do not wrap. + if execution_context.is_exporter(): + return wrapped(*args, **kwargs) + + method = kwargs.get("method") or args[0] + url = kwargs.get("url") or args[1] + + excludelist_hostnames = execution_context.get_opencensus_attr( + "excludelist_hostnames" + ) + parsed_url = urlparse(url) + if parsed_url.port is None: + dest_url = parsed_url.hostname + else: + dest_url = "{}:{}".format(parsed_url.hostname, parsed_url.port) + if utils.disable_tracing_hostname(dest_url, excludelist_hostnames): + return wrapped(*args, **kwargs) + + path = parsed_url.path if parsed_url.path else "/" + + _tracer = execution_context.get_opencensus_tracer() + _span = _tracer.start_span() + + _span.name = "{}".format(path) + _span.span_kind = span_module.SpanKind.CLIENT + + try: + tracer_headers = _tracer.propagator.to_headers(_tracer.span_context) + kwargs.setdefault("headers", {}).update(tracer_headers) + except Exception: # pragma: NO COVER + pass + + # Add the component type to attributes + _tracer.add_attribute_to_current_span("component", "HTTP") + + # Add the requests host to attributes + _tracer.add_attribute_to_current_span(HTTP_HOST, dest_url) + + # Add the requests method to attributes + 
_tracer.add_attribute_to_current_span(HTTP_METHOD, method.upper()) + + # Add the requests path to attributes + _tracer.add_attribute_to_current_span(HTTP_PATH, path) + + # Add the requests url to attributes + _tracer.add_attribute_to_current_span(HTTP_URL, url) + + try: + result = wrapped(*args, **kwargs) + except httpx.TimeoutException: + _span.set_status(exceptions_status.TIMEOUT) + raise + except httpx.InvalidURL: + _span.set_status(exceptions_status.INVALID_URL) + raise + except Exception as e: + _span.set_status(exceptions_status.unknown(e)) + raise + else: + # Add the status code to attributes + _tracer.add_attribute_to_current_span( + HTTP_STATUS_CODE, result.status_code + ) + _span.set_status(utils.status_from_http_code(result.status_code)) + return result + finally: + _tracer.end_span() diff --git a/contrib/opencensus-ext-httpx/setup.cfg b/contrib/opencensus-ext-httpx/setup.cfg new file mode 100644 index 000000000..2a9acf13d --- /dev/null +++ b/contrib/opencensus-ext-httpx/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 diff --git a/contrib/opencensus-ext-httpx/setup.py b/contrib/opencensus-ext-httpx/setup.py new file mode 100644 index 000000000..bf920415c --- /dev/null +++ b/contrib/opencensus-ext-httpx/setup.py @@ -0,0 +1,44 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from setuptools import find_packages, setup + +from version import __version__ + +setup( + name="opencensus-ext-httpx", + version=__version__, # noqa + author="MichaƂ Klich", + author_email="michal@klichx.dev", + classifiers=[ + "Intended Audience :: Developers", + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + ], + description="OpenCensus HTTPX Integration", + include_package_data=True, + long_description=open('README.rst').read(), + install_requires=["opencensus >= 0.12.dev0, < 1.0.0", "httpx >= 0.22.0"], + extras_require={}, + license="Apache-2.0", + packages=find_packages(exclude=("tests",)), + namespace_packages=[], + url="", + zip_safe=False, +) diff --git a/contrib/opencensus-ext-httpx/tests/test_httpx_trace.py b/contrib/opencensus-ext-httpx/tests/test_httpx_trace.py new file mode 100644 index 000000000..ddb9cb87f --- /dev/null +++ b/contrib/opencensus-ext-httpx/tests/test_httpx_trace.py @@ -0,0 +1,474 @@ +# Copyright 2017, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import httpx +import mock + +from opencensus.ext.httpx import trace +from opencensus.trace import execution_context +from opencensus.trace import span as span_module +from opencensus.trace import status as status_module +from opencensus.trace.tracers import noop_tracer + + +class Test_httpx_trace(unittest.TestCase): + def test_trace_integration(self): + mock_wrap = mock.Mock() + patch_wrapt = mock.patch("wrapt.wrap_function_wrapper", mock_wrap) + + with patch_wrapt: + trace.trace_integration() + + self.assertIsInstance( + execution_context.get_opencensus_tracer(), + noop_tracer.NoopTracer, + ) + mock_wrap.assert_called_once_with( + trace.MODULE_NAME, "Client.request", trace.wrap_client_request + ) + + def test_trace_integration_set_tracer(self): + mock_wrap = mock.Mock() + patch_wrapt = mock.patch("wrapt.wrap_function_wrapper", mock_wrap) + + class TmpTracer(noop_tracer.NoopTracer): + pass + + with patch_wrapt: + trace.trace_integration(tracer=TmpTracer()) + + self.assertIsInstance( + execution_context.get_opencensus_tracer(), TmpTracer + ) + mock_wrap.assert_called_once_with( + trace.MODULE_NAME, "Client.request", trace.wrap_client_request + ) + + def test_wrap_client_request(self): + wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) + + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {"x-trace": "some-value"}) + ) + + patch = mock.patch( + "opencensus.ext.httpx.trace.execution_context." 
+ "get_opencensus_tracer", + return_value=mock_tracer, + ) + patch_thread = mock.patch( + "opencensus.ext.httpx.trace.execution_context.is_exporter", + return_value=False, + ) + + url = "http://localhost:8080/test" + request_method = "POST" + kwargs = {} + + with patch, patch_thread: + trace.wrap_client_request( + wrapped, "Client.request", (request_method, url), kwargs + ) + + expected_attributes = { + "component": "HTTP", + "http.host": "localhost:8080", + "http.method": "POST", + "http.path": "/test", + "http.status_code": 200, + "http.url": url, + } + expected_name = "/test" + expected_status = status_module.Status(0) + + self.assertEqual( + span_module.SpanKind.CLIENT, mock_tracer.current_span.span_kind + ) + self.assertEqual( + expected_attributes, mock_tracer.current_span.attributes + ) + self.assertEqual(kwargs["headers"]["x-trace"], "some-value") + self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, mock_tracer.current_span.status.__dict__ + ) + + def test_wrap_client_request_excludelist_ok(self): + def wrapped(*args, **kwargs): + result = mock.Mock() + result.status_code = 200 + return result + + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {"x-trace": "some-value"}) + ) + + patch_tracer = mock.patch( + "opencensus.ext.httpx.trace.execution_context." 
+ "get_opencensus_tracer", + return_value=mock_tracer, + ) + patch_attr = mock.patch( + "opencensus.ext.httpx.trace.execution_context.get_opencensus_attr", + return_value=None, + ) + patch_thread = mock.patch( + "opencensus.ext.httpx.trace.execution_context.is_exporter", + return_value=False, + ) + + url = "http://localhost/" + request_method = "POST" + + with patch_tracer, patch_attr, patch_thread: + trace.wrap_client_request( + wrapped, "Client.request", (request_method, url), {} + ) + + expected_name = "/" + self.assertEqual(expected_name, mock_tracer.current_span.name) + + def test_wrap_client_request_excludelist_nok(self): + def wrapped(*args, **kwargs): + result = mock.Mock() + result.status_code = 200 + return result + + mock_tracer = MockTracer() + + patch_tracer = mock.patch( + "opencensus.ext.httpx.trace.execution_context." + "get_opencensus_tracer", + return_value=mock_tracer, + ) + patch_attr = mock.patch( + "opencensus.ext.httpx.trace.execution_context.get_opencensus_attr", + return_value=["localhost:8080"], + ) + patch_thread = mock.patch( + "opencensus.ext.httpx.trace.execution_context.is_exporter", + return_value=False, + ) + + url = "http://localhost:8080" + request_method = "POST" + + with patch_tracer, patch_attr, patch_thread: + trace.wrap_client_request( + wrapped, "Client.request", (request_method, url), {} + ) + + self.assertEqual(None, mock_tracer.current_span) + + def test_wrap_client_request_exporter_thread(self): + def wrapped(*args, **kwargs): + result = mock.Mock() + result.status_code = 200 + return result + + mock_tracer = MockTracer() + + patch_tracer = mock.patch( + "opencensus.ext.httpx.trace.execution_context." 
+ "get_opencensus_tracer", + return_value=mock_tracer, + ) + patch_attr = mock.patch( + "opencensus.ext.httpx.trace.execution_context.get_opencensus_attr", + return_value=["localhost:8080"], + ) + patch_thread = mock.patch( + "opencensus.ext.httpx.trace.execution_context.is_exporter", + return_value=True, + ) + + url = "http://localhost:8080" + request_method = "POST" + + with patch_tracer, patch_attr, patch_thread: + trace.wrap_client_request( + wrapped, "Client.request", (request_method, url), {} + ) + + self.assertEqual(None, mock_tracer.current_span) + + def test_header_is_passed_in(self): + wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {"x-trace": "some-value"}) + ) + + patch = mock.patch( + "opencensus.ext.httpx.trace.execution_context." + "get_opencensus_tracer", + return_value=mock_tracer, + ) + patch_thread = mock.patch( + "opencensus.ext.httpx.trace.execution_context.is_exporter", + return_value=False, + ) + + url = "http://localhost:8080" + request_method = "POST" + kwargs = {} + + with patch, patch_thread: + trace.wrap_client_request( + wrapped, "Client.request", (request_method, url), kwargs + ) + + self.assertEqual(kwargs["headers"]["x-trace"], "some-value") + + def test_headers_are_preserved(self): + wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {"x-trace": "some-value"}) + ) + + patch = mock.patch( + "opencensus.ext.httpx.trace.execution_context." 
+ "get_opencensus_tracer", + return_value=mock_tracer, + ) + patch_thread = mock.patch( + "opencensus.ext.httpx.trace.execution_context.is_exporter", + return_value=False, + ) + + url = "http://localhost:8080" + request_method = "POST" + kwargs = {"headers": {"key": "value"}} + + with patch, patch_thread: + trace.wrap_client_request( + wrapped, "Client.request", (request_method, url), kwargs + ) + + self.assertEqual(kwargs["headers"]["key"], "value") + self.assertEqual(kwargs["headers"]["x-trace"], "some-value") + + def test_tracer_headers_are_overwritten(self): + wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {"x-trace": "some-value"}) + ) + + patch = mock.patch( + "opencensus.ext.httpx.trace.execution_context." + "get_opencensus_tracer", + return_value=mock_tracer, + ) + + patch_thread = mock.patch( + "opencensus.ext.httpx.trace.execution_context.is_exporter", + return_value=False, + ) + + url = "http://localhost:8080" + request_method = "POST" + kwargs = {"headers": {"x-trace": "original-value"}} + + with patch, patch_thread: + trace.wrap_client_request( + wrapped, "Client.request", (request_method, url), kwargs + ) + + self.assertEqual(kwargs["headers"]["x-trace"], "some-value") + + def test_wrap_client_request_timeout(self): + wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) + wrapped.side_effect = httpx.TimeoutException("timeout") + + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {"x-trace": "some-value"}) + ) + + patch = mock.patch( + "opencensus.ext.httpx.trace.execution_context." 
+ "get_opencensus_tracer", + return_value=mock_tracer, + ) + patch_thread = mock.patch( + "opencensus.ext.httpx.trace.execution_context.is_exporter", + return_value=False, + ) + + url = "http://localhost:8080/test" + request_method = "POST" + kwargs = {} + + with patch, patch_thread: + with self.assertRaises(httpx.TimeoutException): + # breakpoint() + trace.wrap_client_request( + wrapped, "Client.request", (request_method, url), kwargs + ) + + expected_attributes = { + "component": "HTTP", + "http.host": "localhost:8080", + "http.method": "POST", + "http.path": "/test", + "http.url": url, + } + expected_name = "/test" + expected_status = status_module.Status(4, "request timed out") + + self.assertEqual( + span_module.SpanKind.CLIENT, mock_tracer.current_span.span_kind + ) + self.assertEqual( + expected_attributes, mock_tracer.current_span.attributes + ) + self.assertEqual(kwargs["headers"]["x-trace"], "some-value") + self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, mock_tracer.current_span.status.__dict__ + ) + + def test_wrap_client_request_invalid_url(self): + wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) + wrapped.side_effect = httpx.InvalidURL("invalid url") + + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {"x-trace": "some-value"}) + ) + + patch = mock.patch( + "opencensus.ext.httpx.trace.execution_context." 
+ "get_opencensus_tracer", + return_value=mock_tracer, + ) + patch_thread = mock.patch( + "opencensus.ext.httpx.trace.execution_context.is_exporter", + return_value=False, + ) + + url = "http://localhost:8080/test" + request_method = "POST" + kwargs = {} + + with patch, patch_thread: + with self.assertRaises(httpx.InvalidURL): + trace.wrap_client_request( + wrapped, "Client.request", (request_method, url), kwargs + ) + + expected_attributes = { + "component": "HTTP", + "http.host": "localhost:8080", + "http.method": "POST", + "http.path": "/test", + "http.url": url, + } + expected_name = "/test" + expected_status = status_module.Status(3, "invalid URL") + + self.assertEqual( + span_module.SpanKind.CLIENT, mock_tracer.current_span.span_kind + ) + self.assertEqual( + expected_attributes, mock_tracer.current_span.attributes + ) + self.assertEqual(kwargs["headers"]["x-trace"], "some-value") + self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, mock_tracer.current_span.status.__dict__ + ) + + def test_wrap_client_request_exception(self): + wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) + wrapped.side_effect = httpx.TooManyRedirects("too many redirects") + + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {"x-trace": "some-value"}) + ) + + patch = mock.patch( + "opencensus.ext.httpx.trace.execution_context." 
+ "get_opencensus_tracer", + return_value=mock_tracer, + ) + patch_thread = mock.patch( + "opencensus.ext.httpx.trace.execution_context.is_exporter", + return_value=False, + ) + + url = "http://localhost:8080/test" + request_method = "POST" + kwargs = {} + + with patch, patch_thread: + with self.assertRaises(httpx.TooManyRedirects): + trace.wrap_client_request( + wrapped, "Client.request", (request_method, url), kwargs + ) + + expected_attributes = { + "component": "HTTP", + "http.host": "localhost:8080", + "http.method": "POST", + "http.path": "/test", + "http.url": url, + } + expected_name = "/test" + expected_status = status_module.Status(2, "too many redirects") + + self.assertEqual( + span_module.SpanKind.CLIENT, mock_tracer.current_span.span_kind + ) + self.assertEqual( + expected_attributes, mock_tracer.current_span.attributes + ) + self.assertEqual(kwargs["headers"]["x-trace"], "some-value") + self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, mock_tracer.current_span.status.__dict__ + ) + + +class MockTracer(object): + def __init__(self, propagator=None): + self.current_span = None + self.span_context = {} + self.propagator = propagator + + def start_span(self): + span = MockSpan() + self.current_span = span + return span + + def end_span(self): + pass + + def add_attribute_to_current_span(self, key, value): + self.current_span.attributes[key] = value + + +class MockSpan(object): + def __init__(self): + self.attributes = {} + + def set_status(self, status): + self.status = status diff --git a/contrib/opencensus-ext-httpx/version.py b/contrib/opencensus-ext-httpx/version.py new file mode 100644 index 000000000..506a49340 --- /dev/null +++ b/contrib/opencensus-ext-httpx/version.py @@ -0,0 +1 @@ +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-requests/CHANGELOG.md b/contrib/opencensus-ext-requests/CHANGELOG.md index eb561cefe..ac6f5f597 100644 --- 
a/contrib/opencensus-ext-requests/CHANGELOG.md +++ b/contrib/opencensus-ext-requests/CHANGELOG.md @@ -2,6 +2,14 @@ ## Unreleased +## 0.8.0 +Released 2022-08-03 + +- Move `version.py` file into `common` folder +([#1143](https://github.com/census-instrumentation/opencensus-python/pull/1143)) +- Add `requests` library as a hard dependency +([#1146](https://github.com/census-instrumentation/opencensus-python/pull/1146)) + ## 0.7.5 Released 2021-05-13 diff --git a/contrib/opencensus-ext-requests/opencensus/ext/requests/common/__init__.py b/contrib/opencensus-ext-requests/opencensus/ext/requests/common/__init__.py new file mode 100644 index 000000000..69e3be50d --- /dev/null +++ b/contrib/opencensus-ext-requests/opencensus/ext/requests/common/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/contrib/opencensus-ext-requests/version.py b/contrib/opencensus-ext-requests/opencensus/ext/requests/common/version.py similarity index 95% rename from contrib/opencensus-ext-requests/version.py rename to contrib/opencensus-ext-requests/opencensus/ext/requests/common/version.py index dffc606db..671fc3d04 100644 --- a/contrib/opencensus-ext-requests/version.py +++ b/contrib/opencensus-ext-requests/opencensus/ext/requests/common/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.8.dev0' +__version__ = '0.9.dev0' diff --git a/contrib/opencensus-ext-requests/setup.py b/contrib/opencensus-ext-requests/setup.py index 2bb3ce6c4..5d6dad59f 100644 --- a/contrib/opencensus-ext-requests/setup.py +++ b/contrib/opencensus-ext-requests/setup.py @@ -12,13 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os + from setuptools import find_packages, setup -from version import __version__ +BASE_DIR = os.path.dirname(__file__) +VERSION_FILENAME = os.path.join( + BASE_DIR, "opencensus", "ext", "requests", "common", "version.py" +) +PACKAGE_INFO = {} +with open(VERSION_FILENAME) as f: + exec(f.read(), PACKAGE_INFO) setup( name='opencensus-ext-requests', - version=__version__, # noqa + version=PACKAGE_INFO["__version__"], # noqa author='OpenCensus Authors', author_email='census-developers@googlegroups.com', classifiers=[ @@ -41,7 +49,8 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.9.dev0, < 1.0.0', + 'opencensus >= 0.12.dev0, < 1.0.0', + 'requests >= 2.19.0, < 3.0.0', 'wrapt >= 1.0.0, < 2.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-sqlalchemy/setup.py b/contrib/opencensus-ext-sqlalchemy/setup.py index 61a81c6a2..f13621cbd 100644 --- a/contrib/opencensus-ext-sqlalchemy/setup.py +++ b/contrib/opencensus-ext-sqlalchemy/setup.py @@ -42,7 +42,7 @@ long_description=open('README.rst').read(), install_requires=[ 'opencensus >= 0.9.dev0, < 1.0.0', - 'SQLAlchemy >= 1.1.14, < 1.3.24', # https://github.com/sqlalchemy/sqlalchemy/issues/6168 # noqa: E501 + 'SQLAlchemy >= 1.1.14', ], extras_require={}, license='Apache-2.0', diff --git a/noxfile.py b/noxfile.py index abf5aa8c4..aa9d3dba6 100644 --- a/noxfile.py +++ b/noxfile.py @@ -28,10 +28,12 @@ def _install_dev_packages(session): session.install('-e', 'contrib/opencensus-ext-datadog') session.install('-e', 'contrib/opencensus-ext-dbapi') session.install('-e', 'contrib/opencensus-ext-django') + session.install('-e', 'contrib/opencensus-ext-fastapi') session.install('-e', 'contrib/opencensus-ext-flask') session.install('-e', 'contrib/opencensus-ext-gevent') session.install('-e', 'contrib/opencensus-ext-grpc') session.install('-e', 'contrib/opencensus-ext-httplib') + session.install('-e', 'contrib/opencensus-ext-httpx') session.install('-e', 
'contrib/opencensus-ext-jaeger') session.install('-e', 'contrib/opencensus-ext-logging') session.install('-e', 'contrib/opencensus-ext-mysql') diff --git a/opencensus/common/schedule/__init__.py b/opencensus/common/schedule/__init__.py index f5de7108a..ed997a620 100644 --- a/opencensus/common/schedule/__init__.py +++ b/opencensus/common/schedule/__init__.py @@ -112,6 +112,9 @@ def _gets(self, count, timeout): def gets(self, count, timeout): return tuple(self._gets(count, timeout)) + def is_empty(self): + return not self._queue.qsize() + def flush(self, timeout=None): if self._queue.qsize() == 0: return 0 @@ -124,7 +127,7 @@ def flush(self, timeout=None): return elapsed_time = time.time() - start_time wait_time = timeout and max(timeout - elapsed_time, 0) - if event.wait(timeout): + if event.wait(wait_time): return time.time() - start_time # time taken to flush def put(self, item, block=True, timeout=None): diff --git a/opencensus/common/version/__init__.py b/opencensus/common/version/__init__.py index 671fc3d04..ebf29a38b 100644 --- a/opencensus/common/version/__init__.py +++ b/opencensus/common/version/__init__.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.9.dev0' +__version__ = '0.12.dev0' diff --git a/opencensus/metrics/export/gauge.py b/opencensus/metrics/export/gauge.py index 10b35419e..5ae05985c 100644 --- a/opencensus/metrics/export/gauge.py +++ b/opencensus/metrics/export/gauge.py @@ -194,15 +194,17 @@ class DerivedGaugePoint(GaugePoint): :class:`opencensus.metrics.export.cumulative.CumulativePointDouble` :param gauge_point: The underlying `GaugePoint`. 
""" - def __init__(self, func, gauge_point): + def __init__(self, func, gauge_point, **kwargs): self.gauge_point = gauge_point self.func = utils.get_weakref(func) + self._kwargs = kwargs def __repr__(self): - return ("{}({})" + return ("{}({})({})" .format( type(self).__name__, - self.func() + self.func(), + self._kwargs )) def get_value(self): @@ -216,7 +218,7 @@ def get_value(self): longer exists. """ try: - val = self.func()() + val = self.func()(**self._kwargs) except TypeError: # The underlying function has been GC'd return None @@ -406,13 +408,13 @@ class DerivedGauge(BaseGauge): instead of using this class directly. """ - def _create_time_series(self, label_values, func): + def _create_time_series(self, label_values, func, **kwargs): with self._points_lock: return self.points.setdefault( tuple(label_values), - DerivedGaugePoint(func, self.point_type())) + DerivedGaugePoint(func, self.point_type(), **kwargs)) - def create_time_series(self, label_values, func): + def create_time_series(self, label_values, func, **kwargs): """Create a derived measurement to trac `func`. :type label_values: list(:class:`LabelValue`) @@ -432,7 +434,7 @@ def create_time_series(self, label_values, func): raise ValueError if func is None: raise ValueError - return self._create_time_series(label_values, func) + return self._create_time_series(label_values, func, **kwargs) def create_default_time_series(self, func): """Create the default derived measurement for this gauge. 
diff --git a/opencensus/trace/integrations.py b/opencensus/trace/integrations.py index e8e73f1bf..59934881d 100644 --- a/opencensus/trace/integrations.py +++ b/opencensus/trace/integrations.py @@ -32,6 +32,8 @@ class _Integrations: PYRAMID = 512 REQUESTS = 1024 SQLALCHEMY = 2056 + HTTPX = 16777216 + FASTAPI = 4194304 def get_integrations(): diff --git a/opencensus/trace/stack_trace.py b/opencensus/trace/stack_trace.py index 17975d311..30a810b68 100644 --- a/opencensus/trace/stack_trace.py +++ b/opencensus/trace/stack_trace.py @@ -181,7 +181,7 @@ def generate_hash_id(): def generate_hash_id_from_traceback(tb): - m = hashlib.md5() + m = hashlib.md5() # nosec for tb_line in traceback.format_tb(tb): m.update(tb_line.encode('utf-8')) # truncate the hash for easier compatibility with StackDriver, diff --git a/setup.py b/setup.py index de949d8f6..ae24e21bd 100644 --- a/setup.py +++ b/setup.py @@ -45,6 +45,7 @@ 'opencensus-context >= 0.2.dev0', 'google-api-core >= 1.0.0, < 2.0.0; python_version<"3.6"', 'google-api-core >= 1.0.0, < 3.0.0; python_version>="3.6"', + "six ~= 1.16", ], extras_require={}, license='Apache-2.0', diff --git a/tests/unit/metrics/export/test_gauge.py b/tests/unit/metrics/export/test_gauge.py index 59fafd5ef..bd7ad791c 100644 --- a/tests/unit/metrics/export/test_gauge.py +++ b/tests/unit/metrics/export/test_gauge.py @@ -393,6 +393,15 @@ def test_create_time_series(self): unused_mock_fn.assert_not_called() self.assertEqual(len(derived_gauge.points.keys()), 1) + # with kwargs + def fn_with_args(value=None): + if value: + return value + return 0 + label_values2 = [1, 2] + point3 = derived_gauge.create_time_series(label_values2, fn_with_args, value=5) # noqa: E501 + self.assertEqual(point3.get_value(), 5) + def test_create_default_time_series(self): derived_gauge = gauge.DerivedLongGauge( Mock(), Mock(), Mock(), [Mock(), Mock]) diff --git a/tox.ini b/tox.ini index 34d4a0224..9802276e5 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = 
+envlist = py{27,35,36,37,38,39}-unit py39-bandit py39-lint @@ -12,12 +12,15 @@ unit-base-command = py.test --quiet --cov={envdir}/opencensus --cov=context --co [testenv] install_command = python -m pip install {opts} {packages} -deps = +deps = unit,lint: mock==3.0.5 unit,lint: pytest==4.6.4 unit,lint: pytest-cov unit,lint: retrying unit,lint: unittest2 + py27-unit: markupsafe==1.1.1 + py35-unit: markupsafe==1.1.1 + py3{6,7,8,9}-unit: markupsafe==2.0.1 # https://github.com/pallets/markupsafe/issues/282 bandit: bandit unit,lint,setup,docs,bandit: -e context/opencensus-context unit,lint,docs,bandit: -e contrib/opencensus-correlation @@ -27,6 +30,7 @@ deps = ; unit,lint: -e contrib/opencensus-ext-datadog unit,lint,bandit: -e contrib/opencensus-ext-dbapi unit,lint,bandit: -e contrib/opencensus-ext-django + py3{6,7,8,9}-unit,lint,bandit: -e contrib/opencensus-ext-fastapi unit,lint,bandit: -e contrib/opencensus-ext-flask unit,lint,bandit: -e contrib/opencensus-ext-gevent unit,lint,bandit: -e contrib/opencensus-ext-grpc @@ -36,26 +40,29 @@ deps = unit,lint,bandit: -e contrib/opencensus-ext-mysql unit,lint,bandit: -e contrib/opencensus-ext-ocagent unit,lint,bandit: -e contrib/opencensus-ext-postgresql + py{36,37,38,39}-unit,lint,bandit: prometheus-client==0.13.1 unit,lint,bandit: -e contrib/opencensus-ext-prometheus unit,lint,bandit: -e contrib/opencensus-ext-pymongo unit,lint,bandit: -e contrib/opencensus-ext-pymysql unit,lint,bandit: -e contrib/opencensus-ext-pyramid unit,lint,bandit: -e contrib/opencensus-ext-requests + py3{7,8,9}-unit,lint,bandit: -e contrib/opencensus-ext-httpx unit,lint,bandit: -e contrib/opencensus-ext-sqlalchemy py3{6,7,8,9}-unit,lint,bandit: -e contrib/opencensus-ext-stackdriver unit,lint,bandit: -e contrib/opencensus-ext-threading unit,lint,bandit: -e contrib/opencensus-ext-zipkin unit,lint,bandit: -e contrib/opencensus-ext-google-cloud-clientlibs - lint: flake8 + lint: flake8 ~= 4.0.1 lint: isort ~= 4.3.21 setup: docutils setup: pygments docs: 
setuptools >= 36.4.0 docs: sphinx >= 1.6.3 -commands = - py3{6,7,8,9}-unit: {[constants]unit-base-command} - py{27,34,35}-unit: {[constants]unit-base-command} --ignore=contrib/opencensus-ext-stackdriver +commands = + py{27,34,35}-unit: {[constants]unit-base-command} --ignore=contrib/opencensus-ext-stackdriver --ignore=contrib/opencensus-ext-flask --ignore=contrib/opencensus-ext-httpx --ignore=contrib/opencensus-ext-fastapi + py36-unit: {[constants]unit-base-command} --ignore=contrib/opencensus-ext-httpx + py3{7,8,9}-unit: {[constants]unit-base-command} ; TODO system tests lint: isort --check-only --diff --recursive . @@ -64,4 +71,4 @@ commands = bandit: bandit -r context/ contrib/ opencensus/ -lll -q py39-setup: python setup.py check --restructuredtext --strict py39-docs: bash ./scripts/update_docs.sh - ; TODO deployment + ; TODO deployment \ No newline at end of file