From dfd6d4f62e2ad0c0b9ead9b3cf71fa8aa4d33d5c Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 1 Aug 2019 16:39:12 -0700 Subject: [PATCH 01/79] Bump version and Merge Changelog after 0.7.0 release (#740) * Prepare for 0.7.0 release (#739) * Bump version to 0.7.0 for only packages that have changed (#741) * bump version * fix typos --- CHANGELOG.md | 4 +++- contrib/opencensus-correlation/CHANGELOG.md | 3 +++ contrib/opencensus-ext-azure/CHANGELOG.md | 8 ++++++++ .../opencensus/ext/azure/common/version.py | 2 +- contrib/opencensus-ext-azure/setup.py | 2 +- contrib/opencensus-ext-dbapi/setup.py | 2 +- contrib/opencensus-ext-django/CHANGELOG.md | 4 ++++ contrib/opencensus-ext-django/setup.py | 2 +- contrib/opencensus-ext-django/version.py | 2 +- contrib/opencensus-ext-flask/CHANGELOG.md | 4 ++++ contrib/opencensus-ext-flask/setup.py | 2 +- contrib/opencensus-ext-flask/version.py | 2 +- contrib/opencensus-ext-gevent/setup.py | 2 +- contrib/opencensus-ext-google-cloud-clientlibs/setup.py | 2 +- contrib/opencensus-ext-grpc/setup.py | 2 +- contrib/opencensus-ext-httplib/setup.py | 2 +- contrib/opencensus-ext-jaeger/setup.py | 2 +- contrib/opencensus-ext-logging/setup.py | 2 +- contrib/opencensus-ext-mysql/setup.py | 2 +- contrib/opencensus-ext-ocagent/setup.py | 2 +- contrib/opencensus-ext-postgresql/setup.py | 2 +- contrib/opencensus-ext-prometheus/setup.py | 2 +- contrib/opencensus-ext-pymongo/setup.py | 2 +- contrib/opencensus-ext-pymysql/setup.py | 2 +- contrib/opencensus-ext-pyramid/CHANGELOG.md | 4 ++++ contrib/opencensus-ext-pyramid/setup.py | 2 +- contrib/opencensus-ext-pyramid/version.py | 2 +- contrib/opencensus-ext-requests/setup.py | 2 +- contrib/opencensus-ext-sqlalchemy/setup.py | 2 +- contrib/opencensus-ext-stackdriver/setup.py | 2 +- contrib/opencensus-ext-threading/setup.py | 2 +- contrib/opencensus-ext-zipkin/setup.py | 2 +- opencensus/common/version/__init__.py | 2 +- 33 files changed, 53 insertions(+), 28 deletions(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index 04601a9ff..4de9c3180 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,12 +2,14 @@ ## Unreleased +## 0.7.0 +Released 2019-07-31 + - Fix exporting int-valued stats with sum and lastvalue aggregations ([#696](https://github.com/census-instrumentation/opencensus-python/pull/696)) - Fix cloud format propagator to use decimal span_id encoding instead of hex ([#719](https://github.com/census-instrumentation/opencensus-python/pull/719)) - ## 0.6.0 Released 2019-05-31 diff --git a/contrib/opencensus-correlation/CHANGELOG.md b/contrib/opencensus-correlation/CHANGELOG.md index 805c3d79d..cb718632e 100644 --- a/contrib/opencensus-correlation/CHANGELOG.md +++ b/contrib/opencensus-correlation/CHANGELOG.md @@ -2,4 +2,7 @@ ## Unreleased +## 0.3.0 +Released 2019-05-31 + - Add this changelog. diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 7dd47b3d0..870afd94d 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -1,6 +1,10 @@ # Changelog ## Unreleased + +## 0.7.0 +Released 2019-07-31 + - Added standard metrics ([#708](https://github.com/census-instrumentation/opencensus-python/pull/708), [#718](https://github.com/census-instrumentation/opencensus-python/pull/718), @@ -11,16 +15,20 @@ ([#735](https://github.com/census-instrumentation/opencensus-python/pull/735)) ## 0.3.1 +Released 2019-06-30 + - Added metrics exporter ([#678](https://github.com/census-instrumentation/opencensus-python/pull/678)) ## 0.2.1 Released 2019-06-13 + - Support span attributes ([#682](https://github.com/census-instrumentation/opencensus-python/pull/682)) ## 0.2.0 Released 2019-05-31 + - Added log exporter ([#657](https://github.com/census-instrumentation/opencensus-python/pull/657), [#668](https://github.com/census-instrumentation/opencensus-python/pull/668)) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py 
b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py index deb2f374d..dffc606db 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.4.dev0' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-azure/setup.py b/contrib/opencensus-ext-azure/setup.py index f645174dd..3b5976bcd 100644 --- a/contrib/opencensus-ext-azure/setup.py +++ b/contrib/opencensus-ext-azure/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'psutil >= 5.6.3', 'requests >= 2.19.0', ], diff --git a/contrib/opencensus-ext-dbapi/setup.py b/contrib/opencensus-ext-dbapi/setup.py index 1583edbf6..bd5a8469e 100644 --- a/contrib/opencensus-ext-dbapi/setup.py +++ b/contrib/opencensus-ext-dbapi/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-django/CHANGELOG.md b/contrib/opencensus-ext-django/CHANGELOG.md index ff716cb0d..e783a6b7c 100644 --- a/contrib/opencensus-ext-django/CHANGELOG.md +++ b/contrib/opencensus-ext-django/CHANGELOG.md @@ -1,6 +1,10 @@ # Changelog ## Unreleased + +## 0.7.0 +Released 2019-07-31 + - Updated span attributes to include some missing attributes listed [here](https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes) ([#735](https://github.com/census-instrumentation/opencensus-python/pull/735)) diff --git a/contrib/opencensus-ext-django/setup.py b/contrib/opencensus-ext-django/setup.py index 0062f846a..699dd62e8 100644 
--- a/contrib/opencensus-ext-django/setup.py +++ b/contrib/opencensus-ext-django/setup.py @@ -44,7 +44,7 @@ long_description=open('README.rst').read(), install_requires=[ 'Django >= 1.11', - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-django/version.py b/contrib/opencensus-ext-django/version.py index deb2f374d..dffc606db 100644 --- a/contrib/opencensus-ext-django/version.py +++ b/contrib/opencensus-ext-django/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.4.dev0' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-flask/CHANGELOG.md b/contrib/opencensus-ext-flask/CHANGELOG.md index b10ba830b..f0f06e915 100644 --- a/contrib/opencensus-ext-flask/CHANGELOG.md +++ b/contrib/opencensus-ext-flask/CHANGELOG.md @@ -1,6 +1,10 @@ # Changelog ## Unreleased + +## 0.7.0 +Released 2019-07-31 + - Make ProbabilitySampler default - Updated span attributes to include some missing attributes listed [here](https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes) ([#735](https://github.com/census-instrumentation/opencensus-python/pull/735)) diff --git a/contrib/opencensus-ext-flask/setup.py b/contrib/opencensus-ext-flask/setup.py index a1d6db8b1..f05797d62 100644 --- a/contrib/opencensus-ext-flask/setup.py +++ b/contrib/opencensus-ext-flask/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'flask >= 0.12.3, < 2.0.0', - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-flask/version.py b/contrib/opencensus-ext-flask/version.py index deb2f374d..dffc606db 100644 --- a/contrib/opencensus-ext-flask/version.py +++ b/contrib/opencensus-ext-flask/version.py @@ -12,4 +12,4 @@ # See the License 
for the specific language governing permissions and # limitations under the License. -__version__ = '0.4.dev0' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-gevent/setup.py b/contrib/opencensus-ext-gevent/setup.py index 4109854aa..03b666db6 100644 --- a/contrib/opencensus-ext-gevent/setup.py +++ b/contrib/opencensus-ext-gevent/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'gevent >= 1.3' ], extras_require={}, diff --git a/contrib/opencensus-ext-google-cloud-clientlibs/setup.py b/contrib/opencensus-ext-google-cloud-clientlibs/setup.py index 2dc87bd9e..005deedfc 100644 --- a/contrib/opencensus-ext-google-cloud-clientlibs/setup.py +++ b/contrib/opencensus-ext-google-cloud-clientlibs/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'opencensus-ext-grpc >= 0.4.dev0, < 1.0.0', 'opencensus-ext-requests >= 0.2.dev0, < 1.0.0', ], diff --git a/contrib/opencensus-ext-grpc/setup.py b/contrib/opencensus-ext-grpc/setup.py index b42e607a4..9674d4ff1 100644 --- a/contrib/opencensus-ext-grpc/setup.py +++ b/contrib/opencensus-ext-grpc/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'grpcio >= 1.0.0, < 2.0.0', - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-httplib/setup.py b/contrib/opencensus-ext-httplib/setup.py index 7a29e9d24..f8a1cbc83 100644 --- a/contrib/opencensus-ext-httplib/setup.py +++ b/contrib/opencensus-ext-httplib/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, 
license='Apache-2.0', diff --git a/contrib/opencensus-ext-jaeger/setup.py b/contrib/opencensus-ext-jaeger/setup.py index a46f25a1b..d345a92b1 100644 --- a/contrib/opencensus-ext-jaeger/setup.py +++ b/contrib/opencensus-ext-jaeger/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'thrift >= 0.10.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-logging/setup.py b/contrib/opencensus-ext-logging/setup.py index e36b84dc3..ab09453c9 100644 --- a/contrib/opencensus-ext-logging/setup.py +++ b/contrib/opencensus-ext-logging/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-mysql/setup.py b/contrib/opencensus-ext-mysql/setup.py index d84b765e4..4d481e91e 100644 --- a/contrib/opencensus-ext-mysql/setup.py +++ b/contrib/opencensus-ext-mysql/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'mysql-connector >= 2.1.6, < 3.0.0', - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'opencensus-ext-dbapi >= 0.2.dev0, < 1.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-ocagent/setup.py b/contrib/opencensus-ext-ocagent/setup.py index 8c1b26858..d8f24e8ee 100644 --- a/contrib/opencensus-ext-ocagent/setup.py +++ b/contrib/opencensus-ext-ocagent/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'grpcio >= 1.0.0, < 2.0.0', - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'opencensus-proto >= 0.1.0, < 1.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-postgresql/setup.py b/contrib/opencensus-ext-postgresql/setup.py index 5e8eb9bbe..c26c5e143 100644 --- 
a/contrib/opencensus-ext-postgresql/setup.py +++ b/contrib/opencensus-ext-postgresql/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'psycopg2-binary >= 2.7.3.1', ], extras_require={}, diff --git a/contrib/opencensus-ext-prometheus/setup.py b/contrib/opencensus-ext-prometheus/setup.py index a76eaddf3..ff5271744 100644 --- a/contrib/opencensus-ext-prometheus/setup.py +++ b/contrib/opencensus-ext-prometheus/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'prometheus_client >= 0.5.0, < 1.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-pymongo/setup.py b/contrib/opencensus-ext-pymongo/setup.py index 0f26c789c..7cc54d5c5 100644 --- a/contrib/opencensus-ext-pymongo/setup.py +++ b/contrib/opencensus-ext-pymongo/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'pymongo >= 3.1.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-pymysql/setup.py b/contrib/opencensus-ext-pymysql/setup.py index 3f9d08d50..8a88d3da0 100644 --- a/contrib/opencensus-ext-pymysql/setup.py +++ b/contrib/opencensus-ext-pymysql/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'PyMySQL >= 0.7.11, < 1.0.0', - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'opencensus-ext-dbapi >= 0.2.dev0, < 1.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-pyramid/CHANGELOG.md b/contrib/opencensus-ext-pyramid/CHANGELOG.md index 359dd1205..7dc6896b2 100644 --- a/contrib/opencensus-ext-pyramid/CHANGELOG.md +++ b/contrib/opencensus-ext-pyramid/CHANGELOG.md @@ -1,6 +1,10 @@ # Changelog ## 
Unreleased + +## 0.7.0 +Released 2019-07-31 + - Updated span attributes to include some missing attributes listed [here](https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes) ([#735](https://github.com/census-instrumentation/opencensus-python/pull/735)) diff --git a/contrib/opencensus-ext-pyramid/setup.py b/contrib/opencensus-ext-pyramid/setup.py index dd9589a3c..189e42d68 100644 --- a/contrib/opencensus-ext-pyramid/setup.py +++ b/contrib/opencensus-ext-pyramid/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'pyramid >= 1.9.1, < 2.0.0', - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-pyramid/version.py b/contrib/opencensus-ext-pyramid/version.py index deb2f374d..dffc606db 100644 --- a/contrib/opencensus-ext-pyramid/version.py +++ b/contrib/opencensus-ext-pyramid/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.4.dev0' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-requests/setup.py b/contrib/opencensus-ext-requests/setup.py index 761e324fd..3ee29bb18 100644 --- a/contrib/opencensus-ext-requests/setup.py +++ b/contrib/opencensus-ext-requests/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'wrapt >= 1.0.0, < 2.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-sqlalchemy/setup.py b/contrib/opencensus-ext-sqlalchemy/setup.py index e89bd146b..3ac55a79e 100644 --- a/contrib/opencensus-ext-sqlalchemy/setup.py +++ b/contrib/opencensus-ext-sqlalchemy/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'SQLAlchemy >= 1.1.14, < 2.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-stackdriver/setup.py b/contrib/opencensus-ext-stackdriver/setup.py index 06abc5bc4..808badb1f 100644 --- a/contrib/opencensus-ext-stackdriver/setup.py +++ b/contrib/opencensus-ext-stackdriver/setup.py @@ -41,7 +41,7 @@ install_requires=[ 'google-cloud-monitoring >= 0.30.0, < 1.0.0', 'google-cloud-trace >= 0.20.0, < 1.0.0', - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-threading/setup.py b/contrib/opencensus-ext-threading/setup.py index 6aa3e9425..4d2e579df 100644 --- a/contrib/opencensus-ext-threading/setup.py +++ b/contrib/opencensus-ext-threading/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-zipkin/setup.py 
b/contrib/opencensus-ext-zipkin/setup.py index 8611fa5d5..e6f790eed 100644 --- a/contrib/opencensus-ext-zipkin/setup.py +++ b/contrib/opencensus-ext-zipkin/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.dev0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/opencensus/common/version/__init__.py b/opencensus/common/version/__init__.py index ae27d1d16..dffc606db 100644 --- a/opencensus/common/version/__init__.py +++ b/opencensus/common/version/__init__.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.7.dev0' +__version__ = '0.8.dev0' From 5831ba9774f23493ab8c473c08c721e47a6f6c15 Mon Sep 17 00:00:00 2001 From: Victor Date: Fri, 2 Aug 2019 16:58:50 -0300 Subject: [PATCH 02/79] Add set_status to span, update pymongo integration (#738) --- CHANGELOG.md | 2 + .../tests/test_flask_middleware.py | 12 +++-- .../tests/test_server_interceptor.py | 6 ++- .../ext/jaeger/trace_exporter/__init__.py | 4 +- .../ext/ocagent/trace_exporter/utils.py | 5 +- contrib/opencensus-ext-pymongo/CHANGELOG.md | 2 + .../opencensus/ext/pymongo/trace.py | 45 +++++++++++----- .../tests/test_pymongo_trace.py | 52 ++++++++++++++----- opencensus/trace/base_span.py | 8 +++ opencensus/trace/blank_span.py | 8 +++ opencensus/trace/span.py | 18 ++++++- opencensus/trace/status.py | 26 ++++++++-- tests/unit/trace/test_base_span.py | 6 +++ tests/unit/trace/test_blank_span.py | 4 ++ tests/unit/trace/test_span.py | 24 ++++++++- tests/unit/trace/test_status.py | 33 ++++++++++-- 16 files changed, 209 insertions(+), 46 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4de9c3180..c84046d2e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased +- Added `set_status` to `span` + 
([#738](https://github.com/census-instrumentation/opencensus-python/pull/738)) ## 0.7.0 Released 2019-07-31 diff --git a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py index 5c094792e..d299c15b6 100644 --- a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py +++ b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py @@ -316,8 +316,10 @@ def test_teardown_include_exception(self): exported_spandata = mock_exporter.export.call_args[0][0][0] self.assertIsInstance(exported_spandata, span_data.SpanData) self.assertIsInstance(exported_spandata.status, status.Status) - self.assertEqual(exported_spandata.status.code, code_pb2.UNKNOWN) - self.assertEqual(exported_spandata.status.message, 'error') + self.assertEqual( + exported_spandata.status.canonical_code, code_pb2.UNKNOWN + ) + self.assertEqual(exported_spandata.status.description, 'error') def test_teardown_include_exception_and_traceback(self): mock_exporter = mock.MagicMock() @@ -331,8 +333,10 @@ def test_teardown_include_exception_and_traceback(self): exported_spandata = mock_exporter.export.call_args[0][0][0] self.assertIsInstance(exported_spandata, span_data.SpanData) self.assertIsInstance(exported_spandata.status, status.Status) - self.assertEqual(exported_spandata.status.code, code_pb2.UNKNOWN) - self.assertEqual(exported_spandata.status.message, 'error') + self.assertEqual( + exported_spandata.status.canonical_code, code_pb2.UNKNOWN + ) + self.assertEqual(exported_spandata.status.description, 'error') self.assertIsInstance( exported_spandata.stack_trace, stack_trace.StackTrace ) diff --git a/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py b/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py index b5f3cad8d..680de1002 100644 --- a/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py +++ b/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py @@ -147,8 +147,10 @@ def 
test_intercept_handler_exception(self): # check that the status obj is attached to the current span self.assertIsNotNone(current_span.status) - self.assertEqual(current_span.status.code, code_pb2.UNKNOWN) - self.assertEqual(current_span.status.message, 'Test') + self.assertEqual( + current_span.status.canonical_code, code_pb2.UNKNOWN + ) + self.assertEqual(current_span.status.description, 'Test') @mock.patch( 'opencensus.trace.execution_context.get_opencensus_tracer') diff --git a/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/__init__.py b/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/__init__.py index 509e054d2..e738573ed 100644 --- a/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/__init__.py @@ -184,12 +184,12 @@ def translate_to_jaeger(self, span_datas): tags.append(jaeger.Tag( key='status.code', vType=jaeger.TagType.LONG, - vLong=status.code)) + vLong=status.canonical_code)) tags.append(jaeger.Tag( key='status.message', vType=jaeger.TagType.STRING, - vStr=status.message)) + vStr=status.description)) refs = _extract_refs_from_span(span) logs = _extract_logs_from_span(span) diff --git a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/utils.py b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/utils.py index 27bfa0b5d..f92a9fce7 100644 --- a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/utils.py +++ b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/utils.py @@ -43,8 +43,9 @@ def translate_to_trace_proto(span_data): span_data.start_time), end_time=ocagent_utils.proto_ts_from_datetime_str(span_data.end_time), status=trace_pb2.Status( - code=span_data.status.code, - message=span_data.status.message) + code=span_data.status.canonical_code, + message=span_data.status.description, + ) if span_data.status is not None else None, 
same_process_as_parent_span=BoolValue( value=span_data.same_process_as_parent_span) diff --git a/contrib/opencensus-ext-pymongo/CHANGELOG.md b/contrib/opencensus-ext-pymongo/CHANGELOG.md index 9ab12d5b5..49b40d03e 100644 --- a/contrib/opencensus-ext-pymongo/CHANGELOG.md +++ b/contrib/opencensus-ext-pymongo/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased +- Changed attributes names to make it compatible with [OpenTelemetry](https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/data-semantic-conventions.md), maintaining OpenCensus specs fidelity +([#738](https://github.com/census-instrumentation/opencensus-python/pull/738)) ## 0.1.3 Released 2019-05-31 diff --git a/contrib/opencensus-ext-pymongo/opencensus/ext/pymongo/trace.py b/contrib/opencensus-ext-pymongo/opencensus/ext/pymongo/trace.py index 655377013..425580b8e 100644 --- a/contrib/opencensus-ext-pymongo/opencensus/ext/pymongo/trace.py +++ b/contrib/opencensus-ext-pymongo/opencensus/ext/pymongo/trace.py @@ -16,8 +16,11 @@ from pymongo import monitoring +from google.rpc import code_pb2 + from opencensus.trace import execution_context from opencensus.trace import span as span_module +from opencensus.trace import status as status_module log = logging.getLogger(__name__) @@ -34,7 +37,6 @@ def trace_integration(tracer=None): class MongoCommandListener(monitoring.CommandListener): - def __init__(self, tracer=None): self._tracer = tracer @@ -44,30 +46,47 @@ def tracer(self): def started(self, event): span = self.tracer.start_span( - name='{}.{}.{}.{}'.format(MODULE_NAME, - event.database_name, - event.command.get(event.command_name), - event.command_name)) + name='{}.{}.{}.{}'.format( + MODULE_NAME, + event.database_name, + event.command.get(event.command_name), + event.command_name, + ) + ) span.span_kind = span_module.SpanKind.CLIENT + self.tracer.add_attribute_to_current_span('component', 'mongodb') + self.tracer.add_attribute_to_current_span('db.type', 'mongodb') + 
self.tracer.add_attribute_to_current_span( + 'db.instance', event.database_name + ) + self.tracer.add_attribute_to_current_span( + 'db.statement', event.command.get(event.command_name) + ) + for attr in COMMAND_ATTRIBUTES: _attr = event.command.get(attr) if _attr is not None: self.tracer.add_attribute_to_current_span(attr, str(_attr)) self.tracer.add_attribute_to_current_span( - 'request_id', event.request_id) + 'request_id', event.request_id + ) self.tracer.add_attribute_to_current_span( - 'connection_id', str(event.connection_id)) + 'connection_id', str(event.connection_id) + ) def succeeded(self, event): - self._stop('succeeded') + self._stop(code_pb2.OK) def failed(self, event): - self._stop('failed') - - def _stop(self, status): - self.tracer.add_attribute_to_current_span('status', status) - + self._stop(code_pb2.UNKNOWN, 'MongoDB error', event.failure) + + def _stop(self, code, message='', details=None): + span = self.tracer.current_span() + status = status_module.Status( + code=code, message=message, details=details + ) + span.set_status(status) self.tracer.end_span() diff --git a/contrib/opencensus-ext-pymongo/tests/test_pymongo_trace.py b/contrib/opencensus-ext-pymongo/tests/test_pymongo_trace.py index 7ceffd2d0..a8e15250e 100644 --- a/contrib/opencensus-ext-pymongo/tests/test_pymongo_trace.py +++ b/contrib/opencensus-ext-pymongo/tests/test_pymongo_trace.py @@ -49,6 +49,10 @@ def test_started(self): } expected_attrs = { + 'component': 'mongodb', + 'db.type': 'mongodb', + 'db.instance': 'database_name', + 'db.statement': 'find', 'filter': 'filter', 'sort': 'sort', 'limit': 'limit', @@ -63,8 +67,8 @@ def test_started(self): trace.MongoCommandListener().started( event=MockEvent(command_attrs)) - self.assertEqual(mock_tracer.current_span.attributes, expected_attrs) - self.assertEqual(mock_tracer.current_span.name, expected_name) + self.assertEqual(mock_tracer.span.attributes, expected_attrs) + self.assertEqual(mock_tracer.span.name, expected_name) def 
test_succeed(self): mock_tracer = MockTracer() @@ -74,12 +78,16 @@ def test_succeed(self): 'opencensus.trace.execution_context.get_opencensus_tracer', return_value=mock_tracer) - expected_attrs = {'status': 'succeeded'} + expected_status = { + 'code': 0, + 'message': '', + 'details': None + } with patch: trace.MongoCommandListener().succeeded(event=MockEvent(None)) - self.assertEqual(mock_tracer.current_span.attributes, expected_attrs) + self.assertEqual(mock_tracer.span.status, expected_status) mock_tracer.end_span.assert_called_with() def test_failed(self): @@ -90,12 +98,16 @@ def test_failed(self): 'opencensus.trace.execution_context.get_opencensus_tracer', return_value=mock_tracer) - expected_attrs = {'status': 'failed'} + expected_status = { + 'code': 2, + 'message': 'MongoDB error', + 'details': 'failure' + } with patch: trace.MongoCommandListener().failed(event=MockEvent(None)) - self.assertEqual(mock_tracer.current_span.attributes, expected_attrs) + self.assertEqual(mock_tracer.span.status, expected_status) mock_tracer.end_span.assert_called_with() @@ -115,17 +127,31 @@ def __getattr__(self, item): return item +class MockSpan(object): + def __init__(self): + self.status = None + + def set_status(self, status): + self.status = { + 'code': status.canonical_code, + 'message': status.description, + 'details': status.details, + } + + class MockTracer(object): def __init__(self): - self.current_span = None + self.span = MockSpan() self.end_span = mock.Mock() def start_span(self, name=None): - span = mock.Mock() - span.name = name - span.attributes = {} - self.current_span = span - return span + self.span.name = name + self.span.attributes = {} + self.span.status = {} + return self.span def add_attribute_to_current_span(self, key, value): - self.current_span.attributes[key] = value + self.span.attributes[key] = value + + def current_span(self): + return self.span diff --git a/opencensus/trace/base_span.py b/opencensus/trace/base_span.py index 4eacccde8..cabf5df20 
100644 --- a/opencensus/trace/base_span.py +++ b/opencensus/trace/base_span.py @@ -80,6 +80,14 @@ def add_link(self, link): """ raise NotImplementedError + def set_status(self, status): + """Sets span status. + + :type code: :class: `~opencensus.trace.status.Status` + :param code: A Status object. + """ + raise NotImplementedError + def start(self): """Set the start time for a span.""" raise NotImplementedError diff --git a/opencensus/trace/blank_span.py b/opencensus/trace/blank_span.py index d5b09fbf1..8911bf358 100644 --- a/opencensus/trace/blank_span.py +++ b/opencensus/trace/blank_span.py @@ -136,6 +136,14 @@ def add_link(self, link): """ pass + def set_status(self, status): + """No-op implementation of this method. + + :type code: :class: `~opencensus.trace.status.Status` + :param code: A Status object. + """ + pass + def start(self): """No-op implementation of this method.""" pass diff --git a/opencensus/trace/span.py b/opencensus/trace/span.py index a8e9164b8..cb8e42dae 100644 --- a/opencensus/trace/span.py +++ b/opencensus/trace/span.py @@ -264,9 +264,13 @@ def __init__( else: self.links = BoundedList.from_seq(MAX_NUM_LINKS, links) + if status is None: + self.status = status_module.Status.as_ok() + else: + self.status = status + self.span_id = span_id self.stack_trace = stack_trace - self.status = status self.same_process_as_parent_span = same_process_as_parent_span self._child_spans = [] self.context_tracer = context_tracer @@ -346,6 +350,18 @@ def add_link(self, link): raise TypeError("Type Error: received {}, but requires Link.". format(type(link).__name__)) + def set_status(self, status): + """Sets span status. + + :type code: :class: `~opencensus.trace.status.Status` + :param code: A Status object. + """ + if isinstance(status, status_module.Status): + self.status = status + else: + raise TypeError("Type Error: received {}, but requires Status.". 
+ format(type(status).__name__)) + def start(self): """Set the start time for a span.""" self.start_time = utils.to_iso_str() diff --git a/opencensus/trace/status.py b/opencensus/trace/status.py index 26e7fd53d..612425495 100644 --- a/opencensus/trace/status.py +++ b/opencensus/trace/status.py @@ -39,17 +39,31 @@ class Status(object): See: https://cloud.google.com/trace/docs/reference/v2/ rest/v2/Status#FIELDS.details """ - def __init__(self, code, message, details=None): + def __init__(self, code, message=None, details=None): self.code = code self.message = message self.details = details + @property + def canonical_code(self): + return self.code + + @property + def description(self): + return self.message + + @property + def is_ok(self): + return self.canonical_code == code_pb2.OK + def format_status_json(self): """Convert a Status object to json format.""" status_json = {} - status_json['code'] = self.code - status_json['message'] = self.message + status_json['code'] = self.canonical_code + + if self.description is not None: + status_json['message'] = self.description if self.details is not None: status_json['details'] = self.details @@ -62,3 +76,9 @@ def from_exception(cls, exc): code=code_pb2.UNKNOWN, message=str(exc) ) + + @classmethod + def as_ok(cls): + return cls( + code=code_pb2.OK, + ) diff --git a/tests/unit/trace/test_base_span.py b/tests/unit/trace/test_base_span.py index d5c6d2eb1..f6477e8a0 100644 --- a/tests/unit/trace/test_base_span.py +++ b/tests/unit/trace/test_base_span.py @@ -73,6 +73,12 @@ def test_add_link_abstract(self): with self.assertRaises(NotImplementedError): span.add_link(None) + def test_set_status_abstract(self): + span = BaseSpan() + + with self.assertRaises(NotImplementedError): + span.set_status(None) + def test_iter_abstract(self): span = BaseSpan() diff --git a/tests/unit/trace/test_blank_span.py b/tests/unit/trace/test_blank_span.py index 82ed3f003..c0bf333c7 100644 --- a/tests/unit/trace/test_blank_span.py +++ 
b/tests/unit/trace/test_blank_span.py @@ -18,6 +18,7 @@ from opencensus.common import utils from opencensus.trace.link import Link +from opencensus.trace.status import Status from opencensus.trace.span import format_span_json from opencensus.trace.time_event import MessageEvent @@ -59,6 +60,9 @@ def test_do_not_crash(self): link = Link(span_id='1234', trace_id='4567') span.add_link(link) + status = Status(0, 'Ok', {'details': 'ok'}) + span.set_status(status) + message_event = mock.Mock() message_event = MessageEvent(datetime.datetime.utcnow(), mock.Mock()) span.add_message_event(message_event) diff --git a/tests/unit/trace/test_span.py b/tests/unit/trace/test_span.py index 2d1c6ecd3..f7ddf5d7b 100644 --- a/tests/unit/trace/test_span.py +++ b/tests/unit/trace/test_span.py @@ -45,6 +45,7 @@ def _make_one(self, *args, **kw): def test_constructor_defaults(self): span_id = 'test_span_id' span_name = 'test_span_name' + status = Status.as_ok() patch = mock.patch( 'opencensus.trace.span.generate_span_id', return_value=span_id) @@ -56,6 +57,7 @@ def test_constructor_defaults(self): self.assertEqual(span.span_id, span_id) self.assertIsNone(span.parent_span) self.assertEqual(span.attributes, {}) + self.assertDictEqual(span.status.__dict__, status.__dict__) self.assertIsNone(span.start_time) self.assertIsNone(span.end_time) self.assertEqual(span.children, []) @@ -181,6 +183,24 @@ def test_add_link(self): self.assertEqual(len(span.links), 1) + def test_set_status(self): + span_name = 'test_span_name' + span = self._make_one(span_name) + status = mock.Mock() + + with self.assertRaises(TypeError): + span.set_status(status) + + code = 1 + message = 'ok' + details = {'object': 'ok'} + status = Status(code=code, message=message, details=details) + span.set_status(status) + + self.assertEqual(span.status.canonical_code, code) + self.assertEqual(span.status.description, message) + self.assertEqual(span.status.details, details) + def test_start(self): span_name = 'root_span' span = 
self._make_one(span_name) @@ -278,8 +298,8 @@ def test_exception_in_span(self): self.assertIsNotNone(stack_frame['load_module']['build_id']['value']) self.assertIsNotNone(root_span.status) - self.assertEqual(root_span.status.message, exception_message) - self.assertEqual(root_span.status.code, code_pb2.UNKNOWN) + self.assertEqual(root_span.status.description, exception_message) + self.assertEqual(root_span.status.canonical_code, code_pb2.UNKNOWN) class Test_format_span_json(unittest.TestCase): diff --git a/tests/unit/trace/test_status.py b/tests/unit/trace/test_status.py index 1c4a8e816..c94ad903a 100644 --- a/tests/unit/trace/test_status.py +++ b/tests/unit/trace/test_status.py @@ -25,10 +25,21 @@ def test_constructor(self): message = 'test message' status = status_module.Status(code=code, message=message) - self.assertEqual(status.code, code) - self.assertEqual(status.message, message) + self.assertEqual(status.canonical_code, code) + self.assertEqual(status.description, message) self.assertIsNone(status.details) + def test_format_status_json_without_message(self): + code = 100 + status = status_module.Status(code=code) + status_json = status.format_status_json() + + expected_status_json = { + 'code': code + } + + self.assertEqual(expected_status_json, status_json) + def test_format_status_json_with_details(self): code = 100 message = 'test message' @@ -64,9 +75,23 @@ def test_format_status_json_without_details(self): self.assertEqual(expected_status_json, status_json) + def test_is_ok(self): + status = status_module.Status.as_ok() + self.assertTrue(status.is_ok) + + status = status_module.Status(code=code_pb2.UNKNOWN) + self.assertFalse(status.is_ok) + def test_create_from_exception(self): message = 'test message' exc = ValueError(message) status = status_module.Status.from_exception(exc) - self.assertEqual(status.message, message) - self.assertEqual(status.code, code_pb2.UNKNOWN) + self.assertEqual(status.description, message) + 
self.assertEqual(status.canonical_code, code_pb2.UNKNOWN) + self.assertIsNone(status.details) + + def test_create_as_ok(self): + status = status_module.Status.as_ok() + self.assertEqual(status.canonical_code, code_pb2.OK) + self.assertIsNone(status.description) + self.assertIsNone(status.details) From 0aa3f5a1460b26557bc877ce35e59c46e7ef40cb Mon Sep 17 00:00:00 2001 From: Chris Kleinknecht Date: Mon, 5 Aug 2019 12:55:31 -0700 Subject: [PATCH 03/79] Bump SD exporter package version for release (#747) --- contrib/opencensus-ext-stackdriver/CHANGELOG.md | 5 +++++ contrib/opencensus-ext-stackdriver/version.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/contrib/opencensus-ext-stackdriver/CHANGELOG.md b/contrib/opencensus-ext-stackdriver/CHANGELOG.md index ae8f2670e..605685920 100644 --- a/contrib/opencensus-ext-stackdriver/CHANGELOG.md +++ b/contrib/opencensus-ext-stackdriver/CHANGELOG.md @@ -2,6 +2,11 @@ ## Unreleased +## 0.5.0 +Released 2019-08-05 + + - Support exporter changes in `opencensus>=0.7.0` + ## 0.4.0 Released 2019-05-31 diff --git a/contrib/opencensus-ext-stackdriver/version.py b/contrib/opencensus-ext-stackdriver/version.py index 235cf3f15..0363359c3 100644 --- a/contrib/opencensus-ext-stackdriver/version.py +++ b/contrib/opencensus-ext-stackdriver/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.5.dev0' +__version__ = '0.6.dev0' From 36196f182d6efb64eb948a370dac0d28e67bd097 Mon Sep 17 00:00:00 2001 From: Chris Kleinknecht Date: Mon, 5 Aug 2019 13:25:00 -0700 Subject: [PATCH 04/79] Skip existing packages on twine upload (#744) --- scripts/twine_upload.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/twine_upload.sh b/scripts/twine_upload.sh index 68eddb907..b505a5bb5 100755 --- a/scripts/twine_upload.sh +++ b/scripts/twine_upload.sh @@ -41,5 +41,5 @@ done # Upload the distributions. 
for p in dist/* ; do - twine upload $p + twine upload --skip-existing $p done From 7f70b657c5fc2a4795e77210a6cd616cd5ce921f Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 6 Aug 2019 10:39:28 -0700 Subject: [PATCH 05/79] Update CHANGELOG and bump versions (#754) --- contrib/opencensus-ext-grpc/version.py | 2 +- contrib/opencensus-ext-httplib/CHANGELOG.md | 5 +++++ contrib/opencensus-ext-httplib/version.py | 2 +- contrib/opencensus-ext-jaeger/version.py | 2 +- contrib/opencensus-ext-pymongo/version.py | 2 +- contrib/opencensus-ext-requests/CHANGELOG.md | 5 +++++ contrib/opencensus-ext-requests/version.py | 2 +- contrib/opencensus-ext-stackdriver/version.py | 2 +- 8 files changed, 16 insertions(+), 6 deletions(-) diff --git a/contrib/opencensus-ext-grpc/version.py b/contrib/opencensus-ext-grpc/version.py index deb2f374d..dffc606db 100644 --- a/contrib/opencensus-ext-grpc/version.py +++ b/contrib/opencensus-ext-grpc/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.4.dev0' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-httplib/CHANGELOG.md b/contrib/opencensus-ext-httplib/CHANGELOG.md index 3f88b4499..36cba4701 100644 --- a/contrib/opencensus-ext-httplib/CHANGELOG.md +++ b/contrib/opencensus-ext-httplib/CHANGELOG.md @@ -2,6 +2,11 @@ ## Unreleased +## 0.7.1 +Released 2019-08-06 + + - Support exporter changes in `opencensus>=0.7.0` + ## 0.1.3 Released 2019-05-31 diff --git a/contrib/opencensus-ext-httplib/version.py b/contrib/opencensus-ext-httplib/version.py index ff18aeb50..dffc606db 100644 --- a/contrib/opencensus-ext-httplib/version.py +++ b/contrib/opencensus-ext-httplib/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.2.dev0' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-jaeger/version.py b/contrib/opencensus-ext-jaeger/version.py index bf7c8163b..dffc606db 100644 --- a/contrib/opencensus-ext-jaeger/version.py +++ b/contrib/opencensus-ext-jaeger/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.3.dev0' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-pymongo/version.py b/contrib/opencensus-ext-pymongo/version.py index ff18aeb50..dffc606db 100644 --- a/contrib/opencensus-ext-pymongo/version.py +++ b/contrib/opencensus-ext-pymongo/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.2.dev0' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-requests/CHANGELOG.md b/contrib/opencensus-ext-requests/CHANGELOG.md index f94e0621b..db729e0f5 100644 --- a/contrib/opencensus-ext-requests/CHANGELOG.md +++ b/contrib/opencensus-ext-requests/CHANGELOG.md @@ -2,6 +2,11 @@ ## Unreleased +## 0.7.1 +Released 2019-08-06 + + - Support exporter changes in `opencensus>=0.7.0` + ## 0.1.2 Released 2019-04-24 diff --git a/contrib/opencensus-ext-requests/version.py b/contrib/opencensus-ext-requests/version.py index ff18aeb50..dffc606db 100644 --- a/contrib/opencensus-ext-requests/version.py +++ b/contrib/opencensus-ext-requests/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.2.dev0' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-stackdriver/version.py b/contrib/opencensus-ext-stackdriver/version.py index 0363359c3..dffc606db 100644 --- a/contrib/opencensus-ext-stackdriver/version.py +++ b/contrib/opencensus-ext-stackdriver/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.6.dev0' +__version__ = '0.8.dev0' From 86689fe86098c300200fc735e2b1c6b90c3c33b7 Mon Sep 17 00:00:00 2001 From: Chris Kleinknecht Date: Tue, 6 Aug 2019 13:13:40 -0700 Subject: [PATCH 06/79] SD exporter and set_status release (#750) --- CHANGELOG.md | 5 +++++ contrib/opencensus-ext-django/CHANGELOG.md | 5 +++-- contrib/opencensus-ext-flask/CHANGELOG.md | 10 ++++++++-- contrib/opencensus-ext-grpc/CHANGELOG.md | 5 +++++ contrib/opencensus-ext-jaeger/CHANGELOG.md | 5 +++++ contrib/opencensus-ext-ocagent/CHANGELOG.md | 11 ++++++++--- contrib/opencensus-ext-ocagent/version.py | 2 +- contrib/opencensus-ext-pymongo/CHANGELOG.md | 10 ++++++++-- contrib/opencensus-ext-stackdriver/CHANGELOG.md | 2 +- 9 files changed, 44 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c84046d2e..239f527d4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,13 @@ # Changelog ## Unreleased + +## 0.7.1 +Released 2019-08-05 + - Added `set_status` to `span` ([#738](https://github.com/census-instrumentation/opencensus-python/pull/738)) +- Update released stackdriver exporter version ## 0.7.0 Released 2019-07-31 diff --git a/contrib/opencensus-ext-django/CHANGELOG.md b/contrib/opencensus-ext-django/CHANGELOG.md index e783a6b7c..507b05f51 100644 --- a/contrib/opencensus-ext-django/CHANGELOG.md +++ b/contrib/opencensus-ext-django/CHANGELOG.md @@ -5,8 +5,9 @@ ## 0.7.0 Released 2019-07-31 -- Updated span attributes to include some missing attributes listed 
[here](https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes) -([#735](https://github.com/census-instrumentation/opencensus-python/pull/735)) +- Updated span attributes to include some missing attributes listed + [here](https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes) + ([#735](https://github.com/census-instrumentation/opencensus-python/pull/735)) ## 0.3.2 Released 2019-07-26 diff --git a/contrib/opencensus-ext-flask/CHANGELOG.md b/contrib/opencensus-ext-flask/CHANGELOG.md index f0f06e915..6a590c4cd 100644 --- a/contrib/opencensus-ext-flask/CHANGELOG.md +++ b/contrib/opencensus-ext-flask/CHANGELOG.md @@ -2,12 +2,18 @@ ## Unreleased +## 0.7.1 +Released 2019-08-05 + +- Update for core library changes + ## 0.7.0 Released 2019-07-31 - Make ProbabilitySampler default -- Updated span attributes to include some missing attributes listed [here](https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes) -([#735](https://github.com/census-instrumentation/opencensus-python/pull/735)) +- Updated span attributes to include some missing attributes listed + [here](https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes) + ([#735](https://github.com/census-instrumentation/opencensus-python/pull/735)) ## 0.3.0 Released 2019-04-24 diff --git a/contrib/opencensus-ext-grpc/CHANGELOG.md b/contrib/opencensus-ext-grpc/CHANGELOG.md index 33aafa3d6..6ff58e2d5 100644 --- a/contrib/opencensus-ext-grpc/CHANGELOG.md +++ b/contrib/opencensus-ext-grpc/CHANGELOG.md @@ -2,6 +2,11 @@ ## Unreleased +## 0.7.1 +Released 2019-08-05 + +- Update for core library changes + ## 0.3.0 Released 2019-05-31 diff --git a/contrib/opencensus-ext-jaeger/CHANGELOG.md b/contrib/opencensus-ext-jaeger/CHANGELOG.md index b82a493db..bc2085635 100644 --- a/contrib/opencensus-ext-jaeger/CHANGELOG.md +++ 
b/contrib/opencensus-ext-jaeger/CHANGELOG.md @@ -2,6 +2,11 @@ ## Unreleased +## 0.7.1 +Released 2019-08-05 + +- Update for core library changes + ## 0.2.2 Released 2019-05-31 diff --git a/contrib/opencensus-ext-ocagent/CHANGELOG.md b/contrib/opencensus-ext-ocagent/CHANGELOG.md index c1f0e931e..a1dcef595 100644 --- a/contrib/opencensus-ext-ocagent/CHANGELOG.md +++ b/contrib/opencensus-ext-ocagent/CHANGELOG.md @@ -2,12 +2,17 @@ ## Unreleased +## 0.7.1 +Released 2019-08-05 + +- Update for core library changes + ## 0.4.0 Released 2019-05-31 -- Remove well_known_types.Error and well_known_types.ParseError. -Note this could be a breaking change if you depend on an older -version of protobuf and use ParseError. +- Remove well_known_types.Error and well_known_types.ParseError. Note this + could be a breaking change if you depend on an older version of protobuf and + use ParseError. ## 0.3.0 Released 2019-04-24 diff --git a/contrib/opencensus-ext-ocagent/version.py b/contrib/opencensus-ext-ocagent/version.py index 235cf3f15..dffc606db 100644 --- a/contrib/opencensus-ext-ocagent/version.py +++ b/contrib/opencensus-ext-ocagent/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.5.dev0' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-pymongo/CHANGELOG.md b/contrib/opencensus-ext-pymongo/CHANGELOG.md index 49b40d03e..6b3c87023 100644 --- a/contrib/opencensus-ext-pymongo/CHANGELOG.md +++ b/contrib/opencensus-ext-pymongo/CHANGELOG.md @@ -1,8 +1,14 @@ # Changelog ## Unreleased -- Changed attributes names to make it compatible with [OpenTelemetry](https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/data-semantic-conventions.md), maintaining OpenCensus specs fidelity -([#738](https://github.com/census-instrumentation/opencensus-python/pull/738)) + +## 0.7.1 +Released 2019-08-05 + +- Changed attributes names to make it compatible with + [OpenTelemetry](https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/data-semantic-conventions.md), + maintaining OpenCensus specs fidelity + ([#738](https://github.com/census-instrumentation/opencensus-python/pull/738)) ## 0.1.3 Released 2019-05-31 diff --git a/contrib/opencensus-ext-stackdriver/CHANGELOG.md b/contrib/opencensus-ext-stackdriver/CHANGELOG.md index 605685920..f5ae8e0e1 100644 --- a/contrib/opencensus-ext-stackdriver/CHANGELOG.md +++ b/contrib/opencensus-ext-stackdriver/CHANGELOG.md @@ -2,7 +2,7 @@ ## Unreleased -## 0.5.0 +## 0.7.1 Released 2019-08-05 - Support exporter changes in `opencensus>=0.7.0` From ab4747491a969ec263ce47d648c979b0e1037629 Mon Sep 17 00:00:00 2001 From: Victor Date: Tue, 6 Aug 2019 19:36:46 -0300 Subject: [PATCH 07/79] Requests library spec fidelity (#746) --- CHANGELOG.md | 2 + contrib/opencensus-ext-requests/CHANGELOG.md | 4 + .../opencensus/ext/requests/trace.py | 89 ++++- .../tests/test_requests_trace.py | 326 +++++++++++++++++- opencensus/trace/exceptions_status.py | 24 ++ opencensus/trace/utils.py | 32 ++ tests/unit/trace/test_exceptions_status.py | 49 +++ tests/unit/trace/test_ext_utils.py | 65 ++++ 8 files changed, 563 insertions(+), 28 deletions(-) create 
mode 100644 opencensus/trace/exceptions_status.py create mode 100644 tests/unit/trace/test_exceptions_status.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 239f527d4..470cacf92 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased +- Added `http code` to `grpc code` status code mapping on `utils` + ([#746](https://github.com/census-instrumentation/opencensus-python/pull/746)) ## 0.7.1 Released 2019-08-05 diff --git a/contrib/opencensus-ext-requests/CHANGELOG.md b/contrib/opencensus-ext-requests/CHANGELOG.md index db729e0f5..d0ed6146d 100644 --- a/contrib/opencensus-ext-requests/CHANGELOG.md +++ b/contrib/opencensus-ext-requests/CHANGELOG.md @@ -1,6 +1,10 @@ # Changelog ## Unreleased +- Added attributes following specs listed [here](https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes) + ([#746](https://github.com/census-instrumentation/opencensus-python/pull/746)) +- Fixed span name + ([#746](https://github.com/census-instrumentation/opencensus-python/pull/746)) ## 0.7.1 Released 2019-08-06 diff --git a/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py b/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py index a26e98082..821c816b5 100644 --- a/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py +++ b/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py @@ -21,6 +21,7 @@ from urlparse import urlparse from opencensus.trace import attributes_helper +from opencensus.trace import exceptions_status from opencensus.trace import execution_context from opencensus.trace import span as span_module from opencensus.trace import utils @@ -33,8 +34,12 @@ SESSION_WRAP_METHODS = 'request' SESSION_CLASS_NAME = 'Session' -HTTP_URL = attributes_helper.COMMON_ATTRIBUTES['HTTP_URL'] +HTTP_HOST = attributes_helper.COMMON_ATTRIBUTES['HTTP_HOST'] +HTTP_METHOD = attributes_helper.COMMON_ATTRIBUTES['HTTP_METHOD'] +HTTP_PATH = 
attributes_helper.COMMON_ATTRIBUTES['HTTP_PATH'] +HTTP_ROUTE = attributes_helper.COMMON_ATTRIBUTES['HTTP_ROUTE'] HTTP_STATUS_CODE = attributes_helper.COMMON_ATTRIBUTES['HTTP_STATUS_CODE'] +HTTP_URL = attributes_helper.COMMON_ATTRIBUTES['HTTP_URL'] def trace_integration(tracer=None): @@ -74,22 +79,47 @@ def call(url, *args, **kwargs): if utils.disable_tracing_hostname(dest_url, blacklist_hostnames): return requests_func(url, *args, **kwargs) + path = parsed_url.path if parsed_url.path else '/' + _tracer = execution_context.get_opencensus_tracer() _span = _tracer.start_span() - _span.name = '[requests]{}'.format(requests_func.__name__) + _span.name = '{}'.format(path) _span.span_kind = span_module.SpanKind.CLIENT - # Add the requests url to attributes - _tracer.add_attribute_to_current_span(HTTP_URL, url) + # Add the requests host to attributes + _tracer.add_attribute_to_current_span( + HTTP_HOST, dest_url) - result = requests_func(url, *args, **kwargs) + # Add the requests method to attributes + _tracer.add_attribute_to_current_span( + HTTP_METHOD, requests_func.__name__.upper()) - # Add the status code to attributes + # Add the requests path to attributes _tracer.add_attribute_to_current_span( - HTTP_STATUS_CODE, str(result.status_code)) + HTTP_PATH, path) - _tracer.end_span() - return result + # Add the requests url to attributes + _tracer.add_attribute_to_current_span(HTTP_URL, url) + + try: + result = requests_func(url, *args, **kwargs) + except requests.Timeout: + _span.set_status(exceptions_status.TIMEOUT) + except requests.URLRequired: + _span.set_status(exceptions_status.INVALID_URL) + except Exception as e: + _span.set_status(exceptions_status.unknown(e)) + else: + # Add the status code to attributes + _tracer.add_attribute_to_current_span( + HTTP_STATUS_CODE, result.status_code + ) + _span.set_status( + utils.status_from_http_code(result.status_code) + ) + return result + finally: + _tracer.end_span() return call @@ -113,10 +143,12 @@ def 
wrap_session_request(wrapped, instance, args, kwargs): if utils.disable_tracing_hostname(dest_url, blacklist_hostnames): return wrapped(*args, **kwargs) + path = parsed_url.path if parsed_url.path else '/' + _tracer = execution_context.get_opencensus_tracer() _span = _tracer.start_span() - _span.name = '[requests]{}'.format(method) + _span.name = '{}'.format(path) _span.span_kind = span_module.SpanKind.CLIENT try: @@ -127,14 +159,37 @@ def wrap_session_request(wrapped, instance, args, kwargs): except Exception: # pragma: NO COVER pass - # Add the requests url to attributes - _tracer.add_attribute_to_current_span(HTTP_URL, url) + # Add the requests host to attributes + _tracer.add_attribute_to_current_span( + HTTP_HOST, dest_url) - result = wrapped(*args, **kwargs) + # Add the requests method to attributes + _tracer.add_attribute_to_current_span( + HTTP_METHOD, method.upper()) - # Add the status code to attributes + # Add the requests path to attributes _tracer.add_attribute_to_current_span( - HTTP_STATUS_CODE, str(result.status_code)) + HTTP_PATH, path) + + # Add the requests url to attributes + _tracer.add_attribute_to_current_span(HTTP_URL, url) - _tracer.end_span() - return result + try: + result = wrapped(*args, **kwargs) + except requests.Timeout: + _span.set_status(exceptions_status.TIMEOUT) + except requests.URLRequired: + _span.set_status(exceptions_status.INVALID_URL) + except Exception as e: + _span.set_status(exceptions_status.unknown(e)) + else: + # Add the status code to attributes + _tracer.add_attribute_to_current_span( + HTTP_STATUS_CODE, result.status_code + ) + _span.set_status( + utils.status_from_http_code(result.status_code) + ) + return result + finally: + _tracer.end_span() diff --git a/contrib/opencensus-ext-requests/tests/test_requests_trace.py b/contrib/opencensus-ext-requests/tests/test_requests_trace.py index 44e0abf56..67222c618 100644 --- a/contrib/opencensus-ext-requests/tests/test_requests_trace.py +++ 
b/contrib/opencensus-ext-requests/tests/test_requests_trace.py @@ -15,10 +15,12 @@ import unittest import mock +import requests from opencensus.trace.tracers import noop_tracer from opencensus.ext.requests import trace from opencensus.trace import span as span_module, execution_context +from opencensus.trace import status as status_module class Test_requests_trace(unittest.TestCase): @@ -95,19 +97,30 @@ def test_wrap_requests(self): wrapped = trace.wrap_requests(mock_func) - url = 'http://localhost:8080' + url = 'http://localhost:8080/test' with patch, patch_thread: wrapped(url) - expected_attributes = {'http.url': url, 'http.status_code': '200'} - expected_name = '[requests]get' + expected_attributes = { + 'http.host': 'localhost:8080', + 'http.method': 'GET', + 'http.path': '/test', + 'http.status_code': 200, + 'http.url': url, + } + expected_name = '/test' + expected_status = status_module.Status(0) self.assertEqual(span_module.SpanKind.CLIENT, mock_tracer.current_span.span_kind) self.assertEqual(expected_attributes, mock_tracer.current_span.attributes) self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, + mock_tracer.current_span.status.__dict__ + ) def test_wrap_requests_blacklist_ok(self): mock_return = mock.Mock() @@ -138,7 +151,7 @@ def test_wrap_requests_blacklist_ok(self): with patch_tracer, patch_attr, patch_thread: wrapped(url) - expected_name = '[requests]get' + expected_name = '/' self.assertEqual(expected_name, mock_tracer.current_span.name) @@ -204,6 +217,144 @@ def test_wrap_requests_exporter_thread(self): self.assertEqual(None, mock_tracer.current_span) + def test_wrap_requests_timeout(self): + mock_return = mock.Mock() + mock_return.status_code = 200 + return_value = mock_return + mock_func = mock.Mock() + mock_func.__name__ = 'get' + mock_func.return_value = return_value + mock_func.side_effect = requests.Timeout + mock_tracer = MockTracer() + + patch = mock.patch( + 
'opencensus.ext.requests.trace.execution_context.' + 'get_opencensus_tracer', + return_value=mock_tracer) + + patch_thread = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' + 'is_exporter', + return_value=False) + + wrapped = trace.wrap_requests(mock_func) + + url = 'http://localhost:8080/test' + + with patch, patch_thread: + wrapped(url) + + expected_attributes = { + 'http.host': 'localhost:8080', + 'http.method': 'GET', + 'http.path': '/test', + 'http.url': url, + } + expected_name = '/test' + expected_status = status_module.Status(4, 'request timed out') + + self.assertEqual(span_module.SpanKind.CLIENT, + mock_tracer.current_span.span_kind) + self.assertEqual(expected_attributes, + mock_tracer.current_span.attributes) + self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, + mock_tracer.current_span.status.__dict__ + ) + + def test_wrap_requests_invalid_url(self): + mock_return = mock.Mock() + mock_return.status_code = 200 + return_value = mock_return + mock_func = mock.Mock() + mock_func.__name__ = 'get' + mock_func.return_value = return_value + mock_func.side_effect = requests.URLRequired + mock_tracer = MockTracer() + + patch = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' + 'get_opencensus_tracer', + return_value=mock_tracer) + + patch_thread = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' 
+ 'is_exporter', + return_value=False) + + wrapped = trace.wrap_requests(mock_func) + + url = 'http://localhost:8080/test' + + with patch, patch_thread: + wrapped(url) + + expected_attributes = { + 'http.host': 'localhost:8080', + 'http.method': 'GET', + 'http.path': '/test', + 'http.url': url, + } + expected_name = '/test' + expected_status = status_module.Status(3, 'invalid URL') + + self.assertEqual(span_module.SpanKind.CLIENT, + mock_tracer.current_span.span_kind) + self.assertEqual(expected_attributes, + mock_tracer.current_span.attributes) + self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, + mock_tracer.current_span.status.__dict__ + ) + + def test_wrap_requests_exception(self): + mock_return = mock.Mock() + mock_return.status_code = 200 + return_value = mock_return + mock_func = mock.Mock() + mock_func.__name__ = 'get' + mock_func.return_value = return_value + mock_func.side_effect = requests.TooManyRedirects + mock_tracer = MockTracer() + + patch = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' + 'get_opencensus_tracer', + return_value=mock_tracer) + + patch_thread = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' 
+ 'is_exporter', + return_value=False) + + wrapped = trace.wrap_requests(mock_func) + + url = 'http://localhost:8080/test' + + with patch, patch_thread: + wrapped(url) + + expected_attributes = { + 'http.host': 'localhost:8080', + 'http.method': 'GET', + 'http.path': '/test', + 'http.url': url, + } + expected_name = '/test' + expected_status = status_module.Status(2, '') + + self.assertEqual(span_module.SpanKind.CLIENT, + mock_tracer.current_span.span_kind) + self.assertEqual(expected_attributes, + mock_tracer.current_span.attributes) + self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, + mock_tracer.current_span.status.__dict__ + ) + def test_wrap_session_request(self): wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) @@ -220,7 +371,7 @@ def test_wrap_session_request(self): 'is_exporter', return_value=False) - url = 'http://localhost:8080' + url = 'http://localhost:8080/test' request_method = 'POST' kwargs = {} @@ -228,8 +379,15 @@ def test_wrap_session_request(self): trace.wrap_session_request(wrapped, 'Session.request', (request_method, url), kwargs) - expected_attributes = {'http.url': url, 'http.status_code': '200'} - expected_name = '[requests]POST' + expected_attributes = { + 'http.host': 'localhost:8080', + 'http.method': 'POST', + 'http.path': '/test', + 'http.status_code': 200, + 'http.url': url, + } + expected_name = '/test' + expected_status = status_module.Status(0) self.assertEqual(span_module.SpanKind.CLIENT, mock_tracer.current_span.span_kind) @@ -237,6 +395,10 @@ def test_wrap_session_request(self): mock_tracer.current_span.attributes) self.assertEqual(kwargs['headers']['x-trace'], 'some-value') self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, + mock_tracer.current_span.status.__dict__ + ) def test_wrap_session_request_blacklist_ok(self): def wrapped(*args, **kwargs): @@ -261,14 +423,14 @@ def wrapped(*args, 
**kwargs): 'is_exporter', return_value=False) - url = 'http://localhost' + url = 'http://localhost/' request_method = 'POST' with patch_tracer, patch_attr, patch_thread: trace.wrap_session_request(wrapped, 'Session.request', (request_method, url), {}) - expected_name = '[requests]POST' + expected_name = '/' self.assertEqual(expected_name, mock_tracer.current_span.name) def test_wrap_session_request_blacklist_nok(self): @@ -406,6 +568,141 @@ def test_tracer_headers_are_overwritten(self): self.assertEqual(kwargs['headers']['x-trace'], 'some-value') + def test_wrap_session_request_timeout(self): + wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) + wrapped.side_effect = requests.Timeout + + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {'x-trace': 'some-value'})) + + patch = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' + 'get_opencensus_tracer', + return_value=mock_tracer) + patch_thread = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' 
+ 'is_exporter', + return_value=False) + + url = 'http://localhost:8080/test' + request_method = 'POST' + kwargs = {} + + with patch, patch_thread: + trace.wrap_session_request(wrapped, 'Session.request', + (request_method, url), kwargs) + + expected_attributes = { + 'http.host': 'localhost:8080', + 'http.method': 'POST', + 'http.path': '/test', + 'http.url': url, + } + expected_name = '/test' + expected_status = status_module.Status(4, 'request timed out') + + self.assertEqual(span_module.SpanKind.CLIENT, + mock_tracer.current_span.span_kind) + self.assertEqual(expected_attributes, + mock_tracer.current_span.attributes) + self.assertEqual(kwargs['headers']['x-trace'], 'some-value') + self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, + mock_tracer.current_span.status.__dict__ + ) + + def test_wrap_session_request_invalid_url(self): + wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) + wrapped.side_effect = requests.URLRequired + + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {'x-trace': 'some-value'})) + + patch = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' + 'get_opencensus_tracer', + return_value=mock_tracer) + patch_thread = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' 
+ 'is_exporter', + return_value=False) + + url = 'http://localhost:8080/test' + request_method = 'POST' + kwargs = {} + + with patch, patch_thread: + trace.wrap_session_request(wrapped, 'Session.request', + (request_method, url), kwargs) + + expected_attributes = { + 'http.host': 'localhost:8080', + 'http.method': 'POST', + 'http.path': '/test', + 'http.url': url, + } + expected_name = '/test' + expected_status = status_module.Status(3, 'invalid URL') + + self.assertEqual(span_module.SpanKind.CLIENT, + mock_tracer.current_span.span_kind) + self.assertEqual(expected_attributes, + mock_tracer.current_span.attributes) + self.assertEqual(kwargs['headers']['x-trace'], 'some-value') + self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, + mock_tracer.current_span.status.__dict__ + ) + + def test_wrap_session_request_exception(self): + wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) + wrapped.side_effect = requests.TooManyRedirects + + mock_tracer = MockTracer( + propagator=mock.Mock( + to_headers=lambda x: {'x-trace': 'some-value'})) + + patch = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' + 'get_opencensus_tracer', + return_value=mock_tracer) + patch_thread = mock.patch( + 'opencensus.ext.requests.trace.execution_context.' 
+ 'is_exporter', + return_value=False) + + url = 'http://localhost:8080/test' + request_method = 'POST' + kwargs = {} + + with patch, patch_thread: + trace.wrap_session_request(wrapped, 'Session.request', + (request_method, url), kwargs) + + expected_attributes = { + 'http.host': 'localhost:8080', + 'http.method': 'POST', + 'http.path': '/test', + 'http.url': url, + } + expected_name = '/test' + expected_status = status_module.Status(2, '') + + self.assertEqual(span_module.SpanKind.CLIENT, + mock_tracer.current_span.span_kind) + self.assertEqual(expected_attributes, + mock_tracer.current_span.attributes) + self.assertEqual(kwargs['headers']['x-trace'], 'some-value') + self.assertEqual(expected_name, mock_tracer.current_span.name) + self.assertEqual( + expected_status.__dict__, + mock_tracer.current_span.status.__dict__ + ) + class MockTracer(object): def __init__(self, propagator=None): @@ -414,8 +711,7 @@ def __init__(self, propagator=None): self.propagator = propagator def start_span(self): - span = mock.Mock() - span.attributes = {} + span = MockSpan() self.current_span = span return span @@ -424,3 +720,11 @@ def end_span(self): def add_attribute_to_current_span(self, key, value): self.current_span.attributes[key] = value + + +class MockSpan(object): + def __init__(self): + self.attributes = {} + + def set_status(self, status): + self.status = status diff --git a/opencensus/trace/exceptions_status.py b/opencensus/trace/exceptions_status.py new file mode 100644 index 000000000..a57bdec60 --- /dev/null +++ b/opencensus/trace/exceptions_status.py @@ -0,0 +1,24 @@ +# Copyright 2017, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.rpc import code_pb2 +from opencensus.trace.status import Status + +CANCELLED = Status(code_pb2.CANCELLED) +INVALID_URL = Status(code_pb2.INVALID_ARGUMENT, message='invalid URL') +TIMEOUT = Status(code_pb2.DEADLINE_EXCEEDED, message='request timed out') + + +def unknown(exception): + return Status.from_exception(exception) diff --git a/opencensus/trace/utils.py b/opencensus/trace/utils.py index 1a39c8c0e..93add22d5 100644 --- a/opencensus/trace/utils.py +++ b/opencensus/trace/utils.py @@ -14,7 +14,9 @@ import re +from google.rpc import code_pb2 from opencensus.trace import execution_context +from opencensus.trace.status import Status # By default the blacklist urls are not tracing, currently just include the # health check url. The paths are literal string matched instead of regular @@ -93,3 +95,33 @@ def disable_tracing_hostname(url, blacklist_hostnames=None): blacklist_hostnames = [] return url in blacklist_hostnames + + +def status_from_http_code(http_code): + """Returns equivalent status from http status code + based on OpenCensus specs. + + :type http_code: int + :param http_code: HTTP request status code. + + :rtype: int + :returns: A instance of :class: `~opencensus.trace.status.Status`. 
+ """ + if http_code <= 199: + return Status(code_pb2.UNKNOWN) + + if http_code <= 399: + return Status(code_pb2.OK) + + grpc_code = { + 400: code_pb2.INVALID_ARGUMENT, + 401: code_pb2.UNAUTHENTICATED, + 403: code_pb2.PERMISSION_DENIED, + 404: code_pb2.NOT_FOUND, + 429: code_pb2.RESOURCE_EXHAUSTED, + 501: code_pb2.UNIMPLEMENTED, + 503: code_pb2.UNAVAILABLE, + 504: code_pb2.DEADLINE_EXCEEDED, + }.get(http_code, code_pb2.UNKNOWN) + + return Status(grpc_code) diff --git a/tests/unit/trace/test_exceptions_status.py b/tests/unit/trace/test_exceptions_status.py new file mode 100644 index 000000000..1b3f7e963 --- /dev/null +++ b/tests/unit/trace/test_exceptions_status.py @@ -0,0 +1,49 @@ +# Copyright 2017, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +from google.rpc import code_pb2 +from opencensus.trace import exceptions_status + + +class TestUtils(unittest.TestCase): + def test_cancelled(self): + self.assertEqual( + exceptions_status.CANCELLED.canonical_code, + code_pb2.CANCELLED + ) + + def test_invalid_url(self): + self.assertEqual( + exceptions_status.INVALID_URL.canonical_code, + code_pb2.INVALID_ARGUMENT + ) + + def test_timeout(self): + self.assertEqual( + exceptions_status.TIMEOUT.canonical_code, + code_pb2.DEADLINE_EXCEEDED + ) + + def test_unknown(self): + status = exceptions_status.unknown(Exception) + self.assertEqual( + status.canonical_code, + code_pb2.UNKNOWN + ) + self.assertEqual( + status.description, + str(Exception) + ) diff --git a/tests/unit/trace/test_ext_utils.py b/tests/unit/trace/test_ext_utils.py index 96092fcb5..a7d946e49 100644 --- a/tests/unit/trace/test_ext_utils.py +++ b/tests/unit/trace/test_ext_utils.py @@ -16,6 +16,7 @@ import mock +from google.rpc import code_pb2 from opencensus.trace import utils @@ -73,3 +74,67 @@ def test_disable_tracing_hostname_explicit(self): url = '127.0.0.1:80' disable_tracing = utils.disable_tracing_hostname(url, blacklist_paths) self.assertFalse(disable_tracing) + + def test_grpc_code_from_http_code(self): + test_cases = [ + { + 'http_code': 0, + 'grpc_code': code_pb2.UNKNOWN, + }, + { + 'http_code': 200, + 'grpc_code': code_pb2.OK, + }, + { + 'http_code': 399, + 'grpc_code': code_pb2.OK, + }, + { + 'http_code': 400, + 'grpc_code': code_pb2.INVALID_ARGUMENT, + }, + { + 'http_code': 504, + 'grpc_code': code_pb2.DEADLINE_EXCEEDED, + }, + { + 'http_code': 404, + 'grpc_code': code_pb2.NOT_FOUND, + }, + { + 'http_code': 403, + 'grpc_code': code_pb2.PERMISSION_DENIED, + }, + { + 'http_code': 401, + 'grpc_code': code_pb2.UNAUTHENTICATED, + }, + { + 'http_code': 429, + 'grpc_code': code_pb2.RESOURCE_EXHAUSTED, + }, + { + 'http_code': 501, + 'grpc_code': code_pb2.UNIMPLEMENTED, + }, + { + 'http_code': 503, + 'grpc_code': 
code_pb2.UNAVAILABLE, + }, + { + 'http_code': 600, + 'grpc_code': code_pb2.UNKNOWN, + }, + ] + + for test_case in test_cases: + status = utils.status_from_http_code(test_case['http_code']) + self.assertEqual( + status.canonical_code, + test_case['grpc_code'], + 'HTTP: {} / GRPC: expected = {}, actual = {}'.format( + test_case['http_code'], + test_case['grpc_code'], + status.canonical_code, + ) + ) From 577fb61ab26513e3b448b6696432eefb0b11fa73 Mon Sep 17 00:00:00 2001 From: Reiley Yang Date: Wed, 7 Aug 2019 12:20:24 -0700 Subject: [PATCH 08/79] Update codeowners file (#756) --- .github/CODEOWNERS | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 826cd01b7..02bf84a10 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,4 +2,6 @@ # This file controls who is tagged for review for any given pull request. # For anything not explicitly taken by someone else: -* @census-instrumentation/global-owners @c24t @reyang @songy23 +* @census-instrumentation/global-owners @c24t @reyang @songy23 @victoraugustolls + +/contrib/opencensus-ext-azure/ @lzchen From 49b27ef0179d1f5cef17fd2496d891ce8f7e92f4 Mon Sep 17 00:00:00 2001 From: Olivier Lance Date: Tue, 13 Aug 2019 02:34:07 +0200 Subject: [PATCH 09/79] Fixes value for `http.route` in Flask middleware (#759) --- .../opencensus/ext/flask/flask_middleware.py | 6 +-- .../tests/test_flask_middleware.py | 41 +++++++++++++++---- 2 files changed, 36 insertions(+), 11 deletions(-) diff --git a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py index 9ec8992c6..fec2a8c23 100644 --- a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py +++ b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py @@ -155,9 +155,6 @@ def _before_request(self): tracer.add_attribute_to_current_span( HTTP_PATH, flask.request.path ) - 
tracer.add_attribute_to_current_span( - HTTP_ROUTE, flask.request.path - ) tracer.add_attribute_to_current_span( HTTP_URL, str(flask.request.url) ) @@ -179,6 +176,9 @@ def _after_request(self, response): try: tracer = execution_context.get_opencensus_tracer() + tracer.add_attribute_to_current_span( + HTTP_ROUTE, flask.request.url_rule.rule + ) tracer.add_attribute_to_current_span( HTTP_STATUS_CODE, str(response.status_code) diff --git a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py index d299c15b6..3e68f7124 100644 --- a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py +++ b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py @@ -51,6 +51,10 @@ def create_app(): def index(): return 'test flask trace' # pragma: NO COVER + @app.route('/wiki/') + def wiki(entry): + return 'test flask trace' # pragma: NO COVER + @app.route('/_ah/health') def health_check(): return 'test health check' # pragma: NO COVER @@ -155,7 +159,6 @@ def test__before_request(self): 'http.host': u'localhost', 'http.method': u'GET', 'http.path': u'/wiki/Rabbit', - 'http.route': u'/wiki/Rabbit', 'http.url': u'http://localhost/wiki/Rabbit', } @@ -220,7 +223,6 @@ def test_header_encoding(self): 'http.host': u'localhost', 'http.method': u'GET', 'http.path': u'/wiki/Rabbit', - 'http.route': u'/wiki/Rabbit', 'http.url': u'http://localhost/wiki/Rabbit', } @@ -248,7 +250,6 @@ def test_header_is_none(self): 'http.host': u'localhost', 'http.method': u'GET', 'http.path': u'/wiki/Rabbit', - 'http.route': u'/wiki/Rabbit', 'http.url': u'http://localhost/wiki/Rabbit', } @@ -278,13 +279,37 @@ def test__after_request_sampled(self): flask_trace_id = '00-{}-{}-00'.format(trace_id, span_id) app = self.create_app() - flask_middleware.FlaskMiddleware(app=app) + flask_middleware.FlaskMiddleware( + app=app, + sampler=samplers.AlwaysOnSampler() + ) - response = app.test_client().get( - '/', - headers={flask_trace_header: 
flask_trace_id}) + context = app.test_request_context( + path='/wiki/Rabbit', + headers={flask_trace_header: flask_trace_id} + ) - self.assertEqual(response.status_code, 200) + with context: + app.preprocess_request() + tracer = execution_context.get_opencensus_tracer() + self.assertIsNotNone(tracer) + + span = tracer.current_span() + + rv = app.dispatch_request() + app.finalize_request(rv) + + expected_attributes = { + 'http.host': u'localhost', + 'http.method': u'GET', + 'http.path': u'/wiki/Rabbit', + 'http.url': u'http://localhost/wiki/Rabbit', + 'http.route': u'/wiki/', + 'http.status_code': u'200' + } + + self.assertEqual(span.attributes, expected_attributes) + assert isinstance(span.parent_span, base.NullContextManager) def test__after_request_blacklist(self): flask_trace_header = 'traceparent' From daecc37ea3ec821b36c5a9da4094200f5a01b021 Mon Sep 17 00:00:00 2001 From: Chris Kleinknecht Date: Thu, 15 Aug 2019 14:43:02 -0700 Subject: [PATCH 10/79] Update GCP metadata URL (#761) --- opencensus/common/monitored_resource/gcp_metadata_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opencensus/common/monitored_resource/gcp_metadata_config.py b/opencensus/common/monitored_resource/gcp_metadata_config.py index b33f49d2a..0975d7cf4 100644 --- a/opencensus/common/monitored_resource/gcp_metadata_config.py +++ b/opencensus/common/monitored_resource/gcp_metadata_config.py @@ -14,7 +14,7 @@ from opencensus.common.http_handler import get_request -_GCP_METADATA_URI = 'http://metadata/computeMetadata/v1/' +_GCP_METADATA_URI = 'http://metadata.google.internal/computeMetadata/v1/' _GCP_METADATA_URI_HEADER = {'Metadata-Flavor': 'Google'} # ID of the GCP project associated with this resource, such as "my-project" From 479087cf46162be9807dffe482e983b38763a531 Mon Sep 17 00:00:00 2001 From: Chris Kleinknecht Date: Fri, 16 Aug 2019 15:57:13 -0700 Subject: [PATCH 11/79] 0.7.2 release (#763) --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff 
--git a/CHANGELOG.md b/CHANGELOG.md index 470cacf92..38ea9c2fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,12 @@ - Added `http code` to `grpc code` status code mapping on `utils` ([#746](https://github.com/census-instrumentation/opencensus-python/pull/746)) +## 0.7.2 +Released 2019-08-16 + +- Fix GCP resource loading for certain environments + ([#761](https://github.com/census-instrumentation/opencensus-python/pull/761)) + ## 0.7.1 Released 2019-08-05 From cde62c5f5726f561f9d059f2d631caecbe28aa01 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Mon, 19 Aug 2019 08:40:13 -0700 Subject: [PATCH 12/79] Standard Metrics - Incoming Requests Per Second (#758) * Incoming requests for Python3 * python2 support * change namespace * add tests * More tests * Fix lint * Fix lint * Add test * Add CHANGELOG --- contrib/opencensus-ext-azure/CHANGELOG.md | 2 + contrib/opencensus-ext-azure/README.rst | 1 + .../standard_metrics/__init__.py | 7 +- .../{dependency.py => http_dependency.py} | 2 +- .../standard_metrics/http_requests.py | 109 +++++++++++++ .../tests/test_azure_standard_metrics.py | 145 ++++++++++++++++-- 6 files changed, 247 insertions(+), 19 deletions(-) rename contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/{dependency.py => http_dependency.py} (97%) create mode 100644 contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 870afd94d..9655e02e0 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased +- Standard metrics incoming requests per second + ([#758](https://github.com/census-instrumentation/opencensus-python/pull/758)) ## 0.7.0 Released 2019-07-31 diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index 707845b52..451086d02 
100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -153,6 +153,7 @@ Below is a list of standard metrics that are currently available: - Available Memory (bytes) - CPU Processor Time (percentage) +- Incoming Request Rate (per second) - Outgoing Request Rate (per second) - Process CPU Usage (percentage) - Process Private Bytes (bytes) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py index 1fd64bd58..162f015a0 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py @@ -16,7 +16,7 @@ from opencensus.metrics.export.metric_producer import MetricProducer from opencensus.ext.azure.metrics_exporter.standard_metrics.cpu \ import ProcessorTimeMetric -from opencensus.ext.azure.metrics_exporter.standard_metrics.dependency \ +from opencensus.ext.azure.metrics_exporter.standard_metrics.http_dependency \ import DependencyRateMetric from opencensus.ext.azure.metrics_exporter.standard_metrics.memory \ import AvailableMemoryMetric @@ -24,13 +24,16 @@ import ProcessCPUMetric from opencensus.ext.azure.metrics_exporter.standard_metrics.process \ import ProcessMemoryMetric +from opencensus.ext.azure.metrics_exporter.standard_metrics.http_requests \ + import RequestsRateMetric # List of standard metrics to track STANDARD_METRICS = [AvailableMemoryMetric, DependencyRateMetric, ProcessCPUMetric, ProcessMemoryMetric, - ProcessorTimeMetric] + ProcessorTimeMetric, + RequestsRateMetric] def register_metrics(): diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/dependency.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py similarity 
index 97% rename from contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/dependency.py rename to contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py index 13495b640..ac4e3da37 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/dependency.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py @@ -76,7 +76,7 @@ def __call__(self): value over the elapsed time. :rtype: :class:`opencensus.metrics.export.gauge.DerivedLongGauge` - :return: The gauge representing the available memory metric + :return: The gauge representing the outgoing requests metric """ gauge = DerivedDoubleGauge( DependencyRateMetric.NAME, diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py new file mode 100644 index 000000000..1a78da4fc --- /dev/null +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py @@ -0,0 +1,109 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys +import time + +from opencensus.metrics.export.gauge import DerivedDoubleGauge +if sys.version_info < (3,): + from BaseHTTPServer import HTTPServer +else: + from http.server import HTTPServer + +requests_map = dict() +ORIGINAL_CONSTRUCTOR = HTTPServer.__init__ + + +def request_patch(func): + def wrapper(self=None): + func(self) + count = requests_map.get('count', 0) + requests_map['count'] = count + 1 + return wrapper + + +def server_patch(*args, **kwargs): + if len(args) >= 3: + handler = args[2] + if handler: + # Patch the handler methods if they exist + if "do_DELETE" in dir(handler): + handler.do_DELETE = request_patch(handler.do_DELETE) + if "do_GET" in dir(handler): + handler.do_GET = request_patch(handler.do_GET) + if "do_HEAD" in dir(handler): + handler.do_HEAD = request_patch(handler.do_HEAD) + if "do_OPTIONS" in dir(handler): + handler.do_OPTIONS = request_patch(handler.do_OPTIONS) + if "do_POST" in dir(handler): + handler.do_POST = request_patch(handler.do_POST) + if "do_PUT" in dir(handler): + handler.do_PUT = request_patch(handler.do_PUT) + result = ORIGINAL_CONSTRUCTOR(*args, **kwargs) + return result + + +def setup(): + # Patch the HTTPServer handler to track request information + HTTPServer.__init__ = server_patch + + +class RequestsRateMetric(object): + NAME = "\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec" + + def __init__(self): + setup() + + @staticmethod + def get_value(): + current_count = requests_map.get('count', 0) + current_time = time.time() + last_count = requests_map.get('last_count', 0) + last_time = requests_map.get('last_time') + last_result = requests_map.get('last_result', 0) + + try: + # last_time is None the very first time this function is called + if last_time is not None: + elapsed_seconds = current_time - last_time + interval_count = current_count - last_count + result = interval_count / elapsed_seconds + else: + result = 0 + requests_map['last_time'] = current_time + requests_map['last_count'] = 
current_count + requests_map['last_result'] = result + return result + except ZeroDivisionError: + # If elapsed_seconds is 0, exporter call made too close to previous + # Return the previous result if this is the case + return last_result + + def __call__(self): + """ Returns a derived gauge for incoming requests per second + + Calculated by obtaining by getting the number of incoming requests + made to an HTTPServer within an elapsed time and dividing that value + over the elapsed time. + + :rtype: :class:`opencensus.metrics.export.gauge.DerivedLongGauge` + :return: The gauge representing the incoming requests metric + """ + gauge = DerivedDoubleGauge( + RequestsRateMetric.NAME, + 'Incoming Requests per second', + 'rps', + []) + gauge.create_default_time_series(RequestsRateMetric.get_value) + return gauge diff --git a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py index b5c64d6da..f98df0c88 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py @@ -15,19 +15,27 @@ import collections import mock import requests +import sys import unittest from opencensus.ext.azure.metrics_exporter import standard_metrics from opencensus.trace import execution_context +if sys.version_info < (3,): + from BaseHTTPServer import HTTPServer +else: + from http.server import HTTPServer ORIGINAL_FUNCTION = requests.Session.request +ORIGINAL_CONS = HTTPServer.__init__ class TestStandardMetrics(unittest.TestCase): def setUp(self): - standard_metrics.dependency.dependency_map.clear() + standard_metrics.http_dependency.dependency_map.clear() + standard_metrics.http_requests.requests_map.clear() requests.Session.request = ORIGINAL_FUNCTION - standard_metrics.dependency.ORIGINAL_REQUEST = ORIGINAL_FUNCTION + standard_metrics.http_dependency.ORIGINAL_REQUEST = ORIGINAL_FUNCTION + 
standard_metrics.http_requests.ORIGINAL_CONSTRUCTOR = ORIGINAL_CONS @mock.patch('opencensus.ext.azure.metrics_exporter' '.standard_metrics.register_metrics') @@ -40,12 +48,12 @@ def test_producer_get_metrics(self): producer = standard_metrics.AzureStandardMetricsProducer() metrics = producer.get_metrics() - self.assertEqual(len(metrics), 5) + self.assertEqual(len(metrics), 6) def test_register_metrics(self): registry = standard_metrics.register_metrics() - self.assertEqual(len(registry.get_metrics()), 5) + self.assertEqual(len(registry.get_metrics()), 6) def test_get_available_memory_metric(self): metric = standard_metrics.AvailableMemoryMetric() @@ -135,21 +143,21 @@ def test_get_process_cpu_usage_exception(self, logger_mock): logger_mock.exception.assert_called() def test_dependency_patch(self): - map = standard_metrics.dependency.dependency_map - standard_metrics.dependency.ORIGINAL_REQUEST = lambda x: None + map = standard_metrics.http_dependency.dependency_map + standard_metrics.http_dependency.ORIGINAL_REQUEST = lambda x: None session = requests.Session() execution_context.set_is_exporter(False) - result = standard_metrics.dependency.dependency_patch(session) + result = standard_metrics.http_dependency.dependency_patch(session) self.assertEqual(map['count'], 1) self.assertIsNone(result) def test_dependency_patch_exporter_thread(self): - map = standard_metrics.dependency.dependency_map - standard_metrics.dependency.ORIGINAL_REQUEST = lambda x: None + map = standard_metrics.http_dependency.dependency_map + standard_metrics.http_dependency.ORIGINAL_REQUEST = lambda x: None session = mock.Mock() execution_context.set_is_exporter(True) - result = standard_metrics.dependency.dependency_patch(session) + result = standard_metrics.http_dependency.dependency_patch(session) self.assertIsNone(map.get('count')) self.assertIsNone(result) @@ -167,21 +175,126 @@ def test_get_dependency_rate_first_time(self): self.assertEqual(rate, 0) 
@mock.patch('opencensus.ext.azure.metrics_exporter' - '.standard_metrics.dependency.time') + '.standard_metrics.http_dependency.time') def test_get_dependency_rate(self, time_mock): time_mock.time.return_value = 100 - standard_metrics.dependency.dependency_map['last_time'] = 98 - standard_metrics.dependency.dependency_map['count'] = 4 + standard_metrics.http_dependency.dependency_map['last_time'] = 98 + standard_metrics.http_dependency.dependency_map['count'] = 4 rate = standard_metrics.DependencyRateMetric.get_value() self.assertEqual(rate, 2) @mock.patch('opencensus.ext.azure.metrics_exporter' - '.standard_metrics.dependency.time') + '.standard_metrics.http_dependency.time') def test_get_dependency_rate_error(self, time_mock): time_mock.time.return_value = 100 - standard_metrics.dependency.dependency_map['last_result'] = 5 - standard_metrics.dependency.dependency_map['last_time'] = 100 + standard_metrics.http_dependency.dependency_map['last_result'] = 5 + standard_metrics.http_dependency.dependency_map['last_time'] = 100 result = standard_metrics.DependencyRateMetric.get_value() self.assertEqual(result, 5) + + def test_request_patch(self): + map = standard_metrics.http_requests.requests_map + func = mock.Mock() + new_func = standard_metrics.http_requests.request_patch(func) + new_func() + + self.assertEqual(map['count'], 1) + self.assertEqual(len(func.call_args_list), 1) + + def test_server_patch(self): + standard_metrics. \ + http_requests. \ + ORIGINAL_CONSTRUCTOR = lambda x, y, z: None + with mock.patch('opencensus.ext.azure.metrics_exporter' + '.standard_metrics.http_requests' + '.request_patch') as request_mock: + handler = mock.Mock() + handler.do_DELETE.return_value = None + handler.do_GET.return_value = None + handler.do_HEAD.return_value = None + handler.do_OPTIONS.return_value = None + handler.do_POST.return_value = None + handler.do_PUT.return_value = None + result = standard_metrics. \ + http_requests. 
\ + server_patch(None, None, handler) + handler.do_DELETE() + handler.do_GET() + handler.do_HEAD() + handler.do_OPTIONS() + handler.do_POST() + handler.do_PUT() + + self.assertEqual(result, None) + self.assertEqual(len(request_mock.call_args_list), 6) + + def test_server_patch_no_methods(self): + standard_metrics. \ + http_requests. \ + ORIGINAL_CONSTRUCTOR = lambda x, y, z: None + with mock.patch('opencensus.ext.azure.metrics_exporter' + '.standard_metrics.http_requests' + '.request_patch') as request_mock: + handler = mock.Mock() + result = standard_metrics. \ + http_requests. \ + server_patch(None, None, handler) + handler.do_DELETE() + handler.do_GET() + handler.do_HEAD() + handler.do_OPTIONS() + handler.do_POST() + handler.do_PUT() + + self.assertEqual(result, None) + self.assertEqual(len(request_mock.call_args_list), 0) + + def test_server_patch_no_args(self): + standard_metrics \ + .http_requests \ + .ORIGINAL_CONSTRUCTOR = lambda x, y: None + r = standard_metrics.http_requests.server_patch(None, None) + + self.assertEqual(r, None) + + def test_server_patch_no_handler(self): + standard_metrics \ + .http_requests \ + .ORIGINAL_CONSTRUCTOR = lambda x, y, z: None + r = standard_metrics.http_requests.server_patch(None, None, None) + + self.assertEqual(r, None) + + def test_get_request_rate_metric(self): + metric = standard_metrics.RequestsRateMetric() + gauge = metric() + + name = '\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec' + self.assertEqual(gauge.descriptor.name, name) + + def test_get_request_rate_first_time(self): + rate = standard_metrics.RequestsRateMetric.get_value() + + self.assertEqual(rate, 0) + + @mock.patch('opencensus.ext.azure.metrics_exporter' + '.standard_metrics.http_requests.time') + def test_get_request_rate(self, time_mock): + time_mock.time.return_value = 100 + standard_metrics.http_requests.requests_map['last_time'] = 98 + standard_metrics.http_requests.requests_map['count'] = 4 + rate = 
standard_metrics.RequestsRateMetric.get_value() + + self.assertEqual(rate, 2) + + @mock.patch('opencensus.ext.azure.metrics_exporter' + '.standard_metrics.http_requests.time') + def test_get_request_rate_error(self, time_mock): + time_mock.time.return_value = 100 + standard_metrics.http_requests.requests_map['last_result'] = 5 + standard_metrics.http_requests.requests_map['last_time'] = 100 + result = standard_metrics.RequestsRateMetric.get_value() + + self.assertEqual(result, 5) From de131c70a99c0e0a326b7bd9871cb7fc1214471f Mon Sep 17 00:00:00 2001 From: Chris Kleinknecht Date: Tue, 20 Aug 2019 16:21:56 -0700 Subject: [PATCH 13/79] Delete SD integ test metric descriptors (#770) --- .../stackdriver/stackdriver_stats_test.py | 41 ++++++++++++------- 1 file changed, 26 insertions(+), 15 deletions(-) diff --git a/tests/system/stats/stackdriver/stackdriver_stats_test.py b/tests/system/stats/stackdriver/stackdriver_stats_test.py index 59857b7d6..a18291b37 100644 --- a/tests/system/stats/stackdriver/stackdriver_stats_test.py +++ b/tests/system/stats/stackdriver/stackdriver_stats_test.py @@ -66,15 +66,26 @@ def setUp(self): patcher.start() self.addCleanup(patcher.stop) + def tearDown(self): + suffix = str(os.getgid()) + + cli = monitoring_v3.MetricServiceClient() + for md in cli.list_metric_descriptors('projects/{}'.format(PROJECT)): + if "OpenCensus" in md.name and suffix in md.name: + try: + cli.delete_metric_descriptor(md.name) + except Exception: + pass + def test_stats_record_sync(self): - # We are using sufix in order to prevent cached objects - sufix = str(os.getgid()) + # We are using suffix in order to prevent cached objects + suffix = str(os.getgid()) - tag_key = "SampleKeySyncTest%s" % sufix - measure_name = "SampleMeasureNameSyncTest%s" % sufix - measure_description = "SampleDescriptionSyncTest%s" % sufix - view_name = "SampleViewNameSyncTest%s" % sufix - view_description = "SampleViewDescriptionSyncTest%s" % sufix + tag_key = "SampleKeySyncTest%s" % suffix + 
measure_name = "SampleMeasureNameSyncTest%s" % suffix + measure_description = "SampleDescriptionSyncTest%s" % suffix + view_name = "SampleViewNameSyncTest%s" % suffix + view_description = "SampleViewDescriptionSyncTest%s" % suffix FRONTEND_KEY = tag_key_module.TagKey(tag_key) VIDEO_SIZE_MEASURE = measure_module.MeasureInt( @@ -119,14 +130,14 @@ def test_stats_record_sync(self): self.check_sd_md(exporter, view_description) def test_stats_record_async(self): - # We are using sufix in order to prevent cached objects - sufix = str(os.getpid()) - - tag_key = "SampleKeyAsyncTest%s" % sufix - measure_name = "SampleMeasureNameAsyncTest%s" % sufix - measure_description = "SampleDescriptionAsyncTest%s" % sufix - view_name = "SampleViewNameAsyncTest%s" % sufix - view_description = "SampleViewDescriptionAsyncTest%s" % sufix + # We are using suffix in order to prevent cached objects + suffix = str(os.getpid()) + + tag_key = "SampleKeyAsyncTest%s" % suffix + measure_name = "SampleMeasureNameAsyncTest%s" % suffix + measure_description = "SampleDescriptionAsyncTest%s" % suffix + view_name = "SampleViewNameAsyncTest%s" % suffix + view_description = "SampleViewDescriptionAsyncTest%s" % suffix FRONTEND_KEY_ASYNC = tag_key_module.TagKey(tag_key) VIDEO_SIZE_MEASURE_ASYNC = measure_module.MeasureInt( From 52b974f3c31a3fe0e985209d2f7f4171a6dc168c Mon Sep 17 00:00:00 2001 From: Victor Date: Tue, 20 Aug 2019 21:15:25 -0300 Subject: [PATCH 14/79] Hotfix/django flask pyramid status code (#755) --- CHANGELOG.md | 2 ++ contrib/opencensus-ext-django/CHANGELOG.md | 2 ++ .../examples/app/views.py | 2 +- .../opencensus/ext/django/middleware.py | 2 +- .../tests/test_django_middleware.py | 4 +-- contrib/opencensus-ext-flask/CHANGELOG.md | 2 ++ .../opencensus/ext/flask/flask_middleware.py | 2 +- .../tests/test_flask_middleware.py | 2 +- contrib/opencensus-ext-httplib/CHANGELOG.md | 2 ++ .../opencensus/ext/httplib/trace.py | 2 +- contrib/opencensus-ext-pyramid/CHANGELOG.md | 2 ++ 
.../examples/app/__init__.py | 2 +- .../ext/pyramid/pyramid_middleware.py | 2 +- .../tests/test_pyramid_middleware.py | 2 +- .../stackdriver/trace_exporter/__init__.py | 11 ++++++ .../tests/test_stackdriver_exporter.py | 36 +++++++++++++++++-- .../system/trace/django/django_system_test.py | 8 +++-- tests/system/trace/flask/flask_system_test.py | 4 ++- 18 files changed, 74 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 38ea9c2fb..583891b1a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased +- Updated `django`, `flask`, `httplib`, `requests` and `pyramid` modules. + ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) - Added `http code` to `grpc code` status code mapping on `utils` ([#746](https://github.com/census-instrumentation/opencensus-python/pull/746)) diff --git a/contrib/opencensus-ext-django/CHANGELOG.md b/contrib/opencensus-ext-django/CHANGELOG.md index 507b05f51..284104707 100644 --- a/contrib/opencensus-ext-django/CHANGELOG.md +++ b/contrib/opencensus-ext-django/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased +- Updated `http.status_code` attribute to be an int. 
+ ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) ## 0.7.0 Released 2019-07-31 diff --git a/contrib/opencensus-ext-django/examples/app/views.py b/contrib/opencensus-ext-django/examples/app/views.py index ba01e98c1..d8ea57037 100644 --- a/contrib/opencensus-ext-django/examples/app/views.py +++ b/contrib/opencensus-ext-django/examples/app/views.py @@ -59,7 +59,7 @@ def greetings(request): def trace_requests(request): response = requests.get('http://www.google.com') - return HttpResponse(str(response.status_code)) + return HttpResponse(response.status_code) def mysql_trace(request): diff --git a/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py b/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py index d1da88ca5..68e1e0b39 100644 --- a/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py +++ b/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py @@ -215,7 +215,7 @@ def process_response(self, request, response): span = _get_django_span() span.add_attribute( attribute_key=HTTP_STATUS_CODE, - attribute_value=str(response.status_code)) + attribute_value=response.status_code) _set_django_attributes(span, request) diff --git a/contrib/opencensus-ext-django/tests/test_django_middleware.py b/contrib/opencensus-ext-django/tests/test_django_middleware.py index 15998e88c..cbd992d4a 100644 --- a/contrib/opencensus-ext-django/tests/test_django_middleware.py +++ b/contrib/opencensus-ext-django/tests/test_django_middleware.py @@ -222,7 +222,7 @@ def test_process_response(self): 'http.path': u'/wiki/Rabbit', 'http.route': u'/wiki/Rabbit', 'http.url': u'http://testserver/wiki/Rabbit', - 'http.status_code': '200', + 'http.status_code': 200, 'django.user.id': '123', 'django.user.name': 'test_name' } @@ -277,7 +277,7 @@ def test_process_response_unfinished_child_span(self): 'http.path': u'/wiki/Rabbit', 'http.route': u'/wiki/Rabbit', 'http.url': u'http://testserver/wiki/Rabbit', - 
'http.status_code': '500', + 'http.status_code': 500, 'django.user.id': '123', 'django.user.name': 'test_name' } diff --git a/contrib/opencensus-ext-flask/CHANGELOG.md b/contrib/opencensus-ext-flask/CHANGELOG.md index 6a590c4cd..549e8c993 100644 --- a/contrib/opencensus-ext-flask/CHANGELOG.md +++ b/contrib/opencensus-ext-flask/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased +- Updated `http.status_code` attribute to be an int. + ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) ## 0.7.1 Released 2019-08-05 diff --git a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py index fec2a8c23..ebd8d24ff 100644 --- a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py +++ b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py @@ -181,7 +181,7 @@ def _after_request(self, response): ) tracer.add_attribute_to_current_span( HTTP_STATUS_CODE, - str(response.status_code) + response.status_code ) except Exception: # pragma: NO COVER log.error('Failed to trace request', exc_info=True) diff --git a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py index 3e68f7124..6cb59f177 100644 --- a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py +++ b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py @@ -305,7 +305,7 @@ def test__after_request_sampled(self): 'http.path': u'/wiki/Rabbit', 'http.url': u'http://localhost/wiki/Rabbit', 'http.route': u'/wiki/', - 'http.status_code': u'200' + 'http.status_code': 200 } self.assertEqual(span.attributes, expected_attributes) diff --git a/contrib/opencensus-ext-httplib/CHANGELOG.md b/contrib/opencensus-ext-httplib/CHANGELOG.md index 36cba4701..2bd0acffd 100644 --- a/contrib/opencensus-ext-httplib/CHANGELOG.md +++ b/contrib/opencensus-ext-httplib/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased 
+- Updated `http.status_code` attribute to be an int. + ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) ## 0.7.1 Released 2019-08-06 diff --git a/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py b/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py index 2a053bed1..54ed0653d 100644 --- a/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py +++ b/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py @@ -121,7 +121,7 @@ def call(self, *args, **kwargs): # Add the status code to attributes _tracer.add_attribute_to_current_span( - HTTP_STATUS_CODE, str(result.status)) + HTTP_STATUS_CODE, result.status) _tracer.end_span() return result diff --git a/contrib/opencensus-ext-pyramid/CHANGELOG.md b/contrib/opencensus-ext-pyramid/CHANGELOG.md index 7dc6896b2..d7d68ed91 100644 --- a/contrib/opencensus-ext-pyramid/CHANGELOG.md +++ b/contrib/opencensus-ext-pyramid/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased +- Updated `http.status_code` attribute to be an int. 
+ ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) ## 0.7.0 Released 2019-07-31 diff --git a/contrib/opencensus-ext-pyramid/examples/app/__init__.py b/contrib/opencensus-ext-pyramid/examples/app/__init__.py index 045fc6428..24dcc1b82 100644 --- a/contrib/opencensus-ext-pyramid/examples/app/__init__.py +++ b/contrib/opencensus-ext-pyramid/examples/app/__init__.py @@ -28,7 +28,7 @@ def hello(request): @view_config(route_name='trace_requests') def trace_requests(request): response = requests.get('http://www.google.com') - return Response(str(response.status_code)) + return Response(response.status_code) def main(global_config, **settings): diff --git a/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py b/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py index 1608ea436..e2ce58c6d 100644 --- a/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py +++ b/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py @@ -121,7 +121,7 @@ def _after_request(self, request, response): tracer = execution_context.get_opencensus_tracer() tracer.add_attribute_to_current_span( HTTP_STATUS_CODE, - str(response.status_code)) + response.status_code) tracer.end_span() tracer.finish() diff --git a/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py b/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py index c016c8d11..13df75ddd 100644 --- a/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py +++ b/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py @@ -241,7 +241,7 @@ def dummy_handler(request): 'http.path': u'/', 'http.route': u'/', 'http.url': u'http://example.com', - 'http.status_code': '200', + 'http.status_code': 200, } self.assertEqual(span.parent_span.span_id, span_id) diff --git a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py 
b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py index d83d63ba4..b5dc999d7 100644 --- a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py @@ -28,6 +28,7 @@ from opencensus.trace import span_data from opencensus.trace.attributes import Attributes + # Agent AGENT = 'opencensus-python [{}]'.format(__version__) @@ -274,6 +275,16 @@ def map_attributes(self, attribute_map): if (attribute_key in ATTRIBUTE_MAPPING): new_key = ATTRIBUTE_MAPPING.get(attribute_key) value[new_key] = value.pop(attribute_key) + if new_key == '/http/status_code': + # workaround: Stackdriver expects status to be str + hack = value[new_key] + hack = hack['int_value'] + if not isinstance(hack, int): + hack = hack['value'] + value[new_key] = {'string_value': { + 'truncated_byte_count': 0, + 'value': str(hack), + }} return attribute_map diff --git a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py index c64806825..c48e21681 100644 --- a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py +++ b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py @@ -448,8 +448,9 @@ def test_translate_common_attributes_to_stackdriver(self): } }, '/http/status_code': { - 'int_value': { - 'value': 200 + 'string_value': { + 'truncated_byte_count': 0, + 'value': '200' } }, '/http/url': { @@ -532,6 +533,37 @@ def test_translate_common_attributes_to_stackdriver(self): exporter.map_attributes(attributes) self.assertEqual(attributes, expected_attributes) + def test_translate_common_attributes_status_code(self): + project_id = 'PROJECT' + client = mock.Mock() + client.project = project_id + exporter = trace_exporter.StackdriverExporter( + client=client, project_id=project_id) + + attributes = { + 'outer key': 'some value', + 
'attributeMap': { + 'http.status_code': { + 'int_value': 200 + } + } + } + + expected_attributes = { + 'outer key': 'some value', + 'attributeMap': { + '/http/status_code': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': '200' + } + } + } + } + + exporter.map_attributes(attributes) + self.assertEqual(attributes, expected_attributes) + class Test_set_attributes_gae(unittest.TestCase): @mock.patch('opencensus.ext.stackdriver.trace_exporter.' diff --git a/tests/system/trace/django/django_system_test.py b/tests/system/trace/django/django_system_test.py index 0bb227e2c..4dba05db4 100644 --- a/tests/system/trace/django/django_system_test.py +++ b/tests/system/trace/django/django_system_test.py @@ -134,7 +134,9 @@ def test_with_retry(self): if span.get('name') == '[mysql.query]SELECT 2*3': self.assertEqual( labels.get('mysql.cursor.method.name'), 'execute') - self.assertEqual(labels.get('mysql.query'), 'SELECT 2*3') + self.assertEqual( + labels.get('mysql.query'), 'SELECT 2*3' + ) self.assertTrue(request_succeeded) @@ -168,7 +170,9 @@ def test_with_retry(self): if span.get('name') == '[postgresql.query]SELECT 2*3': self.assertEqual( - labels.get('postgresql.cursor.method.name'), 'execute') + labels.get( + 'postgresql.cursor.method.name'), 'execute' + ) self.assertEqual( labels.get('postgresql.query'), 'SELECT 2*3') diff --git a/tests/system/trace/flask/flask_system_test.py b/tests/system/trace/flask/flask_system_test.py index a733cd245..e89f66aa5 100644 --- a/tests/system/trace/flask/flask_system_test.py +++ b/tests/system/trace/flask/flask_system_test.py @@ -162,7 +162,9 @@ def test_with_retry(self): for span in spans: labels = span.get('labels') if '/http/status_code' in labels.keys(): - self.assertEqual(labels.get('/http/status_code'), '200') + self.assertEqual( + labels.get('/http/status_code'), '200' + ) request_succeeded = True if span.get('name') == '[postgresql.query]SELECT 2*3': From a8c3415a58257f1c644e5c087c0c4b0bbb3d8482 Mon Sep 17 00:00:00 2001 
From: Victor Date: Thu, 22 Aug 2019 14:24:31 -0300 Subject: [PATCH 15/79] Fixed requests contrib to raise original exceptions (#771) --- CHANGELOG.md | 4 +- contrib/opencensus-ext-requests/CHANGELOG.md | 2 + .../opencensus/ext/requests/trace.py | 6 ++ .../tests/test_requests_trace.py | 76 +++++++++++++------ 4 files changed, 64 insertions(+), 24 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 583891b1a..89197e7b5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,12 @@ # Changelog ## Unreleased -- Updated `django`, `flask`, `httplib`, `requests` and `pyramid` modules. +- Updated `django`, `flask`, `httplib`, `requests` and `pyramid` modules ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) - Added `http code` to `grpc code` status code mapping on `utils` ([#746](https://github.com/census-instrumentation/opencensus-python/pull/746)) +- Updated `requests` module + ([#771](https://github.com/census-instrumentation/opencensus-python/pull/771)) ## 0.7.2 Released 2019-08-16 diff --git a/contrib/opencensus-ext-requests/CHANGELOG.md b/contrib/opencensus-ext-requests/CHANGELOG.md index d0ed6146d..9ffe6c8a9 100644 --- a/contrib/opencensus-ext-requests/CHANGELOG.md +++ b/contrib/opencensus-ext-requests/CHANGELOG.md @@ -5,6 +5,8 @@ ([#746](https://github.com/census-instrumentation/opencensus-python/pull/746)) - Fixed span name ([#746](https://github.com/census-instrumentation/opencensus-python/pull/746)) +- Fixed exception handling + ([#771](https://github.com/census-instrumentation/opencensus-python/pull/771)) ## 0.7.1 Released 2019-08-06 diff --git a/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py b/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py index 821c816b5..1118f57d6 100644 --- a/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py +++ b/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py @@ -105,10 +105,13 @@ def call(url, *args, **kwargs): 
result = requests_func(url, *args, **kwargs) except requests.Timeout: _span.set_status(exceptions_status.TIMEOUT) + raise except requests.URLRequired: _span.set_status(exceptions_status.INVALID_URL) + raise except Exception as e: _span.set_status(exceptions_status.unknown(e)) + raise else: # Add the status code to attributes _tracer.add_attribute_to_current_span( @@ -178,10 +181,13 @@ def wrap_session_request(wrapped, instance, args, kwargs): result = wrapped(*args, **kwargs) except requests.Timeout: _span.set_status(exceptions_status.TIMEOUT) + raise except requests.URLRequired: _span.set_status(exceptions_status.INVALID_URL) + raise except Exception as e: _span.set_status(exceptions_status.unknown(e)) + raise else: # Add the status code to attributes _tracer.add_attribute_to_current_span( diff --git a/contrib/opencensus-ext-requests/tests/test_requests_trace.py b/contrib/opencensus-ext-requests/tests/test_requests_trace.py index 67222c618..10554dfa5 100644 --- a/contrib/opencensus-ext-requests/tests/test_requests_trace.py +++ b/contrib/opencensus-ext-requests/tests/test_requests_trace.py @@ -242,7 +242,8 @@ def test_wrap_requests_timeout(self): url = 'http://localhost:8080/test' with patch, patch_thread: - wrapped(url) + with self.assertRaises(requests.Timeout): + wrapped(url) expected_attributes = { 'http.host': 'localhost:8080', @@ -288,7 +289,8 @@ def test_wrap_requests_invalid_url(self): url = 'http://localhost:8080/test' with patch, patch_thread: - wrapped(url) + with self.assertRaises(requests.URLRequired): + wrapped(url) expected_attributes = { 'http.host': 'localhost:8080', @@ -308,6 +310,7 @@ def test_wrap_requests_invalid_url(self): expected_status.__dict__, mock_tracer.current_span.status.__dict__ ) + self.assertRaises(requests.URLRequired, mock_func) def test_wrap_requests_exception(self): mock_return = mock.Mock() @@ -334,7 +337,8 @@ def test_wrap_requests_exception(self): url = 'http://localhost:8080/test' with patch, patch_thread: - wrapped(url) + 
with self.assertRaises(requests.TooManyRedirects): + wrapped(url) expected_attributes = { 'http.host': 'localhost:8080', @@ -354,6 +358,7 @@ def test_wrap_requests_exception(self): expected_status.__dict__, mock_tracer.current_span.status.__dict__ ) + self.assertRaises(requests.TooManyRedirects, mock_func) def test_wrap_session_request(self): wrapped = mock.Mock(return_value=mock.Mock(status_code=200)) @@ -376,8 +381,10 @@ def test_wrap_session_request(self): kwargs = {} with patch, patch_thread: - trace.wrap_session_request(wrapped, 'Session.request', - (request_method, url), kwargs) + trace.wrap_session_request( + wrapped, 'Session.request', + (request_method, url), kwargs + ) expected_attributes = { 'http.host': 'localhost:8080', @@ -427,8 +434,10 @@ def wrapped(*args, **kwargs): request_method = 'POST' with patch_tracer, patch_attr, patch_thread: - trace.wrap_session_request(wrapped, 'Session.request', - (request_method, url), {}) + trace.wrap_session_request( + wrapped, 'Session.request', + (request_method, url), {} + ) expected_name = '/' self.assertEqual(expected_name, mock_tracer.current_span.name) @@ -458,8 +467,11 @@ def wrapped(*args, **kwargs): request_method = 'POST' with patch_tracer, patch_attr, patch_thread: - trace.wrap_session_request(wrapped, 'Session.request', - (request_method, url), {}) + trace.wrap_session_request( + wrapped, 'Session.request', + (request_method, url), {} + ) + self.assertEqual(None, mock_tracer.current_span) def test_wrap_session_request_exporter_thread(self): @@ -487,8 +499,11 @@ def wrapped(*args, **kwargs): request_method = 'POST' with patch_tracer, patch_attr, patch_thread: - trace.wrap_session_request(wrapped, 'Session.request', - (request_method, url), {}) + trace.wrap_session_request( + wrapped, 'Session.request', + (request_method, url), {} + ) + self.assertEqual(None, mock_tracer.current_span) def test_header_is_passed_in(self): @@ -511,8 +526,10 @@ def test_header_is_passed_in(self): kwargs = {} with patch, 
patch_thread: - trace.wrap_session_request(wrapped, 'Session.request', - (request_method, url), kwargs) + trace.wrap_session_request( + wrapped, 'Session.request', + (request_method, url), kwargs + ) self.assertEqual(kwargs['headers']['x-trace'], 'some-value') @@ -536,8 +553,10 @@ def test_headers_are_preserved(self): kwargs = {'headers': {'key': 'value'}} with patch, patch_thread: - trace.wrap_session_request(wrapped, 'Session.request', - (request_method, url), kwargs) + trace.wrap_session_request( + wrapped, 'Session.request', + (request_method, url), kwargs + ) self.assertEqual(kwargs['headers']['key'], 'value') self.assertEqual(kwargs['headers']['x-trace'], 'some-value') @@ -563,8 +582,10 @@ def test_tracer_headers_are_overwritten(self): kwargs = {'headers': {'x-trace': 'original-value'}} with patch, patch_thread: - trace.wrap_session_request(wrapped, 'Session.request', - (request_method, url), kwargs) + trace.wrap_session_request( + wrapped, 'Session.request', + (request_method, url), kwargs + ) self.assertEqual(kwargs['headers']['x-trace'], 'some-value') @@ -590,8 +611,11 @@ def test_wrap_session_request_timeout(self): kwargs = {} with patch, patch_thread: - trace.wrap_session_request(wrapped, 'Session.request', - (request_method, url), kwargs) + with self.assertRaises(requests.Timeout): + trace.wrap_session_request( + wrapped, 'Session.request', + (request_method, url), kwargs + ) expected_attributes = { 'http.host': 'localhost:8080', @@ -635,8 +659,11 @@ def test_wrap_session_request_invalid_url(self): kwargs = {} with patch, patch_thread: - trace.wrap_session_request(wrapped, 'Session.request', - (request_method, url), kwargs) + with self.assertRaises(requests.URLRequired): + trace.wrap_session_request( + wrapped, 'Session.request', + (request_method, url), kwargs + ) expected_attributes = { 'http.host': 'localhost:8080', @@ -680,8 +707,11 @@ def test_wrap_session_request_exception(self): kwargs = {} with patch, patch_thread: - 
trace.wrap_session_request(wrapped, 'Session.request', - (request_method, url), kwargs) + with self.assertRaises(requests.TooManyRedirects): + trace.wrap_session_request( + wrapped, 'Session.request', + (request_method, url), kwargs + ) expected_attributes = { 'http.host': 'localhost:8080', From 8e582cb686f0f677a0534bf5c39cb52e3efcb8cc Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 27 Aug 2019 13:33:35 -0700 Subject: [PATCH 16/79] Standard Metrics - Incoming Requests Execution Time (#773) * Incoming Requests Execution Time * Fix test * Remove blank line * ADd line * Update comments * Include locks to prevent race case * Fix lint * address comments --- contrib/opencensus-ext-azure/CHANGELOG.md | 2 + contrib/opencensus-ext-azure/README.rst | 1 + .../standard_metrics/__init__.py | 3 + .../standard_metrics/http_dependency.py | 8 +- .../standard_metrics/http_requests.py | 112 ++++++++++++++---- .../tests/test_azure_standard_metrics.py | 47 ++++++-- 6 files changed, 138 insertions(+), 35 deletions(-) diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 9655e02e0..53175aa45 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -3,6 +3,8 @@ ## Unreleased - Standard metrics incoming requests per second ([#758](https://github.com/census-instrumentation/opencensus-python/pull/758)) +- Standard metrics incoming requests average execution rate + ([#773](https://github.com/census-instrumentation/opencensus-python/pull/773)) ## 0.7.0 Released 2019-07-31 diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index 451086d02..cdc5c955c 100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -154,6 +154,7 @@ Below is a list of standard metrics that are currently available: - Available Memory (bytes) - CPU Processor Time (percentage) - Incoming Request Rate (per second) +- Incoming 
Request Average Execution Time (milliseconds) - Outgoing Request Rate (per second) - Process CPU Usage (percentage) - Process Private Bytes (bytes) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py index 162f015a0..fc6a1c235 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py @@ -24,6 +24,8 @@ import ProcessCPUMetric from opencensus.ext.azure.metrics_exporter.standard_metrics.process \ import ProcessMemoryMetric +from opencensus.ext.azure.metrics_exporter.standard_metrics.http_requests \ + import RequestsAvgExecutionMetric from opencensus.ext.azure.metrics_exporter.standard_metrics.http_requests \ import RequestsRateMetric @@ -33,6 +35,7 @@ ProcessCPUMetric, ProcessMemoryMetric, ProcessorTimeMetric, + RequestsAvgExecutionMetric, RequestsRateMetric] diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py index ac4e3da37..4a00a4f5f 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py @@ -13,12 +13,14 @@ # limitations under the License. 
import requests +import threading import time from opencensus.metrics.export.gauge import DerivedDoubleGauge from opencensus.trace import execution_context dependency_map = dict() +_dependency_lock = threading.Lock() ORIGINAL_REQUEST = requests.Session.request @@ -26,8 +28,10 @@ def dependency_patch(*args, **kwargs): result = ORIGINAL_REQUEST(*args, **kwargs) # Only collect request metric if sent from non-exporter thread if not execution_context.is_exporter(): - count = dependency_map.get('count', 0) - dependency_map['count'] = count + 1 + # We don't want multiple threads updating this at once + with _dependency_lock: + count = dependency_map.get('count', 0) + dependency_map['count'] = count + 1 return result diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py index 1a78da4fc..b19f77b55 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py @@ -13,6 +13,7 @@ # limitations under the License. 
import sys +import threading import time from opencensus.metrics.export.gauge import DerivedDoubleGauge @@ -21,16 +22,32 @@ else: from http.server import HTTPServer +_requests_lock = threading.Lock() requests_map = dict() ORIGINAL_CONSTRUCTOR = HTTPServer.__init__ def request_patch(func): def wrapper(self=None): + start_time = time.time() func(self) + end_time = time.time() + + update_request_state(start_time, end_time) + + return wrapper + + +def update_request_state(start_time, end_time): + # Update requests state information + # We don't want multiple threads updating this at once + with _requests_lock: + # Update Count count = requests_map.get('count', 0) requests_map['count'] = count + 1 - return wrapper + # Update duration + duration = requests_map.get('duration', 0) + requests_map['duration'] = duration + (end_time - start_time) def server_patch(*args, **kwargs): @@ -59,6 +76,76 @@ def setup(): HTTPServer.__init__ = server_patch +def get_average_execution_time(): + last_average_duration = requests_map.get('last_average_duration', 0) + interval_duration = requests_map.get('duration', 0) \ + - requests_map.get('last_duration', 0) + interval_count = requests_map.get('count', 0) \ + - requests_map.get('last_count', 0) + try: + result = interval_duration / interval_count + requests_map['last_average_duration'] = result + requests_map['last_duration'] = requests_map.get('duration', 0) + # Convert to milliseconds + return result * 1000.0 + except ZeroDivisionError: + # If interval_count is 0, exporter call made too close to previous + # Return the previous result if this is the case + return last_average_duration * 1000.0 + + +def get_requests_rate(): + current_time = time.time() + last_rate = requests_map.get('last_rate', 0) + last_time = requests_map.get('last_time') + + try: + # last_rate_time is None the first time this function is called + if last_time is not None: + interval_time = current_time - requests_map.get('last_time', 0) + interval_count = 
requests_map.get('count', 0) \ + - requests_map.get('last_count', 0) + result = interval_count / interval_time + else: + result = 0 + requests_map['last_time'] = current_time + requests_map['last_count'] = requests_map.get('count', 0) + requests_map['last_rate'] = result + return result + except ZeroDivisionError: + # If elapsed_seconds is 0, exporter call made too close to previous + # Return the previous result if this is the case + return last_rate + + +class RequestsAvgExecutionMetric(object): + NAME = "\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Request Execution Time" + + def __init__(self): + setup() + + @staticmethod + def get_value(): + return get_average_execution_time() + + def __call__(self): + """ Returns a derived gauge for incoming requests execution rate + + Calculated by getting the time it takes to make an incoming request + and dividing over the amount of incoming requests over an elapsed time. + + :rtype: :class:`opencensus.metrics.export.gauge.DerivedLongGauge` + :return: The gauge representing the incoming requests metric + """ + gauge = DerivedDoubleGauge( + RequestsAvgExecutionMetric.NAME, + 'Incoming Requests Average Execution Rate', + 'milliseconds', + []) + gauge.create_default_time_series(RequestsAvgExecutionMetric.get_value) + return gauge + + class RequestsRateMetric(object): NAME = "\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec" @@ -67,28 +154,7 @@ def __init__(self): @staticmethod def get_value(): - current_count = requests_map.get('count', 0) - current_time = time.time() - last_count = requests_map.get('last_count', 0) - last_time = requests_map.get('last_time') - last_result = requests_map.get('last_result', 0) - - try: - # last_time is None the very first time this function is called - if last_time is not None: - elapsed_seconds = current_time - last_time - interval_count = current_count - last_count - result = interval_count / elapsed_seconds - else: - result = 0 - requests_map['last_time'] = current_time - 
requests_map['last_count'] = current_count - requests_map['last_result'] = result - return result - except ZeroDivisionError: - # If elapsed_seconds is 0, exporter call made too close to previous - # Return the previous result if this is the case - return last_result + return get_requests_rate() def __call__(self): """ Returns a derived gauge for incoming requests per second diff --git a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py index f98df0c88..70aaf4e30 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py @@ -48,12 +48,12 @@ def test_producer_get_metrics(self): producer = standard_metrics.AzureStandardMetricsProducer() metrics = producer.get_metrics() - self.assertEqual(len(metrics), 6) + self.assertEqual(len(metrics), 7) def test_register_metrics(self): registry = standard_metrics.register_metrics() - self.assertEqual(len(registry.get_metrics()), 6) + self.assertEqual(len(registry.get_metrics()), 7) def test_get_available_memory_metric(self): metric = standard_metrics.AvailableMemoryMetric() @@ -201,6 +201,7 @@ def test_request_patch(self): new_func() self.assertEqual(map['count'], 1) + self.assertIsNotNone(map['duration']) self.assertEqual(len(func.call_args_list), 1) def test_server_patch(self): @@ -267,34 +268,60 @@ def test_server_patch_no_handler(self): self.assertEqual(r, None) - def test_get_request_rate_metric(self): + def test_get_requests_rate_metric(self): metric = standard_metrics.RequestsRateMetric() gauge = metric() name = '\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec' self.assertEqual(gauge.descriptor.name, name) - def test_get_request_rate_first_time(self): - rate = standard_metrics.RequestsRateMetric.get_value() + def test_get_requests_rate_first_time(self): + rate = standard_metrics.http_requests.get_requests_rate() self.assertEqual(rate, 0) 
@mock.patch('opencensus.ext.azure.metrics_exporter' '.standard_metrics.http_requests.time') - def test_get_request_rate(self, time_mock): + def test_get_requests_rate(self, time_mock): time_mock.time.return_value = 100 standard_metrics.http_requests.requests_map['last_time'] = 98 standard_metrics.http_requests.requests_map['count'] = 4 - rate = standard_metrics.RequestsRateMetric.get_value() + rate = standard_metrics.http_requests.get_requests_rate() self.assertEqual(rate, 2) @mock.patch('opencensus.ext.azure.metrics_exporter' '.standard_metrics.http_requests.time') - def test_get_request_rate_error(self, time_mock): + def test_get_requests_rate_error(self, time_mock): time_mock.time.return_value = 100 - standard_metrics.http_requests.requests_map['last_result'] = 5 + standard_metrics.http_requests.requests_map['last_rate'] = 5 standard_metrics.http_requests.requests_map['last_time'] = 100 - result = standard_metrics.RequestsRateMetric.get_value() + result = standard_metrics.http_requests.get_requests_rate() self.assertEqual(result, 5) + + def test_get_requests_execution_metric(self): + metric = standard_metrics.RequestsAvgExecutionMetric() + gauge = metric() + + name = '\\ASP.NET Applications(??APP_W3SVC_PROC??)' \ + '\\Request Execution Time' + self.assertEqual(gauge.descriptor.name, name) + + def test_get_requests_execution(self): + map = standard_metrics.http_requests.requests_map + map['duration'] = 0.1 + map['count'] = 10 + map['last_count'] = 5 + result = standard_metrics.http_requests.get_average_execution_time() + + self.assertEqual(result, 20) + + def test_get_requests_execution_error(self): + map = standard_metrics.http_requests.requests_map + map['duration'] = 0.1 + map['count'] = 10 + map['last_count'] = 10 + result = standard_metrics.http_requests.get_average_execution_time() + + self.assertEqual(result, 0) From 2e396b063a238b3e823b6efc136b9a0405dd5565 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Donovan=20Sch=C3=B6nknecht?= Date: Fri, 30 Aug 2019 
22:21:23 +0200 Subject: [PATCH 17/79] Use Django DB instrumentation (#775) --- .../opencensus/ext/django/middleware.py | 38 ++++++++ .../tests/test_django_db_middleware.py | 89 +++++++++++++++++++ 2 files changed, 127 insertions(+) create mode 100644 contrib/opencensus-ext-django/tests/test_django_db_middleware.py diff --git a/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py b/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py index 68e1e0b39..6ba6a80a9 100644 --- a/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py +++ b/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py @@ -13,11 +13,14 @@ # limitations under the License. """Django middleware helper to capture and trace a request.""" +import django import logging import six import django.conf +from django.db import connection from django.utils.deprecation import MiddlewareMixin +from google.rpc import code_pb2 from opencensus.common import configuration from opencensus.trace import attributes_helper @@ -25,6 +28,7 @@ from opencensus.trace import print_exporter from opencensus.trace import samplers from opencensus.trace import span as span_module +from opencensus.trace import status as status_module from opencensus.trace import tracer as tracer_module from opencensus.trace import utils from opencensus.trace.propagation import trace_context_http_header_format @@ -99,6 +103,37 @@ def _set_django_attributes(span, request): span.add_attribute('django.user.name', str(user_name)) +def _trace_db_call(execute, sql, params, many, context): + tracer = _get_current_tracer() + if not tracer: + return execute(sql, params, many, context) + + vendor = context['connection'].vendor + alias = context['connection'].alias + + span = tracer.start_span() + span.name = '{}.query'.format(vendor) + span.span_kind = span_module.SpanKind.CLIENT + + tracer.add_attribute_to_current_span('component', vendor) + tracer.add_attribute_to_current_span('db.instance', alias) + 
tracer.add_attribute_to_current_span('db.statement', sql) + tracer.add_attribute_to_current_span('db.type', 'sql') + + try: + result = execute(sql, params, many, context) + except Exception: # pragma: NO COVER + status = status_module.Status( + code=code_pb2.UNKNOWN, message='DB error' + ) + span.set_status(status) + raise + else: + return result + finally: + tracer.end_span() + + class OpencensusMiddleware(MiddlewareMixin): """Saves the request in thread local""" @@ -126,6 +161,9 @@ def __init__(self, get_response=None): self.blacklist_hostnames = settings.get(BLACKLIST_HOSTNAMES, None) + if django.VERSION >= (2,): # pragma: NO COVER + connection.execute_wrappers.append(_trace_db_call) + def process_request(self, request): """Called on each request, before Django decides which view to execute. diff --git a/contrib/opencensus-ext-django/tests/test_django_db_middleware.py b/contrib/opencensus-ext-django/tests/test_django_db_middleware.py new file mode 100644 index 000000000..18bf385c9 --- /dev/null +++ b/contrib/opencensus-ext-django/tests/test_django_db_middleware.py @@ -0,0 +1,89 @@ +# Copyright 2017, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +from collections import namedtuple + +import django +import mock +import pytest +from django.test.utils import teardown_test_environment + +from opencensus.trace import execution_context + + +class TestOpencensusDatabaseMiddleware(unittest.TestCase): + def setUp(self): + from django.conf import settings as django_settings + from django.test.utils import setup_test_environment + + if not django_settings.configured: + django_settings.configure() + setup_test_environment() + + def tearDown(self): + execution_context.clear() + teardown_test_environment() + + def test_process_request(self): + if django.VERSION < (2, 0): + pytest.skip("Wrong version of Django") + + from opencensus.ext.django import middleware + + sql = "SELECT * FROM users" + + MockConnection = namedtuple('Connection', ('vendor', 'alias')) + connection = MockConnection('mysql', 'default') + + mock_execute = mock.Mock() + mock_execute.return_value = "Mock result" + + middleware.OpencensusMiddleware() + + patch_no_tracer = mock.patch( + 'opencensus.ext.django.middleware._get_current_tracer', + return_value=None) + with patch_no_tracer: + result = middleware._trace_db_call( + mock_execute, sql, params=[], many=False, + context={'connection': connection}) + self.assertEqual(result, "Mock result") + + mock_tracer = mock.Mock() + mock_tracer.return_value = mock_tracer + patch = mock.patch( + 'opencensus.ext.django.middleware._get_current_tracer', + return_value=mock_tracer) + with patch: + result = middleware._trace_db_call( + mock_execute, sql, params=[], many=False, + context={'connection': connection}) + + (mock_sql, mock_params, mock_many, + mock_context) = mock_execute.call_args[0] + + self.assertEqual(mock_sql, sql) + self.assertEqual(mock_params, []) + self.assertEqual(mock_many, False) + self.assertEqual(mock_context, {'connection': connection}) + self.assertEqual(result, "Mock result") + + result = middleware._trace_db_call( + mock_execute, sql, params=[], many=True, + 
context={'connection': connection}) + + (mock_sql, mock_params, mock_many, + mock_context) = mock_execute.call_args[0] + self.assertEqual(mock_many, True) From 2457a87165bbe3951ae48a2a56c2177d7f277be2 Mon Sep 17 00:00:00 2001 From: Joel Beach Date: Wed, 4 Sep 2019 01:07:47 +1000 Subject: [PATCH 18/79] Check that url_rule is not None before dereferencing property (#781) --- .../opencensus/ext/flask/flask_middleware.py | 8 ++-- .../tests/test_flask_middleware.py | 43 +++++++++++++++++++ 2 files changed, 48 insertions(+), 3 deletions(-) diff --git a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py index ebd8d24ff..fcbdc41e7 100644 --- a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py +++ b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py @@ -176,9 +176,11 @@ def _after_request(self, response): try: tracer = execution_context.get_opencensus_tracer() - tracer.add_attribute_to_current_span( - HTTP_ROUTE, flask.request.url_rule.rule - ) + url_rule = flask.request.url_rule + if url_rule is not None: + tracer.add_attribute_to_current_span( + HTTP_ROUTE, url_rule.rule + ) tracer.add_attribute_to_current_span( HTTP_STATUS_CODE, response.status_code diff --git a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py index 6cb59f177..bcd11a506 100644 --- a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py +++ b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py @@ -19,6 +19,7 @@ from google.rpc import code_pb2 import flask +from werkzeug.exceptions import NotFound import mock from opencensus.ext.flask import flask_middleware @@ -311,6 +312,48 @@ def test__after_request_sampled(self): self.assertEqual(span.attributes, expected_attributes) assert isinstance(span.parent_span, base.NullContextManager) + def test__after_request_invalid_url(self): + 
flask_trace_header = 'traceparent' + trace_id = '2dd43a1d6b2549c6bc2a1a54c2fc0b05' + span_id = '6e0c63257de34c92' + flask_trace_id = '00-{}-{}-00'.format(trace_id, span_id) + + app = self.create_app() + flask_middleware.FlaskMiddleware( + app=app, + sampler=samplers.AlwaysOnSampler() + ) + + context = app.test_request_context( + path='/this-url-does-not-exist', + headers={flask_trace_header: flask_trace_id} + ) + + with context: + app.preprocess_request() + tracer = execution_context.get_opencensus_tracer() + self.assertIsNotNone(tracer) + + span = tracer.current_span() + + try: + rv = app.dispatch_request() + except NotFound as e: + rv = app.handle_user_exception(e) + app.finalize_request(rv) + + # http.route should not be set + expected_attributes = { + 'http.host': u'localhost', + 'http.method': u'GET', + 'http.path': u'/this-url-does-not-exist', + 'http.url': u'http://localhost/this-url-does-not-exist', + 'http.status_code': 404 + } + + self.assertEqual(span.attributes, expected_attributes) + assert isinstance(span.parent_span, base.NullContextManager) + def test__after_request_blacklist(self): flask_trace_header = 'traceparent' trace_id = '2dd43a1d6b2549c6bc2a1a54c2fc0b05' From af284a92b80bcbaf5db53e7e0813f96691b4c696 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Wed, 25 Sep 2019 11:02:01 -0700 Subject: [PATCH 19/79] Implement Connection Strings for Azure Exporters (#767) * Connection string * Implement derived endpoint, authorization * Update README, add tests * Fix lint * Update CHANGELOG * Fix lint * Fix examples * Address comments * Fix use case * fix test * Fix example * Address comments * include tests --- contrib/opencensus-ext-azure/CHANGELOG.md | 2 + contrib/opencensus-ext-azure/README.rst | 49 ++++--- .../examples/logs/correlated.py | 5 +- .../examples/logs/error.py | 5 +- .../examples/logs/simple.py | 5 +- .../examples/metrics/simple.py | 3 + .../examples/metrics/standard.py | 3 + .../examples/metrics/sum.py | 3 + .../examples/traces/client.py 
| 5 +- .../examples/traces/config.py | 3 +- .../examples/traces/custom.py | 3 +- .../examples/traces/server.py | 5 +- .../examples/traces/simple.py | 5 +- .../opencensus/ext/azure/common/__init__.py | 69 ++++++++- .../tests/test_options.py | 138 ++++++++++++++++++ 15 files changed, 268 insertions(+), 35 deletions(-) create mode 100644 contrib/opencensus-ext-azure/tests/test_options.py diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 53175aa45..1a5688f47 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased +- Implement connection strings + ([#767](https://github.com/census-instrumentation/opencensus-python/pull/767)) - Standard metrics incoming requests per second ([#758](https://github.com/census-instrumentation/opencensus-python/pull/758)) - Standard metrics incoming requests average execution rate diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index cdc5c955c..1d844e153 100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -24,8 +24,8 @@ The **Azure Monitor Log Handler** allows you to export Python logs to `Azure Mon This example shows how to send a warning level log to Azure Monitor. * Create an Azure Monitor resource and get the instrumentation key, more information can be found `here `_. -* Put the instrumentation key in ``APPINSIGHTS_INSTRUMENTATIONKEY`` environment variable. -* You can also specify the instrumentation key explicitly in the code, which will take priority over a set environment variable. +* Place your instrumentation key in a `connection string` and directly into your code. +* Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``. .. 
code:: python @@ -34,15 +34,16 @@ This example shows how to send a warning level log to Azure Monitor. from opencensus.ext.azure.log_exporter import AzureLogHandler logger = logging.getLogger(__name__) - logger.addHandler(AzureLogHandler()) + logger.addHandler(AzureLogHandler(connection_string='InstrumentationKey=')) logger.warning('Hello, World!') +* Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``. + You can enrich the logs with trace IDs and span IDs by using the `logging integration <../opencensus-ext-logging>`_. * Create an Azure Monitor resource and get the instrumentation key, more information can be found `here `_. -* Install the `logging integration package <../opencensus-ext-logging>`_ using ``pip install opencensus-ext-logging``. -* Put the instrumentation key in ``APPINSIGHTS_INSTRUMENTATIONKEY`` environment variable. -* You can also specify the instrumentation key explicitly in the code, which will take priority over a set environment variable. +* Place your instrumentation key in a `connection string` and directly into your code. +* Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``. .. 
code:: python @@ -58,16 +59,19 @@ You can enrich the logs with trace IDs and span IDs by using the `logging integr logger = logging.getLogger(__name__) - handler = AzureLogHandler() + handler = AzureLogHandler(connection_string='InstrumentationKey=') handler.setFormatter(logging.Formatter('%(traceId)s %(spanId)s %(message)s')) logger.addHandler(handler) - tracer = Tracer(exporter=AzureExporter(), sampler=ProbabilitySampler(1.0)) + tracer = Tracer( + exporter=AzureExporter(connection_string='InstrumentationKey='), + sampler=ProbabilitySampler(1.0) + ) logger.warning('Before the span') with tracer.span(name='test'): logger.warning('In the span') - logger.warning('After the span') + logger.warning('After the span') Metrics ~~~~~~~ @@ -75,8 +79,8 @@ The **Azure Monitor Metrics Exporter** allows you to export metrics to `Azure Mo * Create an Azure Monitor resource and get the instrumentation key, more information can be found `here `_. -* Put the instrumentation key in ``APPINSIGHTS_INSTRUMENTATIONKEY`` environment variable. -* You can also specify the instrumentation key explicitly in the code, which will take priority over a set environment variable. +* Place your instrumentation key in a `connection string` and directly into your code. +* Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``. .. code:: python @@ -105,7 +109,7 @@ The **Azure Monitor Metrics Exporter** allows you to export metrics to `Azure Mo def main(): # Enable metrics # Set the interval in seconds in which you want to send metrics - exporter = metrics_exporter.new_metrics_exporter() + exporter = metrics_exporter.new_metrics_exporter(connection_string='InstrumentationKey=') view_manager.register_exporter(exporter) view_manager.register_view(CARROTS_VIEW) @@ -138,7 +142,7 @@ The exporter also includes a set of standard metrics that are exported to Azure # All you need is the next line. 
You can disable standard metrics by # passing in enable_standard_metrics=False into the constructor of # new_metrics_exporter() - _exporter = metrics_exporter.new_metrics_exporter() + _exporter = metrics_exporter.new_metrics_exporter(connection_string='InstrumentationKey=') for i in range(100): print(psutil.virtual_memory()) @@ -167,26 +171,31 @@ The **Azure Monitor Trace Exporter** allows you to export `OpenCensus`_ traces t This example shows how to send a span "hello" to Azure Monitor. * Create an Azure Monitor resource and get the instrumentation key, more information can be found `here `_. -* Put the instrumentation key in ``APPINSIGHTS_INSTRUMENTATIONKEY`` environment variable. -* You can also specify the instrumentation key explicitly in the code, which will take priority over a set environment variable. +* Place your instrumentation key in a `connection string` and directly into your code. +* Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``. -.. code:: python + .. code:: python from opencensus.ext.azure.trace_exporter import AzureExporter from opencensus.trace.samplers import ProbabilitySampler from opencensus.trace.tracer import Tracer - tracer = Tracer(exporter=AzureExporter(), sampler=ProbabilitySampler(1.0)) + tracer = Tracer( + exporter=AzureExporter(connection_string='InstrumentationKey='), + sampler=ProbabilitySampler(1.0) + ) with tracer.span(name='hello'): print('Hello, World!') -You can also specify the instrumentation key explicitly in the code. +OpenCensus also supports several [integrations](https://github.com/census-instrumentation/opencensus-python#integration) which allow OpenCensus to integrate with third party libraries. + +This example shows how to integrate with the [requests](https://2.python-requests.org/en/master/) library. + +* Create an Azure Monitor resource and get the instrumentation key, more information can be found `here `_. 
* Install the `requests integration package <../opencensus-ext-requests>`_ using ``pip install opencensus-ext-requests``. -* Put the instrumentation key in ``APPINSIGHTS_INSTRUMENTATIONKEY`` environment variable. -* You can also specify the instrumentation key explicitly in the code, which will take priority over a set environment variable. +* Place your instrumentation key in a `connection string` and directly into your code. +* Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``. .. code:: python diff --git a/contrib/opencensus-ext-azure/examples/logs/correlated.py b/contrib/opencensus-ext-azure/examples/logs/correlated.py index 205a46bc1..69445e997 100644 --- a/contrib/opencensus-ext-azure/examples/logs/correlated.py +++ b/contrib/opencensus-ext-azure/examples/logs/correlated.py @@ -24,8 +24,9 @@ logger = logging.getLogger(__name__) -# TODO: you need to specify the instrumentation key in the -# APPINSIGHTS_INSTRUMENTATIONKEY environment variable. +# TODO: you need to specify the instrumentation key in a connection string +# and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING +# environment variable. handler = AzureLogHandler() logger.addHandler(handler) diff --git a/contrib/opencensus-ext-azure/examples/logs/error.py b/contrib/opencensus-ext-azure/examples/logs/error.py index 4f801342a..772861cf1 100644 --- a/contrib/opencensus-ext-azure/examples/logs/error.py +++ b/contrib/opencensus-ext-azure/examples/logs/error.py @@ -17,8 +17,9 @@ from opencensus.ext.azure.log_exporter import AzureLogHandler logger = logging.getLogger(__name__) -# TODO: you need to specify the instrumentation key in the -# APPINSIGHTS_INSTRUMENTATIONKEY environment variable. +# TODO: you need to specify the instrumentation key in a connection string +# and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING +# environment variable. 
logger.addHandler(AzureLogHandler()) diff --git a/contrib/opencensus-ext-azure/examples/logs/simple.py b/contrib/opencensus-ext-azure/examples/logs/simple.py index cdda2b688..1fba3d668 100644 --- a/contrib/opencensus-ext-azure/examples/logs/simple.py +++ b/contrib/opencensus-ext-azure/examples/logs/simple.py @@ -17,7 +17,8 @@ from opencensus.ext.azure.log_exporter import AzureLogHandler logger = logging.getLogger(__name__) -# TODO: you need to specify the instrumentation key in the -# APPINSIGHTS_INSTRUMENTATIONKEY environment variable. +# TODO: you need to specify the instrumentation key in a connection string +# and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING +# environment variable. logger.addHandler(AzureLogHandler()) logger.warning('Hello, World!') diff --git a/contrib/opencensus-ext-azure/examples/metrics/simple.py b/contrib/opencensus-ext-azure/examples/metrics/simple.py index 2409f21ad..67115f6bd 100644 --- a/contrib/opencensus-ext-azure/examples/metrics/simple.py +++ b/contrib/opencensus-ext-azure/examples/metrics/simple.py @@ -38,6 +38,9 @@ def main(): # Enable metrics # Set the interval in seconds in which you want to send metrics + # TODO: you need to specify the instrumentation key in a connection string + # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING + # environment variable. exporter = metrics_exporter.new_metrics_exporter() view_manager.register_exporter(exporter) diff --git a/contrib/opencensus-ext-azure/examples/metrics/standard.py b/contrib/opencensus-ext-azure/examples/metrics/standard.py index 7951c8aa9..3c4627765 100644 --- a/contrib/opencensus-ext-azure/examples/metrics/standard.py +++ b/contrib/opencensus-ext-azure/examples/metrics/standard.py @@ -19,6 +19,9 @@ def main(): + # TODO: you need to specify the instrumentation key in a connection string + # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING + # environment variable. # All you need is the next line. 
You can disable standard metrics by # passing in enable_standard_metrics=False into the constructor of # new_metrics_exporter() diff --git a/contrib/opencensus-ext-azure/examples/metrics/sum.py b/contrib/opencensus-ext-azure/examples/metrics/sum.py index e92a5e406..355c72954 100644 --- a/contrib/opencensus-ext-azure/examples/metrics/sum.py +++ b/contrib/opencensus-ext-azure/examples/metrics/sum.py @@ -38,6 +38,9 @@ def main(): # Enable metrics # Set the interval in seconds in which you want to send metrics + # TODO: you need to specify the instrumentation key in a connection string + # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING + # environment variable. exporter = metrics_exporter.new_metrics_exporter() view_manager.register_exporter(exporter) diff --git a/contrib/opencensus-ext-azure/examples/traces/client.py b/contrib/opencensus-ext-azure/examples/traces/client.py index eb8480089..004c79ab2 100644 --- a/contrib/opencensus-ext-azure/examples/traces/client.py +++ b/contrib/opencensus-ext-azure/examples/traces/client.py @@ -20,8 +20,9 @@ from opencensus.trace.tracer import Tracer config_integration.trace_integrations(['requests']) -# TODO: you need to specify the instrumentation key in the -# APPINSIGHTS_INSTRUMENTATIONKEY environment variable. +# TODO: you need to specify the instrumentation key in a connection string +# and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING +# environment variable. tracer = Tracer(exporter=AzureExporter(), sampler=ProbabilitySampler(1.0)) with tracer.span(name='parent'): with tracer.span(name='child'): diff --git a/contrib/opencensus-ext-azure/examples/traces/config.py b/contrib/opencensus-ext-azure/examples/traces/config.py index 9c4c7fd0f..c5a9e025a 100644 --- a/contrib/opencensus-ext-azure/examples/traces/config.py +++ b/contrib/opencensus-ext-azure/examples/traces/config.py @@ -19,7 +19,8 @@ tracer = Tracer( exporter=AzureExporter( # TODO: replace the all-zero GUID with your instrumentation key. 
- instrumentation_key='00000000-0000-0000-0000-000000000000', + connection_string='InstrumentationKey= \ + 00000000-0000-0000-0000-000000000000', ), sampler=ProbabilitySampler(rate=1.0), ) diff --git a/contrib/opencensus-ext-azure/examples/traces/custom.py b/contrib/opencensus-ext-azure/examples/traces/custom.py index edcb187c5..5b23b10a7 100644 --- a/contrib/opencensus-ext-azure/examples/traces/custom.py +++ b/contrib/opencensus-ext-azure/examples/traces/custom.py @@ -21,7 +21,8 @@ 'TRACE': { 'SAMPLER': 'opencensus.trace.samplers.ProbabilitySampler(rate=1.0)', 'EXPORTER': '''opencensus.ext.azure.trace_exporter.AzureExporter( - instrumentation_key='00000000-0000-0000-0000-000000000000', + connection_string= + 'InstrumentationKey=00000000-0000-0000-0000-000000000000', )''', }, } diff --git a/contrib/opencensus-ext-azure/examples/traces/server.py b/contrib/opencensus-ext-azure/examples/traces/server.py index a76e5d3a2..3702e7ef3 100644 --- a/contrib/opencensus-ext-azure/examples/traces/server.py +++ b/contrib/opencensus-ext-azure/examples/traces/server.py @@ -20,8 +20,9 @@ from opencensus.trace import config_integration from opencensus.trace.samplers import ProbabilitySampler -# TODO: you need to specify the instrumentation key in the -# APPINSIGHTS_INSTRUMENTATIONKEY environment variable. +# TODO: you need to specify the instrumentation key in a connection string +# and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING +# environment variable. 
app = Flask(__name__) middleware = FlaskMiddleware( app, diff --git a/contrib/opencensus-ext-azure/examples/traces/simple.py b/contrib/opencensus-ext-azure/examples/traces/simple.py index 49c69d6e1..b0008f464 100644 --- a/contrib/opencensus-ext-azure/examples/traces/simple.py +++ b/contrib/opencensus-ext-azure/examples/traces/simple.py @@ -16,8 +16,9 @@ from opencensus.trace.samplers import ProbabilitySampler from opencensus.trace.tracer import Tracer -# TODO: you need to specify the instrumentation key in the -# APPINSIGHTS_INSTRUMENTATIONKEY environment variable. +# TODO: you need to specify the instrumentation key in a connection string +# and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING +# environment variable. tracer = Tracer(exporter=AzureExporter(), sampler=ProbabilitySampler(1.0)) with tracer.span(name='foo'): diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py index 15687db17..754e2a27c 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py @@ -17,14 +17,81 @@ from opencensus.ext.azure.common.protocol import BaseObject +INGESTION_ENDPOINT = 'ingestionendpoint' +INSTRUMENTATION_KEY = 'instrumentationkey' + + +def process_options(options): + code_cs = parse_connection_string(options.connection_string) + code_ikey = options.instrumentation_key + env_cs = parse_connection_string( + os.getenv('APPLICATIONINSIGHTS_CONNECTION_STRING')) + env_ikey = os.getenv('APPINSIGHTS_INSTRUMENTATIONKEY') + + # The priority of which value takes on the instrumentation key is: + # 1. Key from explicitly passed in connection string + # 2. Key from explicitly passed in instrumentation key + # 3. Key from connection string in environment variable + # 4. 
Key from instrumentation key in environment variable + options.instrumentation_key = code_cs.get(INSTRUMENTATION_KEY) \ + or code_ikey \ + or env_cs.get(INSTRUMENTATION_KEY) \ + or env_ikey + # The priority of the ingestion endpoint is as follows: + # 1. The endpoint explicitly passed in connection string + # 2. The endpoint from the connection string in environment variable + # 3. The default breeze endpoint + endpoint = code_cs.get(INGESTION_ENDPOINT) \ + or env_cs.get(INGESTION_ENDPOINT) \ + or 'https://dc.services.visualstudio.com' + options.endpoint = endpoint + '/v2/track' + + +def parse_connection_string(connection_string): + if connection_string is None: + return {} + try: + pairs = connection_string.split(';') + result = dict(s.split('=') for s in pairs) + # Convert keys to lower-case due to case type-insensitive checking + result = {key.lower(): value for key, value in result.items()} + except Exception: + raise ValueError('Invalid connection string') + # Validate authorization + auth = result.get('authorization') + if auth is not None and auth.lower() != 'ikey': + raise ValueError('Invalid authorization mechanism') + # Construct the ingestion endpoint if not passed in explicitly + if result.get(INGESTION_ENDPOINT) is None: + endpoint_suffix = '' + location_prefix = '' + suffix = result.get('endpointsuffix') + if suffix is not None: + endpoint_suffix = suffix + # Get regional information if provided + prefix = result.get('location') + if prefix is not None: + location_prefix = prefix + '.' + endpoint = 'https://' + location_prefix + 'dc.' 
+ endpoint_suffix + result[INGESTION_ENDPOINT] = endpoint + else: + # Default to None if cannot construct + result[INGESTION_ENDPOINT] = None + return result + class Options(BaseObject): + def __init__(self, *args, **kwargs): + super(Options, self).__init__(*args, **kwargs) + process_options(self) + _default = BaseObject( + connection_string=None, enable_standard_metrics=True, endpoint='https://dc.services.visualstudio.com/v2/track', export_interval=15.0, grace_period=5.0, - instrumentation_key=os.getenv('APPINSIGHTS_INSTRUMENTATIONKEY', None), + instrumentation_key=None, max_batch_size=100, minimum_retry_interval=60, # minimum retry interval in seconds proxy=None, diff --git a/contrib/opencensus-ext-azure/tests/test_options.py b/contrib/opencensus-ext-azure/tests/test_options.py new file mode 100644 index 000000000..5c16d9c6c --- /dev/null +++ b/contrib/opencensus-ext-azure/tests/test_options.py @@ -0,0 +1,138 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import unittest + +from opencensus.ext.azure import common + + +class TestOptions(unittest.TestCase): + def setUp(self): + os.environ.clear() + + def test_process_options_ikey_code_cs(self): + options = common.Options() + options.connection_string = 'Authorization=ikey;InstrumentationKey=123' + options.instrumentation_key = '456' + os.environ['APPLICATIONINSIGHTS_CONNECTION_STRING'] = \ + 'Authorization=ikey;InstrumentationKey=789' + os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'] = '101112' + common.process_options(options) + + self.assertEqual(options.instrumentation_key, '123') + + def test_process_options_ikey_code_ikey(self): + options = common.Options() + options.connection_string = None + options.instrumentation_key = '456' + os.environ['APPLICATIONINSIGHTS_CONNECTION_STRING'] = \ + 'Authorization=ikey;InstrumentationKey=789' + os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'] = '101112' + common.process_options(options) + + self.assertEqual(options.instrumentation_key, '456') + + def test_process_options_ikey_env_cs(self): + options = common.Options() + options.connection_string = None + options.instrumentation_key = None + os.environ['APPLICATIONINSIGHTS_CONNECTION_STRING'] = \ + 'Authorization=ikey;InstrumentationKey=789' + os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'] = '101112' + common.process_options(options) + + self.assertEqual(options.instrumentation_key, '789') + + def test_process_options_ikey_env_ikey(self): + options = common.Options() + options.connection_string = None + options.instrumentation_key = None + os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'] = '101112' + common.process_options(options) + + self.assertEqual(options.instrumentation_key, '101112') + + def test_process_options_endpoint_code_cs(self): + options = common.Options() + options.connection_string = 'Authorization=ikey;IngestionEndpoint=123' + os.environ['APPLICATIONINSIGHTS_CONNECTION_STRING'] = \ + 'Authorization=ikey;IngestionEndpoint=456' + 
common.process_options(options) + + self.assertEqual(options.endpoint, '123/v2/track') + + def test_process_options_endpoint_env_cs(self): + options = common.Options() + options.connection_string = None + os.environ['APPLICATIONINSIGHTS_CONNECTION_STRING'] = \ + 'Authorization=ikey;IngestionEndpoint=456' + common.process_options(options) + + self.assertEqual(options.endpoint, '456/v2/track') + + def test_process_options_endpoint_default(self): + options = common.Options() + options.connection_string = None + common.process_options(options) + + self.assertEqual(options.endpoint, + 'https://dc.services.visualstudio.com/v2/track') + + def test_parse_connection_string_none(self): + cs = None + result = common.parse_connection_string(cs) + + self.assertEqual(result, {}) + + def test_parse_connection_string_invalid(self): + cs = 'asd' + self.assertRaises(ValueError, + lambda: common.parse_connection_string(cs)) + + def test_parse_connection_string_default_auth(self): + cs = 'InstrumentationKey=123' + result = common.parse_connection_string(cs) + self.assertEqual(result['instrumentationkey'], '123') + + def test_parse_connection_string_invalid_auth(self): + cs = 'Authorization=asd' + self.assertRaises(ValueError, + lambda: common.parse_connection_string(cs)) + + def test_parse_connection_string_explicit_endpoint(self): + cs = 'Authorization=ikey;IngestionEndpoint=123;' \ + 'Location=us;EndpointSuffix=suffix' + result = common.parse_connection_string(cs) + + self.assertEqual(result['ingestionendpoint'], '123') + + def test_parse_connection_string_default(self): + cs = 'Authorization=ikey;Location=us' + result = common.parse_connection_string(cs) + + self.assertEqual(result['ingestionendpoint'], + None) + + def test_parse_connection_string_no_location(self): + cs = 'Authorization=ikey;EndpointSuffix=suffix' + result = common.parse_connection_string(cs) + + self.assertEqual(result['ingestionendpoint'], 'https://dc.suffix') + + def test_parse_connection_string_location(self): 
+ cs = 'Authorization=ikey;EndpointSuffix=suffix;Location=us' + result = common.parse_connection_string(cs) + + self.assertEqual(result['ingestionendpoint'], 'https://us.dc.suffix') From e5e752ceab3371ec4b78cec23a717168e2ed9372 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 1 Oct 2019 15:41:07 -0700 Subject: [PATCH 20/79] Update CHANGELOGs, bump Azure version for releases (#794) --- CHANGELOG.md | 24 +++++++++++++++++-- contrib/opencensus-ext-azure/CHANGELOG.md | 12 ++++++++-- .../opencensus/ext/azure/common/version.py | 2 +- contrib/opencensus-ext-azure/setup.py | 6 ++--- contrib/opencensus-ext-django/CHANGELOG.md | 10 ++++++++ contrib/opencensus-ext-flask/CHANGELOG.md | 10 ++++++++ contrib/opencensus-ext-httplib/CHANGELOG.md | 4 ++++ contrib/opencensus-ext-pyramid/CHANGELOG.md | 4 ++++ contrib/opencensus-ext-requests/CHANGELOG.md | 4 ++++ .../opencensus-ext-stackdriver/CHANGELOG.md | 8 +++++++ 10 files changed, 76 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 89197e7b5..bb78b55d3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,30 @@ # Changelog ## Unreleased -- Updated `django`, `flask`, `httplib`, `requests` and `pyramid` modules - ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) + +## 0.7.5 +Released 2019-10-01 + +- Updated `flask` module + ([#781](https://github.com/census-instrumentation/opencensus-python/pull/781)) + +## 0.7.4 +Released 2019-09-30 + +- Updated `azure` module + ([#773](https://github.com/census-instrumentation/opencensus-python/pull/773), + [#767](https://github.com/census-instrumentation/opencensus-python/pull/767)) + +- Updated `django` module + ([#775](https://github.com/census-instrumentation/opencensus-python/pull/775)) + +## 0.7.3 +Released 2019-08-26 + - Added `http code` to `grpc code` status code mapping on `utils` ([#746](https://github.com/census-instrumentation/opencensus-python/pull/746)) +- Updated `django`, `flask`, `httplib`, 
`requests` and `pyramid` modules + ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) - Updated `requests` module ([#771](https://github.com/census-instrumentation/opencensus-python/pull/771)) diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 1a5688f47..7731e418b 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -1,12 +1,20 @@ # Changelog ## Unreleased + +## 1.0.0 +Released 2019-09-30 + +- Standard Metrics - Incoming requests execution time + ([#773](https://github.com/census-instrumentation/opencensus-python/pull/773)) - Implement connection strings ([#767](https://github.com/census-instrumentation/opencensus-python/pull/767)) + +## 0.7.1 +Released 2019-08-26 + - Standard metrics incoming requests per second ([#758](https://github.com/census-instrumentation/opencensus-python/pull/758)) -- Standard metrics incoming requests average execution rate - ([#773](https://github.com/census-instrumentation/opencensus-python/pull/773)) ## 0.7.0 Released 2019-07-31 diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py index dffc606db..71eb269bd 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.8.dev0' +__version__ = '1.0.dev0' diff --git a/contrib/opencensus-ext-azure/setup.py b/contrib/opencensus-ext-azure/setup.py index 3b5976bcd..a5efc1676 100644 --- a/contrib/opencensus-ext-azure/setup.py +++ b/contrib/opencensus-ext-azure/setup.py @@ -22,9 +22,9 @@ author='OpenCensus Authors', author_email='census-developers@googlegroups.com', classifiers=[ - 'Intended Audience :: Developers', - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', + 'Intended Audience :: End Users/Desktop', + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', diff --git a/contrib/opencensus-ext-django/CHANGELOG.md b/contrib/opencensus-ext-django/CHANGELOG.md index 284104707..79d6d9a43 100644 --- a/contrib/opencensus-ext-django/CHANGELOG.md +++ b/contrib/opencensus-ext-django/CHANGELOG.md @@ -1,6 +1,16 @@ # Changelog ## Unreleased + +## 0.7.2 +Released 2019-09-30 + +- Use Django 2.0 DB instrumentation +([#775](https://github.com/census-instrumentation/opencensus-python/pull/775)) + +## 0.7.1 +Released 2019-08-26 + - Updated `http.status_code` attribute to be an int. ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) diff --git a/contrib/opencensus-ext-flask/CHANGELOG.md b/contrib/opencensus-ext-flask/CHANGELOG.md index 549e8c993..2b8246fd3 100644 --- a/contrib/opencensus-ext-flask/CHANGELOG.md +++ b/contrib/opencensus-ext-flask/CHANGELOG.md @@ -1,6 +1,16 @@ # Changelog ## Unreleased + +## 0.7.3 +Released 2019-10-01 + +- Check that `url_rule` is not `None` before dereferencing property. + ([#781](https://github.com/census-instrumentation/opencensus-python/pull/781)) + +## 0.7.2 +Released 2019-08-26 + - Updated `http.status_code` attribute to be an int. 
([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) diff --git a/contrib/opencensus-ext-httplib/CHANGELOG.md b/contrib/opencensus-ext-httplib/CHANGELOG.md index 2bd0acffd..d2241c7af 100644 --- a/contrib/opencensus-ext-httplib/CHANGELOG.md +++ b/contrib/opencensus-ext-httplib/CHANGELOG.md @@ -1,6 +1,10 @@ # Changelog ## Unreleased + +## 0.7.2 +Released 2019-08-26 + - Updated `http.status_code` attribute to be an int. ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) diff --git a/contrib/opencensus-ext-pyramid/CHANGELOG.md b/contrib/opencensus-ext-pyramid/CHANGELOG.md index d7d68ed91..fd4831563 100644 --- a/contrib/opencensus-ext-pyramid/CHANGELOG.md +++ b/contrib/opencensus-ext-pyramid/CHANGELOG.md @@ -1,6 +1,10 @@ # Changelog ## Unreleased + +## 0.7.1 +Released 2019-08-26 + - Updated `http.status_code` attribute to be an int. ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) diff --git a/contrib/opencensus-ext-requests/CHANGELOG.md b/contrib/opencensus-ext-requests/CHANGELOG.md index 9ffe6c8a9..cacdd28a3 100644 --- a/contrib/opencensus-ext-requests/CHANGELOG.md +++ b/contrib/opencensus-ext-requests/CHANGELOG.md @@ -1,6 +1,10 @@ # Changelog ## Unreleased + +## 0.7.2 +Released 2019-08-26 + - Added attributes following specs listed [here](https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#attributes) ([#746](https://github.com/census-instrumentation/opencensus-python/pull/746)) - Fixed span name diff --git a/contrib/opencensus-ext-stackdriver/CHANGELOG.md b/contrib/opencensus-ext-stackdriver/CHANGELOG.md index f5ae8e0e1..a3b8b4c7b 100644 --- a/contrib/opencensus-ext-stackdriver/CHANGELOG.md +++ b/contrib/opencensus-ext-stackdriver/CHANGELOG.md @@ -2,6 +2,14 @@ ## Unreleased +## 0.7.2 +Released 2019-08-26 + + - Delete SD integ test metric descriptors + ([#770](https://github.com/census-instrumentation/opencensus-python/pull/770)) + - 
Updated `http.status_code` attribute to be an int. + ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) + ## 0.7.1 Released 2019-08-05 From 3f01007a05dcae84cc912a28588d3074fff4f587 Mon Sep 17 00:00:00 2001 From: Nathan Button Date: Thu, 3 Oct 2019 21:10:50 -0700 Subject: [PATCH 21/79] Trace exporter for Datadog (#793) --- README.rst | 2 + contrib/opencensus-ext-datadog/CHANGELOG.md | 4 + .../examples/datadog.py | 23 ++ .../opencensus/__init__.py | 1 + .../opencensus/ext/__init__.py | 1 + .../opencensus/ext/datadog/__init__.py | 1 + .../opencensus/ext/datadog/traces.py | 373 ++++++++++++++++++ .../opencensus/ext/datadog/transport.py | 45 +++ contrib/opencensus-ext-datadog/setup.cfg | 2 + contrib/opencensus-ext-datadog/setup.py | 54 +++ .../tests/traces_test.py | 353 +++++++++++++++++ .../tests/transport_test.py | 15 + contrib/opencensus-ext-datadog/version.py | 15 + noxfile.py | 1 + tox.ini | 1 + 15 files changed, 891 insertions(+) create mode 100644 contrib/opencensus-ext-datadog/CHANGELOG.md create mode 100644 contrib/opencensus-ext-datadog/examples/datadog.py create mode 100644 contrib/opencensus-ext-datadog/opencensus/__init__.py create mode 100644 contrib/opencensus-ext-datadog/opencensus/ext/__init__.py create mode 100644 contrib/opencensus-ext-datadog/opencensus/ext/datadog/__init__.py create mode 100644 contrib/opencensus-ext-datadog/opencensus/ext/datadog/traces.py create mode 100644 contrib/opencensus-ext-datadog/opencensus/ext/datadog/transport.py create mode 100644 contrib/opencensus-ext-datadog/setup.cfg create mode 100644 contrib/opencensus-ext-datadog/setup.py create mode 100644 contrib/opencensus-ext-datadog/tests/traces_test.py create mode 100644 contrib/opencensus-ext-datadog/tests/transport_test.py create mode 100644 contrib/opencensus-ext-datadog/version.py diff --git a/README.rst b/README.rst index 87ddf495f..e056d4c4d 100644 --- a/README.rst +++ b/README.rst @@ -226,12 +226,14 @@ Trace Exporter -------------- - 
`Azure`_ +- `Datadog`_ - `Jaeger`_ - `OCAgent`_ - `Stackdriver`_ - `Zipkin`_ .. _Azure: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-azure +.. _Datadog: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-datadog .. _Django: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-django .. _Flask: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-flask .. _gevent: https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-gevent diff --git a/contrib/opencensus-ext-datadog/CHANGELOG.md b/contrib/opencensus-ext-datadog/CHANGELOG.md new file mode 100644 index 000000000..b2705a57e --- /dev/null +++ b/contrib/opencensus-ext-datadog/CHANGELOG.md @@ -0,0 +1,4 @@ +# Changelog + +## Unreleased +- Initial version. diff --git a/contrib/opencensus-ext-datadog/examples/datadog.py b/contrib/opencensus-ext-datadog/examples/datadog.py new file mode 100644 index 000000000..7e1a6e0d0 --- /dev/null +++ b/contrib/opencensus-ext-datadog/examples/datadog.py @@ -0,0 +1,23 @@ +from flask import Flask + +from opencensus.ext.flask.flask_middleware import FlaskMiddleware +from opencensus.trace.samplers import AlwaysOnSampler +from traces import DatadogTraceExporter +from traces import Options + +app = Flask(__name__) +middleware = FlaskMiddleware(app, + blacklist_paths=['/healthz'], + sampler=AlwaysOnSampler(), + exporter=DatadogTraceExporter( + Options(service='python-export-test', + global_tags={"stack": "example"}))) + + +@app.route('/') +def hello(): + return 'Hello World!' 
+ + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, threaded=True) diff --git a/contrib/opencensus-ext-datadog/opencensus/__init__.py b/contrib/opencensus-ext-datadog/opencensus/__init__.py new file mode 100644 index 000000000..69e3be50d --- /dev/null +++ b/contrib/opencensus-ext-datadog/opencensus/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/contrib/opencensus-ext-datadog/opencensus/ext/__init__.py b/contrib/opencensus-ext-datadog/opencensus/ext/__init__.py new file mode 100644 index 000000000..69e3be50d --- /dev/null +++ b/contrib/opencensus-ext-datadog/opencensus/ext/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/contrib/opencensus-ext-datadog/opencensus/ext/datadog/__init__.py b/contrib/opencensus-ext-datadog/opencensus/ext/datadog/__init__.py new file mode 100644 index 000000000..69e3be50d --- /dev/null +++ b/contrib/opencensus-ext-datadog/opencensus/ext/datadog/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/contrib/opencensus-ext-datadog/opencensus/ext/datadog/traces.py b/contrib/opencensus-ext-datadog/opencensus/ext/datadog/traces.py new file mode 100644 index 000000000..ebeee9b84 --- /dev/null +++ b/contrib/opencensus-ext-datadog/opencensus/ext/datadog/traces.py @@ -0,0 +1,373 @@ +# Copyright 2018, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import codecs +from collections import defaultdict +from datetime import datetime + +import bitarray + +from opencensus.common.transports import sync +from opencensus.common.utils import ISO_DATETIME_REGEX +from opencensus.ext.datadog.transport import DDTransport +from opencensus.trace import base_exporter +from opencensus.trace import span_data + + +class Options(object): + """ Options contains options for configuring the exporter. + The address can be empty as the prometheus client will + assume it's localhost + + :type namespace: str + :param namespace: Namespace specifies the namespaces to which metric keys + are appended. Defaults to ''. + + :type service: str + :param service: service specifies the service name used for tracing. + + :type trace_addr: str + :param trace_addr: trace_addr specifies the host[:port] address of the + Datadog Trace Agent. It defaults to localhost:8126 + + :type global_tags: dict + :param global_tags: global_tags is a set of tags that will be + applied to all exported spans. + """ + def __init__(self, service='', trace_addr='localhost:8126', + global_tags={}): + self._service = service + self._trace_addr = trace_addr + for k, v in global_tags.items(): + if not isinstance(k, str) or not isinstance(v, str): + raise TypeError( + "global tags must be dictionary of string string") + self._global_tags = global_tags + + @property + def trace_addr(self): + """ specifies the host[:port] address of the Datadog Trace Agent. + """ + return self._trace_addr + + @property + def service(self): + """ Specifies the service name used for tracing. + """ + return self._service + + @property + def global_tags(self): + """ Specifies the namespaces to which metric keys are appended + """ + return self._global_tags + + +class DatadogTraceExporter(base_exporter.Exporter): + """ A exporter that send traces and trace spans to Datadog. 
+ + :type options: :class:`~opencensus.ext.datadog.Options` + :param options: An options object with the parameters to instantiate the + Datadog Exporter. + + :type transport: + :class:`opencensus.common.transports.sync.SyncTransport` or + :class:`opencensus.common.transports.async_.AsyncTransport` + :param transport: An instance of a Transport to send data with. + """ + def __init__(self, options, transport=sync.SyncTransport): + self._options = options + self._transport = transport(self) + self._dd_transport = DDTransport(options.trace_addr) + + @property + def transport(self): + """ The transport way to be sent data to server + (default is sync). + """ + return self._transport + + @property + def options(self): + """ Options to be used to configure the exporter + """ + return self._options + + def export(self, span_datas): + """ + :type span_datas: list of :class: + `~opencensus.trace.span_data.SpanData` + :param list of opencensus.trace.span_data.SpanData span_datas: + SpanData tuples to export + """ + if span_datas is not None: # pragma: NO COVER + self.transport.export(span_datas) + + def emit(self, span_datas): + """ + :type span_datas: list of :class: + `~opencensus.trace.span_data.SpanData` + :param list of opencensus.trace.span_data.SpanData span_datas: + SpanData tuples to emit + """ + # Map each span data to it's corresponding trace id + trace_span_map = defaultdict(list) + for sd in span_datas: + trace_span_map[sd.context.trace_id] += [sd] + + dd_spans = [] + # Write spans to Datadog + for _, sds in trace_span_map.items(): + # convert to the legacy trace json for easier refactoring + trace = span_data.format_legacy_trace_json(sds) + dd_spans.append(self.translate_to_datadog(trace)) + + self._dd_transport.send_traces(dd_spans) + + def translate_to_datadog(self, trace): + """Translate the spans json to Datadog format. + + :type trace: dict + :param trace: Trace dictionary + + :rtype: dict + :returns: Spans in Datadog Trace format. 
+ """ + + spans_json = trace.get('spans') + trace_id = convert_id(trace.get('traceId')[8:]) + dd_trace = [] + for span in spans_json: + span_id_int = convert_id(span.get('spanId')) + # Set meta at the end. + meta = self.options.global_tags.copy() + + dd_span = { + 'span_id': span_id_int, + 'trace_id': trace_id, + 'name': "opencensus", + 'service': self.options.service, + 'resource': span.get("displayName").get("value"), + } + + start_time = datetime.strptime(span.get('startTime'), + ISO_DATETIME_REGEX) + + # The start time of the request in nanoseconds from the unix epoch. + epoch = datetime.utcfromtimestamp(0) + dd_span["start"] = int((start_time - epoch).total_seconds() * + 1000.0 * 1000.0 * 1000.0) + + end_time = datetime.strptime(span.get('endTime'), + ISO_DATETIME_REGEX) + duration_td = end_time - start_time + + # The duration of the request in nanoseconds. + dd_span["duration"] = int(duration_td.total_seconds() * 1000.0 * + 1000.0 * 1000.0) + + if span.get('parentSpanId') is not None: + parent_span_id = convert_id(span.get('parentSpanId')) + dd_span['parent_id'] = parent_span_id + + code = STATUS_CODES.get(span["status"].get("code")) + if code is None: + code = {} + code["message"] = "ERR_CODE_" + str(span["status"].get("code")) + code["status"] = 500 + + # opencensus.trace.span.SpanKind + dd_span['type'] = to_dd_type(span.get("kind")) + dd_span["error"] = 0 + if 4 <= code.get("status") // 100 <= 5: + dd_span["error"] = 1 + meta["error.type"] = code.get("message") + + if span.get("status").get("message") is not None: + meta["error.msg"] = span.get("status").get("message") + + meta["opencensus.status_code"] = str(code.get("status")) + meta["opencensus.status"] = code.get("message") + + if span.get("status").get("message") is not None: + meta["opencensus.status_description"] = span.get("status").get( + "message") + + atts = span.get("attributes").get("attributeMap") + atts_to_metadata(atts, meta=meta) + + dd_span["meta"] = meta + dd_trace.append(dd_span) + + 
return dd_trace + + +def atts_to_metadata(atts, meta={}): + """Translate the attributes to Datadog meta format. + + :type atts: dict + :param atts: Attributes dictionary + + :rtype: dict + :returns: meta dictionary + """ + for key, elem in atts.items(): + value = value_from_atts_elem(elem) + if value != "": + meta[key] = value + + return meta + + +def value_from_atts_elem(elem): + """ value_from_atts_elem takes an attribute element and retuns a string value + + :type elem: dict + :param elem: Element from the attributes map + + :rtype: str + :return: A string rep of the element value + """ + if elem.get('string_value') is not None: + return elem.get('string_value').get('value') + elif elem.get('int_value') is not None: + return str(elem.get('int_value')) + elif elem.get('bool_value') is not None: + return str(elem.get('bool_value')) + elif elem.get('double_value') is not None: + return str(elem.get('double_value').get('value')) + return "" + + +def to_dd_type(oc_kind): + """ to_dd_type takes an OC kind int ID and returns a dd string of the span type + + :type oc_kind: int + :param oc_kind: OC kind id + + :rtype: string + :returns: A string of the Span type. + """ + if oc_kind == 2: + return "client" + elif oc_kind == 1: + return "server" + else: + return "unspecified" + + +def new_trace_exporter(option): + """ new_trace_exporter returns an exporter + that exports traces to Datadog. + """ + if option.service == "": + raise ValueError("Service can not be empty string.") + + exporter = DatadogTraceExporter(options=option) + return exporter + + +def convert_id(str_id): + """ convert_id takes a string and converts that to an int that is no + more than 64 bits wide. It does this by first converting the string + to a bit array then taking up to the 64th bit and creating and int. 
This is equivalent to the go-exporter ID converter
+ + :type trace_addr: str + :param trace_addr: trace_addr specifies the host[:port] address of the + Datadog Trace Agent. + """ + def __init__(self, trace_addr): + self._trace_addr = trace_addr + + self._headers = { + "Datadog-Meta-Lang": "python", + "Datadog-Meta-Lang-Interpreter": platform.platform(), + # Following the example of the Golang version it is prefixed + # OC for Opencensus. + "Datadog-Meta-Tracer-Version": "OC/0.0.1", + "Content-Type": "application/json", + } + + @property + def trace_addr(self): + """ specifies the host[:port] address of the Datadog Trace Agent. + """ + return self._trace_addr + + @property + def headers(self): + """ specifies the headers that will be attached to HTTP request sent to DD. + """ + return self._headers + + def send_traces(self, trace): + """ Sends traces to the Datadog Tracing Agent + + :type trace: dic + :param trace: Trace dictionary + """ + + requests.post("http://" + self.trace_addr + "/v0.4/traces", + json=trace, + headers=self.headers) diff --git a/contrib/opencensus-ext-datadog/setup.cfg b/contrib/opencensus-ext-datadog/setup.cfg new file mode 100644 index 000000000..2a9acf13d --- /dev/null +++ b/contrib/opencensus-ext-datadog/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 diff --git a/contrib/opencensus-ext-datadog/setup.py b/contrib/opencensus-ext-datadog/setup.py new file mode 100644 index 000000000..48aa39367 --- /dev/null +++ b/contrib/opencensus-ext-datadog/setup.py @@ -0,0 +1,54 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from setuptools import find_packages +from setuptools import setup +from version import __version__ + +setup( + name='opencensus-ext-datadog', + version=__version__, # noqa + author='OpenCensus Authors', + author_email='census-developers@googlegroups.com', + classifiers=[ + 'Intended Audience :: Developers', + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + ], + description='OpenCensus Datadog exporter', + include_package_data=True, + install_requires=[ + 'bitarray >= 1.0.1, < 2.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', + 'requests >= 2.19.0', + ], + extras_require={}, + license='Apache-2.0', + packages=find_packages(exclude=( + 'examples', + 'tests', + )), + namespace_packages=[], + url='https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-datadog', # noqa: E501 + zip_safe=False, +) diff --git a/contrib/opencensus-ext-datadog/tests/traces_test.py b/contrib/opencensus-ext-datadog/tests/traces_test.py new file mode 100644 index 000000000..46f7e757b --- /dev/null +++ b/contrib/opencensus-ext-datadog/tests/traces_test.py @@ -0,0 +1,353 @@ +import unittest + +import mock + +from opencensus.ext.datadog.traces import (convert_id, to_dd_type, + value_from_atts_elem, + atts_to_metadata, + new_trace_exporter, + DatadogTraceExporter, Options) +from opencensus.trace import span_data as span_data_module +from opencensus.trace import span_context + + +class TestTraces(unittest.TestCase): + def setUp(self): + pass + + 
def test_convert_id(self): + test_cases = [{ + 'input': 'd17b83f89a2cbb08c2fa4469', + 'expected': 0x6431376238336638, + }, { + 'input': '1ff346aeb5d12443', + 'expected': 0x3166663334366165, + }, { + 'input': '8c9b71d2ffb05ede97bea00a', + 'expected': 0x3863396237316432, + }, { + 'input': 'a3e1b9b4ce7d2e33', + 'expected': 0x6133653162396234, + }, { + 'input': '2f79a1a078c0a4d070094440', + 'expected': 0x3266373961316130, + }, { + 'input': '0018b3f50e44f875', + 'expected': 0x3030313862336635, + }, { + 'input': 'cba7b2832de221dbc1ac8e77', + 'expected': 0x6362613762323833, + }, { + 'input': 'a3e1b9b4', + 'expected': 0x6133653162396234, + }] + for tc in test_cases: + self.assertEqual(convert_id(tc['input']), tc['expected']) + + def test_to_dd_type(self): + self.assertEqual(to_dd_type(1), "server") + self.assertEqual(to_dd_type(2), "client") + self.assertEqual(to_dd_type(3), "unspecified") + + def test_value_from_atts_elem(self): + test_cases = [{ + 'elem': { + 'string_value': { + 'value': 'StringValue' + } + }, + 'expected': 'StringValue' + }, { + 'elem': { + 'int_value': 10 + }, + 'expected': '10' + }, { + 'elem': { + 'bool_value': True + }, + 'expected': 'True' + }, { + 'elem': { + 'bool_value': False + }, + 'expected': 'False' + }, { + 'elem': { + 'double_value': { + 'value': 2.1 + } + }, + 'expected': '2.1' + }, { + 'elem': { + 'somthing_les': 2.1 + }, + 'expected': '' + }] + + for tc in test_cases: + self.assertEqual(value_from_atts_elem(tc['elem']), tc['expected']) + + def test_export(self): + mock_dd_transport = mock.Mock() + exporter = DatadogTraceExporter(options=Options(), + transport=MockTransport) + exporter._dd_transport = mock_dd_transport + exporter.export({}) + self.assertTrue(exporter.transport.export_called) + + @mock.patch('opencensus.ext.datadog.traces.' 
+ 'DatadogTraceExporter.translate_to_datadog', + return_value=None) + def test_emit(self, mr_mock): + + trace_id = '6e0c63257de34c92bf9efcd03927272e' + span_datas = [ + span_data_module.SpanData( + name='span', + context=span_context.SpanContext(trace_id=trace_id), + span_id=None, + parent_span_id=None, + attributes=None, + start_time=None, + end_time=None, + child_span_count=None, + stack_trace=None, + annotations=None, + message_events=None, + links=None, + status=None, + same_process_as_parent_span=None, + span_kind=0, + ) + ] + + mock_dd_transport = mock.Mock() + exporter = DatadogTraceExporter( + options=Options(service="dd-unit-test"), + transport=MockTransport) + exporter._dd_transport = mock_dd_transport + + exporter.emit(span_datas) + # mock_dd_transport.send_traces.assert_called_with(datadog_spans) + self.assertTrue(mock_dd_transport.send_traces.called) + + def test_translate_to_datadog(self): + test_cases = [ + { + 'status': {'code': 0}, + 'prt_span_id': '6e0c63257de34c92', + 'expt_prt_span_id': 0x3665306336333235, + 'attributes': { + 'attributeMap': { + 'key': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'value' + } + }, + 'key_double': { + 'double_value': { + 'value': 123.45 + } + }, + 'http.host': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'host' + } + } + } + }, + 'meta': { + 'key': 'value', + 'key_double': '123.45', + 'http.host': 'host', + 'opencensus.status': 'OK', + 'opencensus.status_code': '200' + }, + 'error': 0 + }, + { + 'status': {'code': 23}, + 'attributes': { + 'attributeMap': {} + }, + 'meta': { + 'error.type': 'ERR_CODE_23', + 'opencensus.status': 'ERR_CODE_23', + 'opencensus.status_code': '500' + }, + 'error': 1 + }, + { + 'status': {'code': 23, 'message': 'I_AM_A_TEAPOT'}, + 'attributes': { + 'attributeMap': {} + }, + 'meta': { + 'error.type': 'ERR_CODE_23', + 'opencensus.status': 'ERR_CODE_23', + 'opencensus.status_code': '500', + 'opencensus.status_description': 'I_AM_A_TEAPOT', + 'error.msg': 
'I_AM_A_TEAPOT' + }, + 'error': 1 + }, + { + 'status': {'code': 0, 'message': 'OK'}, + 'attributes': { + 'attributeMap': {} + }, + 'meta': { + 'opencensus.status': 'OK', + 'opencensus.status_code': '200', + 'opencensus.status_description': 'OK' + }, + 'error': 0 + } + ] + trace_id = '6e0c63257de34c92bf9efcd03927272e' + expected_trace_id = 0x3764653334633932 + span_id = '6e0c63257de34c92' + expected_span_id = 0x3665306336333235 + span_name = 'test span' + start_time = '2019-09-19T14:05:15.000000Z' + start_time_epoch = 1568901915000000000 + end_time = '2019-09-19T14:05:16.000000Z' + span_duration = 1 * 1000 * 1000 * 1000 + + for tc in test_cases: + mock_dd_transport = mock.Mock() + opts = Options(service="dd-unit-test") + tran = MockTransport + exporter = DatadogTraceExporter(options=opts, transport=tran) + exporter._dd_transport = mock_dd_transport + trace = { + 'spans': [{ + 'displayName': { + 'value': span_name, + 'truncated_byte_count': 0 + }, + 'spanId': span_id, + 'startTime': start_time, + 'endTime': end_time, + 'parentSpanId': tc.get('prt_span_id'), + 'attributes': tc.get('attributes'), + 'someRandomKey': 'this should not be included in result', + 'childSpanCount': 0, + 'kind': 1, + 'status': tc.get('status') + }], + 'traceId': + trace_id, + } + + spans = list(exporter.translate_to_datadog(trace)) + expected_traces = [{ + 'span_id': expected_span_id, + 'trace_id': expected_trace_id, + 'name': 'opencensus', + 'service': 'dd-unit-test', + 'resource': span_name, + 'start': start_time_epoch, + 'duration': span_duration, + 'meta': tc.get('meta'), + 'type': 'server', + 'error': tc.get('error') + }] + + if tc.get('prt_span_id') is not None: + expected_traces[0]['parent_id'] = tc.get('expt_prt_span_id') + self.assertEqual.__self__.maxDiff = None + self.assertEqual(spans, expected_traces) + + def test_atts_to_metadata(self): + test_cases = [ + { + 'input': { + 'key_string': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'value' + } + }, + 'key_double': 
{ + 'double_value': { + 'value': 123.45 + } + }, + }, + 'input_meta': {}, + 'output': { + 'key_string': 'value', + 'key_double': '123.45' + } + }, + { + 'input': { + 'key_string': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'value' + } + }, + }, + 'input_meta': { + 'key': 'in_meta' + }, + 'output': { + 'key_string': 'value', + 'key': 'in_meta' + } + }, + { + 'input': { + 'key_string': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'value' + } + }, + 'invalid': { + 'unknown_value': "na" + } + }, + 'input_meta': {}, + 'output': { + 'key_string': 'value', + } + } + ] + + for tc in test_cases: + out = atts_to_metadata(tc.get('input'), meta=tc.get('input_meta')) + self.assertEqual(out, tc.get('output')) + + def test_new_trace_exporter(self): + self.assertRaises(ValueError, new_trace_exporter, Options()) + try: + new_trace_exporter(Options(service="test")) + except ValueError: + self.fail("new_trace_exporter raised ValueError unexpectedly") + + def test_constructure(self): + self.assertRaises(TypeError, Options, global_tags={'int_bad': 1}) + try: + Options(global_tags={'good': 'tag'}) + except TypeError: + self.fail("Constructure raised TypeError unexpectedly") + + +class MockTransport(object): + def __init__(self, exporter=None): + self.export_called = False + self.exporter = exporter + + def export(self, trace): + self.export_called = True + + +if __name__ == '__main__': + unittest.main() diff --git a/contrib/opencensus-ext-datadog/tests/transport_test.py b/contrib/opencensus-ext-datadog/tests/transport_test.py new file mode 100644 index 000000000..56cd5b318 --- /dev/null +++ b/contrib/opencensus-ext-datadog/tests/transport_test.py @@ -0,0 +1,15 @@ +import unittest + +import mock + +from opencensus.ext.datadog.transport import DDTransport + + +class TestTraces(unittest.TestCase): + def setUp(self): + pass + + @mock.patch('requests.post', return_value=None) + def test_send_traces(self, mr_mock): + transport = DDTransport('test') + 
transport.send_traces({}) diff --git a/contrib/opencensus-ext-datadog/version.py b/contrib/opencensus-ext-datadog/version.py new file mode 100644 index 000000000..f3a64a892 --- /dev/null +++ b/contrib/opencensus-ext-datadog/version.py @@ -0,0 +1,15 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__version__ = '0.1.dev0' diff --git a/noxfile.py b/noxfile.py index b65df590c..e419f2f89 100644 --- a/noxfile.py +++ b/noxfile.py @@ -24,6 +24,7 @@ def _install_dev_packages(session): session.install('-e', '.') session.install('-e', 'contrib/opencensus-ext-azure') + session.install('-e', 'contrib/opencensus-ext-datadog') session.install('-e', 'contrib/opencensus-ext-dbapi') session.install('-e', 'contrib/opencensus-ext-django') session.install('-e', 'contrib/opencensus-ext-flask') diff --git a/tox.ini b/tox.ini index 845972b34..74a05863f 100644 --- a/tox.ini +++ b/tox.ini @@ -14,6 +14,7 @@ deps = py{27,34,35,36,37}-unit,py37-lint,py37-docs: -e contrib/opencensus-correlation py{27,34,35,36,37}-unit,py37-lint,py37-docs: -e . 
py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-azure + py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-datadog py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-dbapi py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-django py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-flask From f4d8ec36cada8b385821a16795f27fe322694a1a Mon Sep 17 00:00:00 2001 From: Dinesh Krishna Reddy Date: Tue, 8 Oct 2019 19:42:36 +0400 Subject: [PATCH 22/79] added isort support (#797) --- .isort.cfg | 17 ++++++++++ .../opencensus-context/examples/async_span.py | 1 + .../examples/explicit_threading.py | 1 + .../examples/thread_pool.py | 5 +-- context/opencensus-context/setup.py | 4 +-- .../tests/test_runtime_context.py | 1 + .../common/correlationcontext/__init__.py | 6 ++-- contrib/opencensus-correlation/setup.py | 4 +-- .../tests/test_correlation_context.py | 1 + .../examples/metrics/standard.py | 3 +- .../examples/traces/custom.py | 1 + .../examples/traces/server.py | 2 +- .../opencensus/ext/azure/common/exporter.py | 3 +- .../opencensus/ext/azure/common/storage.py | 2 +- .../opencensus/ext/azure/common/transport.py | 1 + .../opencensus/ext/azure/common/utils.py | 8 ++--- .../ext/azure/log_exporter/__init__.py | 17 +++++----- .../ext/azure/metrics_exporter/__init__.py | 14 +++++---- .../standard_metrics/__init__.py | 31 ++++++++++--------- .../standard_metrics/http_dependency.py | 3 +- .../standard_metrics/http_requests.py | 1 + .../standard_metrics/process.py | 7 +++-- .../ext/azure/trace_exporter/__init__.py | 13 ++++---- contrib/opencensus-ext-azure/setup.py | 4 +-- .../tests/test_azure_log_exporter.py | 3 +- .../tests/test_azure_metrics_exporter.py | 21 +++++++------ .../tests/test_azure_standard_metrics.py | 6 ++-- .../tests/test_azure_trace_exporter.py | 3 +- .../tests/test_azure_utils.py | 1 + .../tests/test_protocol.py | 1 + .../tests/test_storage.py | 13 +++++--- .../examples/datadog.py | 3 +- 
.../opencensus/ext/datadog/traces.py | 3 +- .../opencensus/ext/datadog/transport.py | 1 + contrib/opencensus-ext-datadog/setup.py | 4 +-- .../tests/traces_test.py | 16 ++++++---- contrib/opencensus-ext-dbapi/setup.py | 4 +-- .../examples/app/urls.py | 1 - .../examples/app/views.py | 15 +++++---- .../opencensus/ext/django/middleware.py | 15 +++++---- contrib/opencensus-ext-django/setup.py | 4 +-- .../tests/test_django_middleware.py | 6 ++-- .../opencensus-ext-flask/examples/custom.py | 7 ++--- .../opencensus-ext-flask/examples/simple.py | 1 + .../opencensus/ext/flask/flask_middleware.py | 16 +++++----- contrib/opencensus-ext-flask/setup.py | 4 +-- .../tests/test_flask_middleware.py | 15 +++------ .../ext/gevent/geventcompatibility.py | 1 + contrib/opencensus-ext-gevent/setup.py | 4 +-- .../tests/test_patching.py | 4 +-- .../ext/google_cloud_clientlibs/trace.py | 10 ++---- .../setup.py | 4 +-- .../test_google_cloud_clientlibs_trace.py | 2 +- .../examples/hello_world_client.py | 3 +- .../examples/hello_world_pb2.py | 6 ++-- .../examples/hello_world_server.py | 3 +- .../opencensus/ext/grpc/client_interceptor.py | 6 ++-- .../opencensus/ext/grpc/server_interceptor.py | 6 ++-- .../opencensus/ext/grpc/utils.py | 4 +-- contrib/opencensus-ext-grpc/setup.py | 4 +-- .../tests/test_client_interceptor.py | 4 +-- .../opencensus/ext/httplib/trace.py | 3 +- contrib/opencensus-ext-httplib/setup.py | 4 +-- .../jaeger/trace_exporter/gen/jaeger/agent.py | 9 ++++-- .../trace_exporter/gen/jaeger/jaeger.py | 13 +++++--- contrib/opencensus-ext-jaeger/setup.py | 4 +-- .../tests/test_jaeger_exporter.py | 10 ++++-- contrib/opencensus-ext-logging/setup.py | 4 +-- .../opencensus/ext/mysql/trace.py | 1 + contrib/opencensus-ext-mysql/setup.py | 4 +-- .../ext/ocagent/stats_exporter/__init__.py | 12 ++++--- .../ext/ocagent/trace_exporter/__init__.py | 7 +++-- .../ext/ocagent/trace_exporter/utils.py | 1 + .../opencensus/ext/ocagent/utils/__init__.py | 1 + contrib/opencensus-ext-ocagent/setup.py | 4 
+-- .../tests/test_ocagent_utils.py | 2 +- .../tests/test_stats_exporter.py | 30 ++++++++++-------- .../tests/test_trace_exporter.py | 6 ++-- .../tests/test_trace_exporter_utils.py | 3 +- .../opencensus/ext/postgresql/trace.py | 6 ++-- contrib/opencensus-ext-postgresql/setup.py | 4 +-- .../examples/prometheus.py | 2 +- .../ext/prometheus/stats_exporter/__init__.py | 18 ++++++----- contrib/opencensus-ext-prometheus/setup.py | 4 +-- .../tests/test_prometheus_stats.py | 4 +-- .../opencensus/ext/pymongo/trace.py | 4 +-- contrib/opencensus-ext-pymongo/setup.py | 4 +-- .../opencensus/ext/pymysql/trace.py | 1 + contrib/opencensus-ext-pymysql/setup.py | 4 +-- .../examples/app/__init__.py | 1 - .../opencensus-ext-pyramid/examples/simple.py | 6 +--- .../opencensus/ext/pyramid/config.py | 3 +- .../ext/pyramid/pyramid_middleware.py | 3 +- contrib/opencensus-ext-pyramid/setup.py | 4 +-- .../tests/test_pyramid_config.py | 3 +- .../tests/test_pyramid_middleware.py | 4 +-- .../opencensus/ext/requests/trace.py | 15 ++++++--- contrib/opencensus-ext-requests/setup.py | 4 +-- .../tests/test_requests_trace.py | 5 +-- .../opencensus/ext/sqlalchemy/trace.py | 4 +-- contrib/opencensus-ext-sqlalchemy/setup.py | 4 +-- .../tests/test_sqlalchemy_trace.py | 2 +- .../stackdriver/stats_exporter/__init__.py | 19 ++++++------ .../stackdriver/trace_exporter/__init__.py | 17 +++++----- contrib/opencensus-ext-stackdriver/setup.py | 4 +-- .../tests/test_stackdriver_stats.py | 21 +++++++------ .../opencensus/ext/threading/trace.py | 5 ++- contrib/opencensus-ext-threading/setup.py | 4 +-- .../tests/test_threading_trace.py | 7 +++-- .../ext/zipkin/trace_exporter/__init__.py | 3 +- contrib/opencensus-ext-zipkin/setup.py | 4 +-- .../tests/test_zipkin_exporter.py | 3 +- examples/stats/helloworld/main.py | 2 +- examples/trace/helloworld/main.py | 4 +-- noxfile.py | 3 +- opencensus/common/backports/__init__.py | 1 + .../aws_identity_doc_utils.py | 3 +- .../monitored_resource/monitored_resource.py | 9 +++--- 
opencensus/common/resource/__init__.py | 3 +- opencensus/common/schedule/__init__.py | 4 +-- opencensus/common/transports/async_.py | 5 ++- opencensus/log/__init__.py | 3 +- opencensus/metrics/export/cumulative.py | 3 +- opencensus/metrics/export/gauge.py | 13 +++++--- opencensus/metrics/transport.py | 1 - opencensus/stats/aggregation.py | 3 +- opencensus/stats/aggregation_data.py | 4 +-- opencensus/stats/measure_to_view_map.py | 2 +- opencensus/stats/measurement_map.py | 1 - opencensus/stats/metric_utils.py | 4 +-- opencensus/stats/stats_recorder.py | 4 +-- opencensus/stats/view_manager.py | 2 +- opencensus/tags/__init__.py | 2 +- .../tags/propagation/binary_serializer.py | 3 +- opencensus/tags/tag.py | 1 + opencensus/trace/__init__.py | 1 - opencensus/trace/exceptions_status.py | 1 + opencensus/trace/file_exporter.py | 3 +- opencensus/trace/logging_exporter.py | 3 +- opencensus/trace/propagation/b3_format.py | 2 +- .../trace_context_http_header_format.py | 5 +-- .../propagation/tracestate_string_format.py | 5 ++- opencensus/trace/span.py | 6 ++-- opencensus/trace/span_context.py | 5 +-- opencensus/trace/tracer.py | 7 ++--- opencensus/trace/tracers/context_tracer.py | 5 ++- opencensus/trace/tracers/noop_tracer.py | 4 +-- opencensus/trace/tracestate.py | 2 +- opencensus/trace/utils.py | 1 + setup.py | 3 +- .../stackdriver/stackdriver_stats_test.py | 2 +- tests/system/trace/django/app/urls.py | 1 - tests/system/trace/django/app/views.py | 13 ++++---- .../system/trace/django/django_system_test.py | 5 ++- tests/system/trace/flask/flask_system_test.py | 5 ++- .../test_aws_identity_doc_utils.py | 3 +- .../test_gcp_metadata_config.py | 3 +- .../test_monitored_resource.py | 2 +- tests/unit/common/test_http_handler.py | 5 +-- tests/unit/common/test_schedule.py | 4 +-- tests/unit/common/test_utils.py | 3 +- tests/unit/common/transports/test_sync.py | 2 ++ tests/unit/log/test_log.py | 2 +- tests/unit/metrics/export/test_cumulative.py | 4 +-- 
tests/unit/metrics/export/test_gauge.py | 3 +- tests/unit/metrics/export/test_metric.py | 4 +-- .../metrics/export/test_metric_descriptor.py | 3 +- tests/unit/metrics/export/test_point.py | 1 + tests/unit/metrics/export/test_summary.py | 1 + tests/unit/metrics/export/test_time_series.py | 4 +-- tests/unit/metrics/test_label_key.py | 1 + tests/unit/metrics/test_label_value.py | 1 + tests/unit/stats/test_aggregation.py | 5 +-- tests/unit/stats/test_aggregation_data.py | 2 +- tests/unit/stats/test_base_stats.py | 2 ++ tests/unit/stats/test_measure_to_view_map.py | 4 +-- tests/unit/stats/test_measurement.py | 4 +-- tests/unit/stats/test_measurement_map.py | 7 ++--- tests/unit/stats/test_metric_utils.py | 21 ++++++------- tests/unit/stats/test_stats.py | 6 ++-- tests/unit/stats/test_stats_recorder.py | 4 ++- tests/unit/stats/test_view.py | 9 +++--- tests/unit/stats/test_view_data.py | 5 +-- tests/unit/stats/test_view_manager.py | 4 ++- tests/unit/tags/test_tag.py | 1 + tests/unit/tags/test_tag_value.py | 1 + .../trace/exporters/test_logging_exporter.py | 3 +- .../unit/trace/propagation/test_b3_format.py | 3 +- tests/unit/trace/test_base_span.py | 2 ++ tests/unit/trace/test_blank_span.py | 5 +-- tests/unit/trace/test_exceptions_status.py | 1 + tests/unit/trace/test_execution_context.py | 3 +- tests/unit/trace/test_ext_utils.py | 2 +- tests/unit/trace/test_span.py | 9 ++---- tests/unit/trace/test_span_context.py | 1 + tests/unit/trace/test_span_data.py | 7 ++--- tests/unit/trace/test_time_event.py | 2 +- tests/unit/trace/test_tracer.py | 3 +- tests/unit/trace/test_tracestate.py | 5 +-- .../unit/trace/tracers/test_context_tracer.py | 3 +- tox.ini | 2 ++ 201 files changed, 534 insertions(+), 485 deletions(-) create mode 100644 .isort.cfg diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 000000000..4f378fabf --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,17 @@ +[settings] +include_trailing_comma=True +force_grid_wrap=0 +use_parentheses=True +line_length=79 + +; 3 
stands for Vertical Hanging Indent, e.g. +; from third_party import ( +; lib1, +; lib2, +; lib3, +; ) +; docs: https://github.com/timothycrosley/isort#multi-line-output-modes +multi_line_output=3 +known_future_library = six,six.moves,__future__ +known_third_party=google,mock,pymysql,sqlalchemy,psycopg2,mysql,requests,django,pytest,grpc,flask,bitarray,prometheus_client,psutil,pymongo,wrapt,thrift,retrying,pyramid,werkzeug,gevent +known_first_party=opencensus \ No newline at end of file diff --git a/context/opencensus-context/examples/async_span.py b/context/opencensus-context/examples/async_span.py index a91460689..3abcc0db2 100644 --- a/context/opencensus-context/examples/async_span.py +++ b/context/opencensus-context/examples/async_span.py @@ -13,6 +13,7 @@ # limitations under the License. import asyncio + from opencensus.common.runtime_context import RuntimeContext RuntimeContext.register_slot('current_span', None) diff --git a/context/opencensus-context/examples/explicit_threading.py b/context/opencensus-context/examples/explicit_threading.py index ad6af8841..aad6a48c3 100644 --- a/context/opencensus-context/examples/explicit_threading.py +++ b/context/opencensus-context/examples/explicit_threading.py @@ -13,6 +13,7 @@ # limitations under the License. from threading import Thread + from opencensus.common.runtime_context import RuntimeContext RuntimeContext.register_slot('operation_id', '') diff --git a/context/opencensus-context/examples/thread_pool.py b/context/opencensus-context/examples/thread_pool.py index c36d5a304..0f06ea785 100644 --- a/context/opencensus-context/examples/thread_pool.py +++ b/context/opencensus-context/examples/thread_pool.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from multiprocessing.dummy import Pool as ThreadPool -import time import threading +import time +from multiprocessing.dummy import Pool as ThreadPool + from opencensus.common.runtime_context import RuntimeContext RuntimeContext.register_slot('operation_id', '') diff --git a/context/opencensus-context/setup.py b/context/opencensus-context/setup.py index 1397ed4fe..0746bd3e2 100644 --- a/context/opencensus-context/setup.py +++ b/context/opencensus-context/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/context/opencensus-context/tests/test_runtime_context.py b/context/opencensus-context/tests/test_runtime_context.py index 795a165a5..2f26d7b21 100644 --- a/context/opencensus-context/tests/test_runtime_context.py +++ b/context/opencensus-context/tests/test_runtime_context.py @@ -13,6 +13,7 @@ # limitations under the License. import unittest + from opencensus.common.runtime_context import RuntimeContext diff --git a/contrib/opencensus-correlation/opencensus/common/correlationcontext/__init__.py b/contrib/opencensus-correlation/opencensus/common/correlationcontext/__init__.py index 145a780e0..053ce3981 100644 --- a/contrib/opencensus-correlation/opencensus/common/correlationcontext/__init__.py +++ b/contrib/opencensus-correlation/opencensus/common/correlationcontext/__init__.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from opencensus.common.correlationcontext.correlationcontext \ - import CorrelationContext - +from opencensus.common.correlationcontext.correlationcontext import ( + CorrelationContext, +) __all__ = ['CorrelationContext'] diff --git a/contrib/opencensus-correlation/setup.py b/contrib/opencensus-correlation/setup.py index 4f9271017..567a39ef4 100644 --- a/contrib/opencensus-correlation/setup.py +++ b/contrib/opencensus-correlation/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-correlation/tests/test_correlation_context.py b/contrib/opencensus-correlation/tests/test_correlation_context.py index 98864ba1d..aa0eb0eb8 100644 --- a/contrib/opencensus-correlation/tests/test_correlation_context.py +++ b/contrib/opencensus-correlation/tests/test_correlation_context.py @@ -13,6 +13,7 @@ # limitations under the License. import unittest + from opencensus.common.correlationcontext import CorrelationContext diff --git a/contrib/opencensus-ext-azure/examples/metrics/standard.py b/contrib/opencensus-ext-azure/examples/metrics/standard.py index 3c4627765..3d726385c 100644 --- a/contrib/opencensus-ext-azure/examples/metrics/standard.py +++ b/contrib/opencensus-ext-azure/examples/metrics/standard.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import psutil import time +import psutil + from opencensus.ext.azure import metrics_exporter diff --git a/contrib/opencensus-ext-azure/examples/traces/custom.py b/contrib/opencensus-ext-azure/examples/traces/custom.py index 5b23b10a7..abc3497db 100644 --- a/contrib/opencensus-ext-azure/examples/traces/custom.py +++ b/contrib/opencensus-ext-azure/examples/traces/custom.py @@ -13,6 +13,7 @@ # limitations under the License. from flask import Flask + from opencensus.ext.flask.flask_middleware import FlaskMiddleware app = Flask(__name__) diff --git a/contrib/opencensus-ext-azure/examples/traces/server.py b/contrib/opencensus-ext-azure/examples/traces/server.py index 3702e7ef3..0b4831d5b 100644 --- a/contrib/opencensus-ext-azure/examples/traces/server.py +++ b/contrib/opencensus-ext-azure/examples/traces/server.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from flask import Flask import requests +from flask import Flask from opencensus.ext.azure.trace_exporter import AzureExporter from opencensus.ext.flask.flask_middleware import FlaskMiddleware diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py index b6eaed7e6..969568087 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py @@ -16,8 +16,7 @@ import threading import time -from opencensus.common.schedule import Queue -from opencensus.common.schedule import QueueEvent +from opencensus.common.schedule import Queue, QueueEvent from opencensus.ext.azure.common import Options from opencensus.trace import execution_context diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py index a7fce53d7..55f375fb4 100644 --- 
a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py @@ -1,7 +1,7 @@ import datetime import json -import random import os +import random from opencensus.common.schedule import PeriodicTask diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py index 2b2d24a57..661a57dd9 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py @@ -14,6 +14,7 @@ import json import logging + import requests logger = logging.getLogger(__name__) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py index 97c6148e3..bb3550e89 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py @@ -18,15 +18,15 @@ import platform import sys +from opencensus.common.utils import timestamp_to_microseconds, to_iso_str +from opencensus.common.version import __version__ as opencensus_version +from opencensus.ext.azure.common.version import __version__ as ext_version + try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse -from opencensus.common.version import __version__ as opencensus_version -from opencensus.common.utils import timestamp_to_microseconds -from opencensus.common.utils import to_iso_str -from opencensus.ext.azure.common.version import __version__ as ext_version azure_monitor_context = { 'ai.cloud.role': os.path.basename(sys.argv[0]) or 'Python Application', diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index 57f258e2a..eccaa9372 100644 --- 
a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -17,15 +17,14 @@ import time import traceback -from opencensus.common.schedule import Queue -from opencensus.common.schedule import QueueExitEvent -from opencensus.common.schedule import QueueEvent -from opencensus.ext.azure.common import Options -from opencensus.ext.azure.common import utils -from opencensus.ext.azure.common.protocol import Data -from opencensus.ext.azure.common.protocol import Envelope -from opencensus.ext.azure.common.protocol import ExceptionData -from opencensus.ext.azure.common.protocol import Message +from opencensus.common.schedule import Queue, QueueEvent, QueueExitEvent +from opencensus.ext.azure.common import Options, utils +from opencensus.ext.azure.common.protocol import ( + Data, + Envelope, + ExceptionData, + Message, +) from opencensus.ext.azure.common.storage import LocalFileStorage from opencensus.ext.azure.common.transport import TransportMixin from opencensus.trace import execution_context diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py index 80e1b4ba2..c37468acc 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py @@ -14,15 +14,17 @@ import json import logging + import requests from opencensus.common import utils as common_utils -from opencensus.ext.azure.common import Options -from opencensus.ext.azure.common import utils -from opencensus.ext.azure.common.protocol import Data -from opencensus.ext.azure.common.protocol import DataPoint -from opencensus.ext.azure.common.protocol import Envelope -from opencensus.ext.azure.common.protocol import MetricData +from opencensus.ext.azure.common import Options, utils +from 
opencensus.ext.azure.common.protocol import ( + Data, + DataPoint, + Envelope, + MetricData, +) from opencensus.ext.azure.metrics_exporter import standard_metrics from opencensus.metrics import transport from opencensus.metrics.export.metric_descriptor import MetricDescriptorType diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py index fc6a1c235..b9ce6380e 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py @@ -12,22 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. +from opencensus.ext.azure.metrics_exporter.standard_metrics.cpu import ( + ProcessorTimeMetric, +) +from opencensus.ext.azure.metrics_exporter.standard_metrics.http_dependency import ( # noqa E501 + DependencyRateMetric, +) +from opencensus.ext.azure.metrics_exporter.standard_metrics.http_requests import ( # noqa E501 + RequestsAvgExecutionMetric, + RequestsRateMetric, +) +from opencensus.ext.azure.metrics_exporter.standard_metrics.memory import ( + AvailableMemoryMetric, +) +from opencensus.ext.azure.metrics_exporter.standard_metrics.process import ( + ProcessCPUMetric, + ProcessMemoryMetric, +) from opencensus.metrics.export.gauge import Registry from opencensus.metrics.export.metric_producer import MetricProducer -from opencensus.ext.azure.metrics_exporter.standard_metrics.cpu \ - import ProcessorTimeMetric -from opencensus.ext.azure.metrics_exporter.standard_metrics.http_dependency \ - import DependencyRateMetric -from opencensus.ext.azure.metrics_exporter.standard_metrics.memory \ - import AvailableMemoryMetric -from opencensus.ext.azure.metrics_exporter.standard_metrics.process \ - import ProcessCPUMetric -from 
opencensus.ext.azure.metrics_exporter.standard_metrics.process \ - import ProcessMemoryMetric -from opencensus.ext.azure.metrics_exporter.standard_metrics.http_requests \ - import RequestsAvgExecutionMetric -from opencensus.ext.azure.metrics_exporter.standard_metrics.http_requests \ - import RequestsRateMetric # List of standard metrics to track STANDARD_METRICS = [AvailableMemoryMetric, diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py index 4a00a4f5f..0632ba2d2 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -import requests import threading import time +import requests + from opencensus.metrics.export.gauge import DerivedDoubleGauge from opencensus.trace import execution_context diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py index b19f77b55..629161dea 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_requests.py @@ -17,6 +17,7 @@ import time from opencensus.metrics.export.gauge import DerivedDoubleGauge + if sys.version_info < (3,): from BaseHTTPServer import HTTPServer else: diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/process.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/process.py 
index 454f82f8d..f3214aba2 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/process.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/process.py @@ -13,10 +13,13 @@ # limitations under the License. import logging + import psutil -from opencensus.metrics.export.gauge import DerivedLongGauge -from opencensus.metrics.export.gauge import DerivedDoubleGauge +from opencensus.metrics.export.gauge import ( + DerivedDoubleGauge, + DerivedLongGauge, +) logger = logging.getLogger(__name__) PROCESS = psutil.Process() diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py index 6ab356880..41f2a06aa 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py @@ -15,13 +15,14 @@ import logging from opencensus.common.schedule import QueueExitEvent -from opencensus.ext.azure.common import Options -from opencensus.ext.azure.common import utils +from opencensus.ext.azure.common import Options, utils from opencensus.ext.azure.common.exporter import BaseExporter -from opencensus.ext.azure.common.protocol import Data -from opencensus.ext.azure.common.protocol import Envelope -from opencensus.ext.azure.common.protocol import RemoteDependency -from opencensus.ext.azure.common.protocol import Request +from opencensus.ext.azure.common.protocol import ( + Data, + Envelope, + RemoteDependency, + Request, +) from opencensus.ext.azure.common.storage import LocalFileStorage from opencensus.ext.azure.common.transport import TransportMixin from opencensus.trace.span import SpanKind diff --git a/contrib/opencensus-ext-azure/setup.py b/contrib/opencensus-ext-azure/setup.py index a5efc1676..d4f228478 100644 --- a/contrib/opencensus-ext-azure/setup.py +++ 
b/contrib/opencensus-ext-azure/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from opencensus.ext.azure.common.version import __version__ setup( diff --git a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py index 2707c9775..15c361cbd 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py @@ -13,11 +13,12 @@ # limitations under the License. import logging -import mock import os import shutil import unittest +import mock + from opencensus.ext.azure import log_exporter TEST_FOLDER = os.path.abspath('.test.logs') diff --git a/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py index 466f017ce..63c802e0d 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py @@ -12,23 +12,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import mock import unittest from datetime import datetime +import mock + from opencensus.common import utils from opencensus.ext.azure import metrics_exporter from opencensus.ext.azure.common import Options -from opencensus.ext.azure.common.protocol import DataPoint -from opencensus.ext.azure.common.protocol import Envelope +from opencensus.ext.azure.common.protocol import DataPoint, Envelope from opencensus.ext.azure.metrics_exporter import standard_metrics -from opencensus.metrics import label_key -from opencensus.metrics import label_value -from opencensus.metrics.export import metric -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import point -from opencensus.metrics.export import time_series -from opencensus.metrics.export import value +from opencensus.metrics import label_key, label_value +from opencensus.metrics.export import ( + metric, + metric_descriptor, + point, + time_series, + value, +) from opencensus.metrics.export.metric_descriptor import MetricDescriptorType diff --git a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py index 70aaf4e30..5cbb6e601 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py @@ -13,13 +13,15 @@ # limitations under the License. 
import collections -import mock -import requests import sys import unittest +import mock +import requests + from opencensus.ext.azure.metrics_exporter import standard_metrics from opencensus.trace import execution_context + if sys.version_info < (3,): from BaseHTTPServer import HTTPServer else: diff --git a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py index d884383f7..935a52a7e 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py @@ -13,11 +13,12 @@ # limitations under the License. import json -import mock import os import shutil import unittest +import mock + from opencensus.ext.azure import trace_exporter TEST_FOLDER = os.path.abspath('.test.exporter') diff --git a/contrib/opencensus-ext-azure/tests/test_azure_utils.py b/contrib/opencensus-ext-azure/tests/test_azure_utils.py index 79c7bfdb3..213a7fe0a 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_utils.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_utils.py @@ -13,6 +13,7 @@ # limitations under the License. import unittest + from opencensus.ext.azure.common import utils diff --git a/contrib/opencensus-ext-azure/tests/test_protocol.py b/contrib/opencensus-ext-azure/tests/test_protocol.py index 7d7a9f2cd..dcfc830e6 100644 --- a/contrib/opencensus-ext-azure/tests/test_protocol.py +++ b/contrib/opencensus-ext-azure/tests/test_protocol.py @@ -13,6 +13,7 @@ # limitations under the License. 
import unittest + from opencensus.ext.azure.common import protocol diff --git a/contrib/opencensus-ext-azure/tests/test_storage.py b/contrib/opencensus-ext-azure/tests/test_storage.py index 9b9b2e12b..b5776926d 100644 --- a/contrib/opencensus-ext-azure/tests/test_storage.py +++ b/contrib/opencensus-ext-azure/tests/test_storage.py @@ -12,15 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -import mock import os import shutil import unittest -from opencensus.ext.azure.common.storage import _now -from opencensus.ext.azure.common.storage import _seconds -from opencensus.ext.azure.common.storage import LocalFileBlob -from opencensus.ext.azure.common.storage import LocalFileStorage +import mock + +from opencensus.ext.azure.common.storage import ( + LocalFileBlob, + LocalFileStorage, + _now, + _seconds, +) TEST_FOLDER = os.path.abspath('.test') diff --git a/contrib/opencensus-ext-datadog/examples/datadog.py b/contrib/opencensus-ext-datadog/examples/datadog.py index 7e1a6e0d0..58ea73484 100644 --- a/contrib/opencensus-ext-datadog/examples/datadog.py +++ b/contrib/opencensus-ext-datadog/examples/datadog.py @@ -2,8 +2,7 @@ from opencensus.ext.flask.flask_middleware import FlaskMiddleware from opencensus.trace.samplers import AlwaysOnSampler -from traces import DatadogTraceExporter -from traces import Options +from traces import DatadogTraceExporter, Options app = Flask(__name__) middleware = FlaskMiddleware(app, diff --git a/contrib/opencensus-ext-datadog/opencensus/ext/datadog/traces.py b/contrib/opencensus-ext-datadog/opencensus/ext/datadog/traces.py index ebeee9b84..6555a801c 100644 --- a/contrib/opencensus-ext-datadog/opencensus/ext/datadog/traces.py +++ b/contrib/opencensus-ext-datadog/opencensus/ext/datadog/traces.py @@ -20,8 +20,7 @@ from opencensus.common.transports import sync from opencensus.common.utils import ISO_DATETIME_REGEX from opencensus.ext.datadog.transport import DDTransport -from 
opencensus.trace import base_exporter -from opencensus.trace import span_data +from opencensus.trace import base_exporter, span_data class Options(object): diff --git a/contrib/opencensus-ext-datadog/opencensus/ext/datadog/transport.py b/contrib/opencensus-ext-datadog/opencensus/ext/datadog/transport.py index 72a598a77..58c23fc6a 100644 --- a/contrib/opencensus-ext-datadog/opencensus/ext/datadog/transport.py +++ b/contrib/opencensus-ext-datadog/opencensus/ext/datadog/transport.py @@ -1,4 +1,5 @@ import platform + import requests diff --git a/contrib/opencensus-ext-datadog/setup.py b/contrib/opencensus-ext-datadog/setup.py index 48aa39367..804bbdcba 100644 --- a/contrib/opencensus-ext-datadog/setup.py +++ b/contrib/opencensus-ext-datadog/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-datadog/tests/traces_test.py b/contrib/opencensus-ext-datadog/tests/traces_test.py index 46f7e757b..cc3799694 100644 --- a/contrib/opencensus-ext-datadog/tests/traces_test.py +++ b/contrib/opencensus-ext-datadog/tests/traces_test.py @@ -2,13 +2,17 @@ import mock -from opencensus.ext.datadog.traces import (convert_id, to_dd_type, - value_from_atts_elem, - atts_to_metadata, - new_trace_exporter, - DatadogTraceExporter, Options) -from opencensus.trace import span_data as span_data_module +from opencensus.ext.datadog.traces import ( + DatadogTraceExporter, + Options, + atts_to_metadata, + convert_id, + new_trace_exporter, + to_dd_type, + value_from_atts_elem, +) from opencensus.trace import span_context +from opencensus.trace import span_data as span_data_module class TestTraces(unittest.TestCase): diff --git a/contrib/opencensus-ext-dbapi/setup.py b/contrib/opencensus-ext-dbapi/setup.py index bd5a8469e..f55daf5d6 100644 
--- a/contrib/opencensus-ext-dbapi/setup.py +++ b/contrib/opencensus-ext-dbapi/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-django/examples/app/urls.py b/contrib/opencensus-ext-django/examples/app/urls.py index cbe75510d..bb46753f1 100644 --- a/contrib/opencensus-ext-django/examples/app/urls.py +++ b/contrib/opencensus-ext-django/examples/app/urls.py @@ -32,7 +32,6 @@ import app.views - urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^$', app.views.home), diff --git a/contrib/opencensus-ext-django/examples/app/views.py b/contrib/opencensus-ext-django/examples/app/views.py index d8ea57037..a940a7932 100644 --- a/contrib/opencensus-ext-django/examples/app/views.py +++ b/contrib/opencensus-ext-django/examples/app/views.py @@ -12,19 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -from django.http import HttpResponse -from django.shortcuts import render - -from .forms import HelloForm - -from opencensus.trace import config_integration +import os import mysql.connector import psycopg2 +import requests import sqlalchemy +from django.http import HttpResponse +from django.shortcuts import render -import os -import requests +from opencensus.trace import config_integration + +from .forms import HelloForm DB_HOST = 'localhost' diff --git a/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py b/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py index 6ba6a80a9..53c26131c 100644 --- a/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py +++ b/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py @@ -13,20 +13,23 @@ # limitations under the License. 
"""Django middleware helper to capture and trace a request.""" -import django -import logging import six +import logging + +import django import django.conf from django.db import connection from django.utils.deprecation import MiddlewareMixin from google.rpc import code_pb2 from opencensus.common import configuration -from opencensus.trace import attributes_helper -from opencensus.trace import execution_context -from opencensus.trace import print_exporter -from opencensus.trace import samplers +from opencensus.trace import ( + attributes_helper, + execution_context, + print_exporter, + samplers, +) from opencensus.trace import span as span_module from opencensus.trace import status as status_module from opencensus.trace import tracer as tracer_module diff --git a/contrib/opencensus-ext-django/setup.py b/contrib/opencensus-ext-django/setup.py index 699dd62e8..b2acf735e 100644 --- a/contrib/opencensus-ext-django/setup.py +++ b/contrib/opencensus-ext-django/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-django/tests/test_django_middleware.py b/contrib/opencensus-ext-django/tests/test_django_middleware.py index cbd992d4a..714e01eeb 100644 --- a/contrib/opencensus-ext-django/tests/test_django_middleware.py +++ b/contrib/opencensus-ext-django/tests/test_django_middleware.py @@ -12,15 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import mock import unittest +import mock from django.test import RequestFactory from django.test.utils import teardown_test_environment -from opencensus.trace import execution_context -from opencensus.trace import print_exporter -from opencensus.trace import samplers +from opencensus.trace import execution_context, print_exporter, samplers from opencensus.trace import span as span_module from opencensus.trace import utils from opencensus.trace.blank_span import BlankSpan diff --git a/contrib/opencensus-ext-flask/examples/custom.py b/contrib/opencensus-ext-flask/examples/custom.py index c310958a4..d6f8fd86d 100644 --- a/contrib/opencensus-ext-flask/examples/custom.py +++ b/contrib/opencensus-ext-flask/examples/custom.py @@ -21,13 +21,12 @@ import requests import sqlalchemy +import hello_world_pb2 +import hello_world_pb2_grpc from opencensus.ext.flask.flask_middleware import FlaskMiddleware from opencensus.ext.grpc import client_interceptor from opencensus.ext.stackdriver import trace_exporter as stackdriver_exporter -from opencensus.trace import config_integration -from opencensus.trace import samplers -import hello_world_pb2 -import hello_world_pb2_grpc +from opencensus.trace import config_integration, samplers INTEGRATIONS = ['mysql', 'postgresql', 'sqlalchemy', 'requests'] diff --git a/contrib/opencensus-ext-flask/examples/simple.py b/contrib/opencensus-ext-flask/examples/simple.py index ac4eb50e9..0f3ca65c0 100644 --- a/contrib/opencensus-ext-flask/examples/simple.py +++ b/contrib/opencensus-ext-flask/examples/simple.py @@ -13,6 +13,7 @@ # limitations under the License. 
from flask import Flask + from opencensus.ext.flask.flask_middleware import FlaskMiddleware app = Flask(__name__) diff --git a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py index fcbdc41e7..39b220e02 100644 --- a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py +++ b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py @@ -12,21 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. -import logging import six + +import logging import sys import flask from google.rpc import code_pb2 from opencensus.common import configuration -from opencensus.trace import attributes_helper -from opencensus.trace import execution_context -from opencensus.trace import print_exporter -from opencensus.trace import samplers +from opencensus.trace import ( + attributes_helper, + execution_context, + print_exporter, + samplers, +) from opencensus.trace import span as span_module -from opencensus.trace import stack_trace -from opencensus.trace import status +from opencensus.trace import stack_trace, status from opencensus.trace import tracer as tracer_module from opencensus.trace import utils from opencensus.trace.propagation import trace_context_http_header_format diff --git a/contrib/opencensus-ext-flask/setup.py b/contrib/opencensus-ext-flask/setup.py index f05797d62..e3fc3a640 100644 --- a/contrib/opencensus-ext-flask/setup.py +++ b/contrib/opencensus-ext-flask/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py index bcd11a506..55b54c33d 100644 --- a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py +++ b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py @@ -17,25 +17,20 @@ import unittest -from google.rpc import code_pb2 import flask -from werkzeug.exceptions import NotFound import mock +from google.rpc import code_pb2 +from werkzeug.exceptions import NotFound from opencensus.ext.flask import flask_middleware -from opencensus.trace import execution_context -from opencensus.trace import print_exporter -from opencensus.trace import samplers +from opencensus.trace import execution_context, print_exporter, samplers from opencensus.trace import span as span_module -from opencensus.trace import span_data -from opencensus.trace import stack_trace -from opencensus.trace import status +from opencensus.trace import span_data, stack_trace, status from opencensus.trace.blank_span import BlankSpan from opencensus.trace.propagation import trace_context_http_header_format from opencensus.trace.span_context import SpanContext from opencensus.trace.trace_options import TraceOptions -from opencensus.trace.tracers import base -from opencensus.trace.tracers import noop_tracer +from opencensus.trace.tracers import base, noop_tracer class FlaskTestException(Exception): diff --git a/contrib/opencensus-ext-gevent/opencensus/ext/gevent/geventcompatibility.py b/contrib/opencensus-ext-gevent/opencensus/ext/gevent/geventcompatibility.py index f1f662c48..d59633573 100644 --- a/contrib/opencensus-ext-gevent/opencensus/ext/gevent/geventcompatibility.py +++ b/contrib/opencensus-ext-gevent/opencensus/ext/gevent/geventcompatibility.py @@ -1,4 +1,5 @@ import logging + import gevent.monkey diff --git 
a/contrib/opencensus-ext-gevent/setup.py b/contrib/opencensus-ext-gevent/setup.py index 03b666db6..022df4a14 100644 --- a/contrib/opencensus-ext-gevent/setup.py +++ b/contrib/opencensus-ext-gevent/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-gevent/tests/test_patching.py b/contrib/opencensus-ext-gevent/tests/test_patching.py index 08955fdd8..c5f2063b5 100644 --- a/contrib/opencensus-ext-gevent/tests/test_patching.py +++ b/contrib/opencensus-ext-gevent/tests/test_patching.py @@ -14,11 +14,11 @@ import unittest -import opencensus.common.runtime_context as runtime_context import gevent.monkey - import mock +import opencensus.common.runtime_context as runtime_context + class TestPatching(unittest.TestCase): def setUp(self): diff --git a/contrib/opencensus-ext-google-cloud-clientlibs/opencensus/ext/google_cloud_clientlibs/trace.py b/contrib/opencensus-ext-google-cloud-clientlibs/opencensus/ext/google_cloud_clientlibs/trace.py index 3bc9f8e71..422a91f0c 100644 --- a/contrib/opencensus-ext-google-cloud-clientlibs/opencensus/ext/google_cloud_clientlibs/trace.py +++ b/contrib/opencensus-ext-google-cloud-clientlibs/opencensus/ext/google_cloud_clientlibs/trace.py @@ -15,15 +15,11 @@ import logging import grpc - -from google.cloud import _helpers from google.api_core import grpc_helpers +from google.cloud import _helpers -from opencensus.ext.grpc.client_interceptor import ( - OpenCensusClientInterceptor) - -from opencensus.ext.requests.trace import ( - trace_integration as trace_requests) +from opencensus.ext.grpc.client_interceptor import OpenCensusClientInterceptor +from opencensus.ext.requests.trace import trace_integration as trace_requests log = logging.getLogger(__name__) diff --git 
a/contrib/opencensus-ext-google-cloud-clientlibs/setup.py b/contrib/opencensus-ext-google-cloud-clientlibs/setup.py index 005deedfc..a66b4c683 100644 --- a/contrib/opencensus-ext-google-cloud-clientlibs/setup.py +++ b/contrib/opencensus-ext-google-cloud-clientlibs/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-google-cloud-clientlibs/tests/test_google_cloud_clientlibs_trace.py b/contrib/opencensus-ext-google-cloud-clientlibs/tests/test_google_cloud_clientlibs_trace.py index fa10ba9f5..c503fadff 100644 --- a/contrib/opencensus-ext-google-cloud-clientlibs/tests/test_google_cloud_clientlibs_trace.py +++ b/contrib/opencensus-ext-google-cloud-clientlibs/tests/test_google_cloud_clientlibs_trace.py @@ -14,8 +14,8 @@ import unittest -import mock import grpc +import mock from opencensus.ext.google_cloud_clientlibs import trace diff --git a/contrib/opencensus-ext-grpc/examples/hello_world_client.py b/contrib/opencensus-ext-grpc/examples/hello_world_client.py index 48e662118..1875c35bc 100644 --- a/contrib/opencensus-ext-grpc/examples/hello_world_client.py +++ b/contrib/opencensus-ext-grpc/examples/hello_world_client.py @@ -18,10 +18,9 @@ import hello_world_pb2 import hello_world_pb2_grpc - -from opencensus.trace.tracer import Tracer from opencensus.ext.grpc import client_interceptor from opencensus.ext.stackdriver import trace_exporter as stackdriver_exporter +from opencensus.trace.tracer import Tracer HOST_PORT = 'localhost:50051' diff --git a/contrib/opencensus-ext-grpc/examples/hello_world_pb2.py b/contrib/opencensus-ext-grpc/examples/hello_world_pb2.py index 5927b2a1a..b67e7f718 100644 --- a/contrib/opencensus-ext-grpc/examples/hello_world_pb2.py +++ 
b/contrib/opencensus-ext-grpc/examples/hello_world_pb2.py @@ -3,12 +3,14 @@ # source: hello_world.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pb2 from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 + +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() diff --git a/contrib/opencensus-ext-grpc/examples/hello_world_server.py b/contrib/opencensus-ext-grpc/examples/hello_world_server.py index 4698e3569..c7c559554 100644 --- a/contrib/opencensus-ext-grpc/examples/hello_world_server.py +++ b/contrib/opencensus-ext-grpc/examples/hello_world_server.py @@ -12,14 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from concurrent import futures import time +from concurrent import futures import grpc import hello_world_pb2 import hello_world_pb2_grpc - from opencensus.ext.grpc import server_interceptor from opencensus.ext.stackdriver import trace_exporter as stackdriver_exporter from opencensus.trace import samplers diff --git a/contrib/opencensus-ext-grpc/opencensus/ext/grpc/client_interceptor.py b/contrib/opencensus-ext-grpc/opencensus/ext/grpc/client_interceptor.py index 4fa9ccdf5..288bf650b 100644 --- a/contrib/opencensus-ext-grpc/opencensus/ext/grpc/client_interceptor.py +++ b/contrib/opencensus-ext-grpc/opencensus/ext/grpc/client_interceptor.py @@ -12,16 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import six + import collections import logging import grpc -import six from opencensus.ext import grpc as oc_grpc from opencensus.ext.grpc import utils as grpc_utils -from opencensus.trace import attributes_helper -from opencensus.trace import execution_context +from opencensus.trace import attributes_helper, execution_context from opencensus.trace import span as span_module from opencensus.trace import time_event from opencensus.trace.propagation import binary_format diff --git a/contrib/opencensus-ext-grpc/opencensus/ext/grpc/server_interceptor.py b/contrib/opencensus-ext-grpc/opencensus/ext/grpc/server_interceptor.py index 2308e3dea..7b82773f2 100644 --- a/contrib/opencensus-ext-grpc/opencensus/ext/grpc/server_interceptor.py +++ b/contrib/opencensus-ext-grpc/opencensus/ext/grpc/server_interceptor.py @@ -18,12 +18,10 @@ from opencensus.ext import grpc as oc_grpc from opencensus.ext.grpc import utils as grpc_utils -from opencensus.trace import attributes_helper -from opencensus.trace import execution_context +from opencensus.trace import attributes_helper, execution_context from opencensus.trace import span as span_module from opencensus.trace import stack_trace as stack_trace -from opencensus.trace import status -from opencensus.trace import time_event +from opencensus.trace import status, time_event from opencensus.trace import tracer as tracer_module from opencensus.trace.propagation import binary_format diff --git a/contrib/opencensus-ext-grpc/opencensus/ext/grpc/utils.py b/contrib/opencensus-ext-grpc/opencensus/ext/grpc/utils.py index 222bceca8..0cc213bb3 100644 --- a/contrib/opencensus-ext-grpc/opencensus/ext/grpc/utils.py +++ b/contrib/opencensus-ext-grpc/opencensus/ext/grpc/utils.py @@ -2,8 +2,8 @@ from grpc.framework.foundation import future from grpc.framework.interfaces.face import face -from opencensus.trace import execution_context -from opencensus.trace import time_event + +from opencensus.trace import execution_context, time_event def 
add_message_event(proto_message, span, message_event_type, message_id=1): diff --git a/contrib/opencensus-ext-grpc/setup.py b/contrib/opencensus-ext-grpc/setup.py index 9674d4ff1..31b96b390 100644 --- a/contrib/opencensus-ext-grpc/setup.py +++ b/contrib/opencensus-ext-grpc/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-grpc/tests/test_client_interceptor.py b/contrib/opencensus-ext-grpc/tests/test_client_interceptor.py index e45096bb5..ab7851b20 100644 --- a/contrib/opencensus-ext-grpc/tests/test_client_interceptor.py +++ b/contrib/opencensus-ext-grpc/tests/test_client_interceptor.py @@ -13,14 +13,14 @@ # limitations under the License. import collections -import mock import threading import unittest +import grpc +import mock from google.api_core import bidi from google.protobuf import proto_builder from grpc.framework.foundation import logging_pool -import grpc from opencensus.ext.grpc import client_interceptor from opencensus.trace import execution_context diff --git a/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py b/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py index 54ed0653d..ed75e326b 100644 --- a/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py +++ b/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py @@ -15,8 +15,7 @@ import logging import sys -from opencensus.trace import attributes_helper -from opencensus.trace import execution_context +from opencensus.trace import attributes_helper, execution_context from opencensus.trace import span as span_module from opencensus.trace import utils diff --git a/contrib/opencensus-ext-httplib/setup.py b/contrib/opencensus-ext-httplib/setup.py index f8a1cbc83..340b43406 100644 --- 
a/contrib/opencensus-ext-httplib/setup.py +++ b/contrib/opencensus-ext-httplib/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/gen/jaeger/agent.py b/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/gen/jaeger/agent.py index 176264a67..023a74b1d 100644 --- a/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/gen/jaeger/agent.py +++ b/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/gen/jaeger/agent.py @@ -6,9 +6,14 @@ # options string: py:new_style # -from thrift.Thrift import TType, TMessageType, TApplicationException -from thrift.Thrift import TProcessor +from thrift.Thrift import ( + TApplicationException, + TMessageType, + TProcessor, + TType, +) from thrift.transport import TTransport + from opencensus.ext.jaeger.trace_exporter.gen.jaeger import jaeger diff --git a/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/gen/jaeger/jaeger.py b/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/gen/jaeger/jaeger.py index 51c0ac617..e9fa4830b 100644 --- a/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/gen/jaeger/jaeger.py +++ b/contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/gen/jaeger/jaeger.py @@ -6,11 +6,16 @@ # options string: py:new_style # -from thrift.Thrift import TType, TMessageType, TApplicationException -from thrift.protocol.TProtocol import TProtocolException -import sys import logging -from thrift.Thrift import TProcessor +import sys + +from thrift.protocol.TProtocol import TProtocolException +from thrift.Thrift import ( + TApplicationException, + TMessageType, + TProcessor, + TType, +) from thrift.transport import TTransport 
diff --git a/contrib/opencensus-ext-jaeger/setup.py b/contrib/opencensus-ext-jaeger/setup.py index d345a92b1..688958f73 100644 --- a/contrib/opencensus-ext-jaeger/setup.py +++ b/contrib/opencensus-ext-jaeger/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-jaeger/tests/test_jaeger_exporter.py b/contrib/opencensus-ext-jaeger/tests/test_jaeger_exporter.py index 67892058c..85c4b924f 100644 --- a/contrib/opencensus-ext-jaeger/tests/test_jaeger_exporter.py +++ b/contrib/opencensus-ext-jaeger/tests/test_jaeger_exporter.py @@ -18,8 +18,14 @@ from opencensus.ext.jaeger import trace_exporter from opencensus.ext.jaeger.trace_exporter.gen.jaeger import jaeger -from opencensus.trace import (attributes, link, span_context, span_data, - status, time_event) +from opencensus.trace import ( + attributes, + link, + span_context, + span_data, + status, + time_event, +) class TestJaegerExporter(unittest.TestCase): diff --git a/contrib/opencensus-ext-logging/setup.py b/contrib/opencensus-ext-logging/setup.py index ab09453c9..76cf383d3 100644 --- a/contrib/opencensus-ext-logging/setup.py +++ b/contrib/opencensus-ext-logging/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-mysql/opencensus/ext/mysql/trace.py b/contrib/opencensus-ext-mysql/opencensus/ext/mysql/trace.py index a988a9214..efcf7bb17 100644 --- a/contrib/opencensus-ext-mysql/opencensus/ext/mysql/trace.py +++ b/contrib/opencensus-ext-mysql/opencensus/ext/mysql/trace.py @@ -14,6 +14,7 @@ import inspect import logging + import mysql.connector from opencensus.ext.dbapi import trace diff --git a/contrib/opencensus-ext-mysql/setup.py b/contrib/opencensus-ext-mysql/setup.py index 4d481e91e..3456658f3 100644 --- a/contrib/opencensus-ext-mysql/setup.py +++ b/contrib/opencensus-ext-mysql/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/stats_exporter/__init__.py b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/stats_exporter/__init__.py index 5b8c0b7de..d49e9e82e 100644 --- a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/stats_exporter/__init__.py +++ b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/stats_exporter/__init__.py @@ -14,18 +14,20 @@ import logging +import grpc from google.api_core import bidi + from opencensus.common.monitored_resource import monitored_resource from opencensus.ext.ocagent import utils from opencensus.metrics import transport -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import value -from opencensus.proto.agent.metrics.v1 import metrics_service_pb2 -from opencensus.proto.agent.metrics.v1 import metrics_service_pb2_grpc +from opencensus.metrics.export import metric_descriptor, value +from 
opencensus.proto.agent.metrics.v1 import ( + metrics_service_pb2, + metrics_service_pb2_grpc, +) from opencensus.proto.metrics.v1 import metrics_pb2 from opencensus.proto.resource.v1 import resource_pb2 from opencensus.stats import stats -import grpc class StatsExporter(object): diff --git a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/__init__.py b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/__init__.py index afb3f9bd3..ddd115755 100644 --- a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/__init__.py @@ -14,13 +14,16 @@ """Export opencensus spans to ocagent""" from threading import Lock + import grpc from opencensus.common.transports import sync from opencensus.ext.ocagent import utils as ocagent_utils from opencensus.ext.ocagent.trace_exporter import utils -from opencensus.proto.agent.trace.v1 import trace_service_pb2 -from opencensus.proto.agent.trace.v1 import trace_service_pb2_grpc +from opencensus.proto.agent.trace.v1 import ( + trace_service_pb2, + trace_service_pb2_grpc, +) from opencensus.trace import base_exporter # Default agent endpoint diff --git a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/utils.py b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/utils.py index f92a9fce7..305347a3f 100644 --- a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/utils.py +++ b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/trace_exporter/utils.py @@ -15,6 +15,7 @@ """Translates opencensus span data to trace proto""" from google.protobuf.wrappers_pb2 import BoolValue, UInt32Value + from opencensus.ext.ocagent import utils as ocagent_utils from opencensus.proto.trace.v1 import trace_pb2 diff --git a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/utils/__init__.py b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/utils/__init__.py 
index 17347e3df..2196bc79b 100644 --- a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/utils/__init__.py +++ b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/utils/__init__.py @@ -3,6 +3,7 @@ import socket from google.protobuf.timestamp_pb2 import Timestamp + from opencensus.common.version import __version__ as opencensus_version from opencensus.proto.agent.common.v1 import common_pb2 diff --git a/contrib/opencensus-ext-ocagent/setup.py b/contrib/opencensus-ext-ocagent/setup.py index d8f24e8ee..4ac94a067 100644 --- a/contrib/opencensus-ext-ocagent/setup.py +++ b/contrib/opencensus-ext-ocagent/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-ocagent/tests/test_ocagent_utils.py b/contrib/opencensus-ext-ocagent/tests/test_ocagent_utils.py index 2ce995651..ec28ffdbf 100644 --- a/contrib/opencensus-ext-ocagent/tests/test_ocagent_utils.py +++ b/contrib/opencensus-ext-ocagent/tests/test_ocagent_utils.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from datetime import datetime import unittest +from datetime import datetime from opencensus.common import utils as common_utils from opencensus.ext.ocagent import utils diff --git a/contrib/opencensus-ext-ocagent/tests/test_stats_exporter.py b/contrib/opencensus-ext-ocagent/tests/test_stats_exporter.py index 907a8628d..796628531 100644 --- a/contrib/opencensus-ext-ocagent/tests/test_stats_exporter.py +++ b/contrib/opencensus-ext-ocagent/tests/test_stats_exporter.py @@ -12,30 +12,34 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from concurrent import futures -from datetime import datetime -import grpc -import mock import os import socket import threading import time import unittest +from concurrent import futures +from datetime import datetime +import grpc +import mock from google.protobuf import timestamp_pb2 -from opencensus.common import resource -from opencensus.common import utils + +from opencensus.common import resource, utils from opencensus.common.version import __version__ as opencensus_version from opencensus.ext.ocagent import stats_exporter as ocagent from opencensus.metrics import label_value -from opencensus.metrics.export import metric -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import point -from opencensus.metrics.export import time_series -from opencensus.metrics.export import value +from opencensus.metrics.export import ( + metric, + metric_descriptor, + point, + time_series, + value, +) from opencensus.proto.agent.common.v1 import common_pb2 -from opencensus.proto.agent.metrics.v1 import metrics_service_pb2 -from opencensus.proto.agent.metrics.v1 import metrics_service_pb2_grpc +from opencensus.proto.agent.metrics.v1 import ( + metrics_service_pb2, + metrics_service_pb2_grpc, +) from opencensus.proto.metrics.v1 import metrics_pb2 from opencensus.proto.resource.v1 import resource_pb2 from opencensus.stats import aggregation as aggregation_module diff --git a/contrib/opencensus-ext-ocagent/tests/test_trace_exporter.py b/contrib/opencensus-ext-ocagent/tests/test_trace_exporter.py index fbd584dc8..077a1f5bf 100644 --- a/contrib/opencensus-ext-ocagent/tests/test_trace_exporter.py +++ b/contrib/opencensus-ext-ocagent/tests/test_trace_exporter.py @@ -13,19 +13,19 @@ # limitations under the License. 
import codecs -import grpc -import mock import os import socket import unittest +import grpc +import mock + from opencensus.common.version import __version__ from opencensus.ext.ocagent.trace_exporter import TraceExporter from opencensus.proto.trace.v1 import trace_config_pb2 from opencensus.trace import span_context as span_context_module from opencensus.trace import span_data as span_data_module - SERVICE_NAME = 'my-service' diff --git a/contrib/opencensus-ext-ocagent/tests/test_trace_exporter_utils.py b/contrib/opencensus-ext-ocagent/tests/test_trace_exporter_utils.py index 3c23e17c5..439114e1d 100644 --- a/contrib/opencensus-ext-ocagent/tests/test_trace_exporter_utils.py +++ b/contrib/opencensus-ext-ocagent/tests/test_trace_exporter_utils.py @@ -12,10 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from datetime import datetime -from datetime import timedelta import codecs import unittest +from datetime import datetime, timedelta from opencensus.ext.ocagent.trace_exporter import utils from opencensus.proto.trace.v1 import trace_pb2 diff --git a/contrib/opencensus-ext-postgresql/opencensus/ext/postgresql/trace.py b/contrib/opencensus-ext-postgresql/opencensus/ext/postgresql/trace.py index 4c948af23..5ae2ce9fc 100644 --- a/contrib/opencensus-ext-postgresql/opencensus/ext/postgresql/trace.py +++ b/contrib/opencensus-ext-postgresql/opencensus/ext/postgresql/trace.py @@ -15,13 +15,13 @@ import inspect import logging -from opencensus.trace import execution_context -from opencensus.trace import span as span_module - import psycopg2 from psycopg2 import connect as pg_connect from psycopg2.extensions import cursor as pgcursor +from opencensus.trace import execution_context +from opencensus.trace import span as span_module + log = logging.getLogger(__name__) MODULE_NAME = 'postgresql' diff --git a/contrib/opencensus-ext-postgresql/setup.py b/contrib/opencensus-ext-postgresql/setup.py index 
c26c5e143..41bd59806 100644 --- a/contrib/opencensus-ext-postgresql/setup.py +++ b/contrib/opencensus-ext-postgresql/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-prometheus/examples/prometheus.py b/contrib/opencensus-ext-prometheus/examples/prometheus.py index 5e808d007..99912f90e 100644 --- a/contrib/opencensus-ext-prometheus/examples/prometheus.py +++ b/contrib/opencensus-ext-prometheus/examples/prometheus.py @@ -16,6 +16,7 @@ import random import time +from pprint import pprint from opencensus.ext.prometheus import stats_exporter as prometheus from opencensus.stats import aggregation as aggregation_module @@ -25,7 +26,6 @@ from opencensus.tags import tag_key as tag_key_module from opencensus.tags import tag_map as tag_map_module from opencensus.tags import tag_value as tag_value_module -from pprint import pprint MiB = 1 << 20 FRONTEND_KEY = tag_key_module.TagKey("myorg_keys_frontend") diff --git a/contrib/opencensus-ext-prometheus/opencensus/ext/prometheus/stats_exporter/__init__.py b/contrib/opencensus-ext-prometheus/opencensus/ext/prometheus/stats_exporter/__init__.py index ec07781cc..564ee3a11 100644 --- a/contrib/opencensus-ext-prometheus/opencensus/ext/prometheus/stats_exporter/__init__.py +++ b/contrib/opencensus-ext-prometheus/opencensus/ext/prometheus/stats_exporter/__init__.py @@ -12,20 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import re + from prometheus_client import start_http_server -from prometheus_client.core import CollectorRegistry -from prometheus_client.core import CounterMetricFamily -from prometheus_client.core import GaugeMetricFamily -from prometheus_client.core import HistogramMetricFamily -from prometheus_client.core import REGISTRY -from prometheus_client.core import UnknownMetricFamily +from prometheus_client.core import ( + REGISTRY, + CollectorRegistry, + CounterMetricFamily, + GaugeMetricFamily, + HistogramMetricFamily, + UnknownMetricFamily, +) from opencensus.common.transports import sync from opencensus.stats import aggregation_data as aggregation_data_module from opencensus.stats import base_exporter -import re - class Options(object): """ Options contains options for configuring the exporter. diff --git a/contrib/opencensus-ext-prometheus/setup.py b/contrib/opencensus-ext-prometheus/setup.py index ff5271744..2fca7c76f 100644 --- a/contrib/opencensus-ext-prometheus/setup.py +++ b/contrib/opencensus-ext-prometheus/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-prometheus/tests/test_prometheus_stats.py b/contrib/opencensus-ext-prometheus/tests/test_prometheus_stats.py index 7dcf46db5..bc19b47ef 100644 --- a/contrib/opencensus-ext-prometheus/tests/test_prometheus_stats.py +++ b/contrib/opencensus-ext-prometheus/tests/test_prometheus_stats.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from datetime import datetime -import mock import unittest +from datetime import datetime +import mock from prometheus_client.core import Sample from opencensus.ext.prometheus import stats_exporter as prometheus diff --git a/contrib/opencensus-ext-pymongo/opencensus/ext/pymongo/trace.py b/contrib/opencensus-ext-pymongo/opencensus/ext/pymongo/trace.py index 425580b8e..b90aa90b7 100644 --- a/contrib/opencensus-ext-pymongo/opencensus/ext/pymongo/trace.py +++ b/contrib/opencensus-ext-pymongo/opencensus/ext/pymongo/trace.py @@ -14,15 +14,13 @@ import logging -from pymongo import monitoring - from google.rpc import code_pb2 +from pymongo import monitoring from opencensus.trace import execution_context from opencensus.trace import span as span_module from opencensus.trace import status as status_module - log = logging.getLogger(__name__) MODULE_NAME = 'pymongo' diff --git a/contrib/opencensus-ext-pymongo/setup.py b/contrib/opencensus-ext-pymongo/setup.py index 7cc54d5c5..63adca99e 100644 --- a/contrib/opencensus-ext-pymongo/setup.py +++ b/contrib/opencensus-ext-pymongo/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-pymysql/opencensus/ext/pymysql/trace.py b/contrib/opencensus-ext-pymysql/opencensus/ext/pymysql/trace.py index 8f52ce901..3fbd7db31 100644 --- a/contrib/opencensus-ext-pymysql/opencensus/ext/pymysql/trace.py +++ b/contrib/opencensus-ext-pymysql/opencensus/ext/pymysql/trace.py @@ -14,6 +14,7 @@ import inspect import logging + import pymysql from opencensus.ext.dbapi import trace diff --git a/contrib/opencensus-ext-pymysql/setup.py b/contrib/opencensus-ext-pymysql/setup.py index 8a88d3da0..f8502034d 100644 --- a/contrib/opencensus-ext-pymysql/setup.py +++ b/contrib/opencensus-ext-pymysql/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-pyramid/examples/app/__init__.py b/contrib/opencensus-ext-pyramid/examples/app/__init__.py index 24dcc1b82..7aae18fc5 100644 --- a/contrib/opencensus-ext-pyramid/examples/app/__init__.py +++ b/contrib/opencensus-ext-pyramid/examples/app/__init__.py @@ -13,7 +13,6 @@ # limitations under the License. 
import requests - from pyramid.config import Configurator from pyramid.response import Response from pyramid.tweens import MAIN diff --git a/contrib/opencensus-ext-pyramid/examples/simple.py b/contrib/opencensus-ext-pyramid/examples/simple.py index 4de1a3487..001743831 100644 --- a/contrib/opencensus-ext-pyramid/examples/simple.py +++ b/contrib/opencensus-ext-pyramid/examples/simple.py @@ -14,12 +14,8 @@ from wsgiref.simple_server import make_server -from opencensus.trace import config_integration -from opencensus.trace import print_exporter -from opencensus.trace import samplers - from app import main - +from opencensus.trace import config_integration, print_exporter, samplers config_integration.trace_integrations(['requests']) diff --git a/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/config.py b/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/config.py index ec2267677..2a119cfb2 100644 --- a/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/config.py +++ b/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/config.py @@ -12,8 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from opencensus.trace import print_exporter -from opencensus.trace import samplers +from opencensus.trace import print_exporter, samplers from opencensus.trace.propagation import trace_context_http_header_format DEFAULT_PYRAMID_TRACER_CONFIG = { diff --git a/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py b/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py index e2ce58c6d..3b853ddc5 100644 --- a/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py +++ b/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py @@ -15,8 +15,7 @@ import logging from opencensus.ext.pyramid.config import PyramidTraceSettings -from opencensus.trace import attributes_helper -from opencensus.trace import execution_context +from opencensus.trace import attributes_helper, execution_context from opencensus.trace import span as span_module from opencensus.trace import tracer as tracer_module from opencensus.trace import utils diff --git a/contrib/opencensus-ext-pyramid/setup.py b/contrib/opencensus-ext-pyramid/setup.py index 189e42d68..7fcd90ef3 100644 --- a/contrib/opencensus-ext-pyramid/setup.py +++ b/contrib/opencensus-ext-pyramid/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-pyramid/tests/test_pyramid_config.py b/contrib/opencensus-ext-pyramid/tests/test_pyramid_config.py index d8fc35f1d..f31ad66e9 100644 --- a/contrib/opencensus-ext-pyramid/tests/test_pyramid_config.py +++ b/contrib/opencensus-ext-pyramid/tests/test_pyramid_config.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import mock import unittest +import mock + from opencensus.ext.pyramid import config diff --git a/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py b/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py index 13df75ddd..6cfd9dd8b 100644 --- a/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py +++ b/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py @@ -25,9 +25,7 @@ from opencensus.common.transports import sync from opencensus.ext.pyramid import pyramid_middleware from opencensus.ext.zipkin import trace_exporter as zipkin_exporter -from opencensus.trace import execution_context -from opencensus.trace import print_exporter -from opencensus.trace import samplers +from opencensus.trace import execution_context, print_exporter, samplers from opencensus.trace import span as span_module from opencensus.trace.blank_span import BlankSpan from opencensus.trace.propagation import trace_context_http_header_format diff --git a/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py b/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py index 1118f57d6..57dd1b6b3 100644 --- a/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py +++ b/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py @@ -13,18 +13,23 @@ # limitations under the License. 
import logging + import requests import wrapt + +from opencensus.trace import ( + attributes_helper, + exceptions_status, + execution_context, +) +from opencensus.trace import span as span_module +from opencensus.trace import utils + try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse -from opencensus.trace import attributes_helper -from opencensus.trace import exceptions_status -from opencensus.trace import execution_context -from opencensus.trace import span as span_module -from opencensus.trace import utils log = logging.getLogger(__name__) diff --git a/contrib/opencensus-ext-requests/setup.py b/contrib/opencensus-ext-requests/setup.py index 3ee29bb18..37aca2d26 100644 --- a/contrib/opencensus-ext-requests/setup.py +++ b/contrib/opencensus-ext-requests/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-requests/tests/test_requests_trace.py b/contrib/opencensus-ext-requests/tests/test_requests_trace.py index 10554dfa5..9aa2c7131 100644 --- a/contrib/opencensus-ext-requests/tests/test_requests_trace.py +++ b/contrib/opencensus-ext-requests/tests/test_requests_trace.py @@ -16,11 +16,12 @@ import mock import requests -from opencensus.trace.tracers import noop_tracer from opencensus.ext.requests import trace -from opencensus.trace import span as span_module, execution_context +from opencensus.trace import execution_context +from opencensus.trace import span as span_module from opencensus.trace import status as status_module +from opencensus.trace.tracers import noop_tracer class Test_requests_trace(unittest.TestCase): diff --git a/contrib/opencensus-ext-sqlalchemy/opencensus/ext/sqlalchemy/trace.py 
b/contrib/opencensus-ext-sqlalchemy/opencensus/ext/sqlalchemy/trace.py index 100170fe1..bba541106 100644 --- a/contrib/opencensus-ext-sqlalchemy/opencensus/ext/sqlalchemy/trace.py +++ b/contrib/opencensus-ext-sqlalchemy/opencensus/ext/sqlalchemy/trace.py @@ -14,9 +14,7 @@ import logging -from sqlalchemy import engine -from sqlalchemy import event - +from sqlalchemy import engine, event from opencensus.trace import execution_context from opencensus.trace import span as span_module diff --git a/contrib/opencensus-ext-sqlalchemy/setup.py b/contrib/opencensus-ext-sqlalchemy/setup.py index 3ac55a79e..30090c4f4 100644 --- a/contrib/opencensus-ext-sqlalchemy/setup.py +++ b/contrib/opencensus-ext-sqlalchemy/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-sqlalchemy/tests/test_sqlalchemy_trace.py b/contrib/opencensus-ext-sqlalchemy/tests/test_sqlalchemy_trace.py index 3f8f9ba06..9df47e415 100644 --- a/contrib/opencensus-ext-sqlalchemy/tests/test_sqlalchemy_trace.py +++ b/contrib/opencensus-ext-sqlalchemy/tests/test_sqlalchemy_trace.py @@ -16,8 +16,8 @@ import mock -from opencensus.trace import span as span_module from opencensus.ext.sqlalchemy import trace +from opencensus.trace import span as span_module class Test_sqlalchemy_trace(unittest.TestCase): diff --git a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/stats_exporter/__init__.py b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/stats_exporter/__init__.py index 7dee30e24..9249294ff 100644 --- a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/stats_exporter/__init__.py +++ b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/stats_exporter/__init__.py @@ -12,32 +12,31 @@ # See the License for the 
specific language governing permissions and # limitations under the License. -from datetime import datetime import itertools import os import platform import re import string import threading +from datetime import datetime +import google.auth from google.api_core.gapic_v1 import client_info from google.cloud import monitoring_v3 -import google.auth from opencensus.common import utils -from opencensus.common.monitored_resource import aws_identity_doc_utils -from opencensus.common.monitored_resource import gcp_metadata_config -from opencensus.common.monitored_resource import k8s_utils -from opencensus.common.monitored_resource import monitored_resource +from opencensus.common.monitored_resource import ( + aws_identity_doc_utils, + gcp_metadata_config, + k8s_utils, + monitored_resource, +) from opencensus.common.version import __version__ -from opencensus.metrics import label_key -from opencensus.metrics import label_value -from opencensus.metrics import transport +from opencensus.metrics import label_key, label_value, transport from opencensus.metrics.export import metric as metric_module from opencensus.metrics.export import metric_descriptor from opencensus.stats import stats - MAX_TIME_SERIES_PER_UPLOAD = 200 OPENCENSUS_TASK = "opencensus_task" OPENCENSUS_TASK_DESCRIPTION = "Opencensus task identifier" diff --git a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py index b5dc999d7..8e07756cc 100644 --- a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py @@ -12,23 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from collections import defaultdict import os +from collections import defaultdict from google.cloud.trace.client import Client -from opencensus.common.monitored_resource import aws_identity_doc_utils -from opencensus.common.monitored_resource import gcp_metadata_config -from opencensus.common.monitored_resource import k8s_utils -from opencensus.common.monitored_resource import monitored_resource +from opencensus.common.monitored_resource import ( + aws_identity_doc_utils, + gcp_metadata_config, + k8s_utils, + monitored_resource, +) from opencensus.common.transports import sync from opencensus.common.version import __version__ -from opencensus.trace import attributes_helper -from opencensus.trace import base_exporter -from opencensus.trace import span_data +from opencensus.trace import attributes_helper, base_exporter, span_data from opencensus.trace.attributes import Attributes - # Agent AGENT = 'opencensus-python [{}]'.format(__version__) diff --git a/contrib/opencensus-ext-stackdriver/setup.py b/contrib/opencensus-ext-stackdriver/setup.py index 808badb1f..7c4c8c926 100644 --- a/contrib/opencensus-ext-stackdriver/setup.py +++ b/contrib/opencensus-ext-stackdriver/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py index e7819ad54..fd3b517c6 100644 --- a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py +++ b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py @@ -12,24 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from datetime import datetime -import mock import unittest +from datetime import datetime -from google.cloud import monitoring_v3 import google.auth +import mock +from google.cloud import monitoring_v3 from opencensus.common import utils from opencensus.common.version import __version__ from opencensus.ext.stackdriver import stats_exporter as stackdriver -from opencensus.metrics import label_key -from opencensus.metrics import label_value +from opencensus.metrics import label_key, label_value from opencensus.metrics import transport as transport_module -from opencensus.metrics.export import metric -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import point -from opencensus.metrics.export import time_series -from opencensus.metrics.export import value +from opencensus.metrics.export import ( + metric, + metric_descriptor, + point, + time_series, + value, +) from opencensus.stats import aggregation as aggregation_module from opencensus.stats import aggregation_data as aggregation_data_module from opencensus.stats import execution_context diff --git a/contrib/opencensus-ext-threading/opencensus/ext/threading/trace.py b/contrib/opencensus-ext-threading/opencensus/ext/threading/trace.py index 33d0e7522..c2824aa4f 100644 --- a/contrib/opencensus-ext-threading/opencensus/ext/threading/trace.py +++ b/contrib/opencensus-ext-threading/opencensus/ext/threading/trace.py @@ -14,11 +14,10 @@ import logging import threading -from multiprocessing import pool from concurrent import futures +from multiprocessing import pool -from opencensus.trace import execution_context -from opencensus.trace import tracer +from opencensus.trace import execution_context, tracer from opencensus.trace.propagation import binary_format log = logging.getLogger(__name__) diff --git a/contrib/opencensus-ext-threading/setup.py b/contrib/opencensus-ext-threading/setup.py index 4d2e579df..963e01e89 100644 --- a/contrib/opencensus-ext-threading/setup.py +++ 
b/contrib/opencensus-ext-threading/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-threading/tests/test_threading_trace.py b/contrib/opencensus-ext-threading/tests/test_threading_trace.py index 47bd19b0f..6985f7b8b 100644 --- a/contrib/opencensus-ext-threading/tests/test_threading_trace.py +++ b/contrib/opencensus-ext-threading/tests/test_threading_trace.py @@ -12,11 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -import unittest import threading -import mock -from multiprocessing.pool import Pool +import unittest from concurrent.futures import ThreadPoolExecutor +from multiprocessing.pool import Pool + +import mock from opencensus.ext.threading import trace from opencensus.trace import execution_context, tracer diff --git a/contrib/opencensus-ext-zipkin/opencensus/ext/zipkin/trace_exporter/__init__.py b/contrib/opencensus-ext-zipkin/opencensus/ext/zipkin/trace_exporter/__init__.py index 2333e0e0f..096a08701 100644 --- a/contrib/opencensus-ext-zipkin/opencensus/ext/zipkin/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-zipkin/opencensus/ext/zipkin/trace_exporter/__init__.py @@ -20,8 +20,7 @@ import requests from opencensus.common.transports import sync -from opencensus.common.utils import check_str_length -from opencensus.common.utils import timestamp_to_microseconds +from opencensus.common.utils import check_str_length, timestamp_to_microseconds from opencensus.trace import base_exporter DEFAULT_ENDPOINT = '/api/v2/spans' diff --git a/contrib/opencensus-ext-zipkin/setup.py b/contrib/opencensus-ext-zipkin/setup.py index e6f790eed..b9d24c277 100644 --- a/contrib/opencensus-ext-zipkin/setup.py +++ 
b/contrib/opencensus-ext-zipkin/setup.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup + from version import __version__ setup( diff --git a/contrib/opencensus-ext-zipkin/tests/test_zipkin_exporter.py b/contrib/opencensus-ext-zipkin/tests/test_zipkin_exporter.py index a7032532c..607d13c3c 100644 --- a/contrib/opencensus-ext-zipkin/tests/test_zipkin_exporter.py +++ b/contrib/opencensus-ext-zipkin/tests/test_zipkin_exporter.py @@ -13,9 +13,10 @@ # limitations under the License. import unittest +from datetime import datetime import mock -from datetime import datetime + from opencensus.ext.zipkin import trace_exporter from opencensus.trace import span_context from opencensus.trace import span_data as span_data_module diff --git a/examples/stats/helloworld/main.py b/examples/stats/helloworld/main.py index 5f65696ca..0d806cb41 100644 --- a/examples/stats/helloworld/main.py +++ b/examples/stats/helloworld/main.py @@ -16,6 +16,7 @@ import random import time +from pprint import pprint from opencensus.stats import aggregation as aggregation_module from opencensus.stats import measure as measure_module @@ -24,7 +25,6 @@ from opencensus.tags import tag_key as tag_key_module from opencensus.tags import tag_map as tag_map_module from opencensus.tags import tag_value as tag_value_module -from pprint import pprint MiB = 1 << 20 FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend") diff --git a/examples/trace/helloworld/main.py b/examples/trace/helloworld/main.py index 3336fb133..25732a357 100644 --- a/examples/trace/helloworld/main.py +++ b/examples/trace/helloworld/main.py @@ -14,9 +14,7 @@ import time -from opencensus.trace import execution_context -from opencensus.trace import print_exporter -from opencensus.trace import samplers +from opencensus.trace import execution_context, 
print_exporter, samplers from opencensus.trace.tracer import Tracer diff --git a/noxfile.py b/noxfile.py index e419f2f89..48e63b47b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -14,9 +14,10 @@ from __future__ import absolute_import -import nox import os +import nox + def _install_dev_packages(session): session.install('-e', 'context/opencensus-context') diff --git a/opencensus/common/backports/__init__.py b/opencensus/common/backports/__init__.py index 02c648c81..46fcc7d3a 100644 --- a/opencensus/common/backports/__init__.py +++ b/opencensus/common/backports/__init__.py @@ -13,6 +13,7 @@ # limitations under the License. import six + import weakref diff --git a/opencensus/common/monitored_resource/aws_identity_doc_utils.py b/opencensus/common/monitored_resource/aws_identity_doc_utils.py index 0b2b7ab98..bfa99c205 100644 --- a/opencensus/common/monitored_resource/aws_identity_doc_utils.py +++ b/opencensus/common/monitored_resource/aws_identity_doc_utils.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from opencensus.common.http_handler import get_request import json +from opencensus.common.http_handler import get_request + REGION_KEY = 'region' ACCOUNT_ID_KEY = 'aws_account' INSTANCE_ID_KEY = 'instance_id' diff --git a/opencensus/common/monitored_resource/monitored_resource.py b/opencensus/common/monitored_resource/monitored_resource.py index c11b9996d..99e166804 100644 --- a/opencensus/common/monitored_resource/monitored_resource.py +++ b/opencensus/common/monitored_resource/monitored_resource.py @@ -13,10 +13,11 @@ # limitations under the License. 
from opencensus.common import resource -from opencensus.common.monitored_resource import aws_identity_doc_utils -from opencensus.common.monitored_resource import gcp_metadata_config -from opencensus.common.monitored_resource import k8s_utils - +from opencensus.common.monitored_resource import ( + aws_identity_doc_utils, + gcp_metadata_config, + k8s_utils, +) # Supported environments (resource types) _GCE_INSTANCE = "gce_instance" diff --git a/opencensus/common/resource/__init__.py b/opencensus/common/resource/__init__.py index 0b9507bc1..1a44a82ad 100644 --- a/opencensus/common/resource/__init__.py +++ b/opencensus/common/resource/__init__.py @@ -13,11 +13,10 @@ # limitations under the License. -from copy import copy import logging import os import re - +from copy import copy logger = logging.getLogger(__name__) diff --git a/opencensus/common/schedule/__init__.py b/opencensus/common/schedule/__init__.py index 580f229bd..719d89c25 100644 --- a/opencensus/common/schedule/__init__.py +++ b/opencensus/common/schedule/__init__.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +from six.moves import queue + import threading import time -from six.moves import queue - class PeriodicTask(threading.Thread): """Thread that periodically calls a given function. diff --git a/opencensus/common/transports/async_.py b/opencensus/common/transports/async_.py index 61f3ea020..c5fe6a395 100644 --- a/opencensus/common/transports/async_.py +++ b/opencensus/common/transports/async_.py @@ -12,13 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from six.moves import queue, range + import atexit import logging import threading -from six.moves import queue -from six.moves import range - from opencensus.common.transports import base from opencensus.trace import execution_context diff --git a/opencensus/log/__init__.py b/opencensus/log/__init__.py index ddf84f83b..ce4ce5f57 100644 --- a/opencensus/log/__init__.py +++ b/opencensus/log/__init__.py @@ -12,13 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging from collections import namedtuple from copy import copy -import logging from opencensus.trace import execution_context - _meta_logger = logging.getLogger(__name__) TRACE_ID_KEY = 'traceId' diff --git a/opencensus/metrics/export/cumulative.py b/opencensus/metrics/export/cumulative.py index 921dd3593..5c2a45aeb 100644 --- a/opencensus/metrics/export/cumulative.py +++ b/opencensus/metrics/export/cumulative.py @@ -14,8 +14,7 @@ import six -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import gauge +from opencensus.metrics.export import gauge, metric_descriptor class CumulativePointLong(gauge.GaugePointLong): diff --git a/opencensus/metrics/export/gauge.py b/opencensus/metrics/export/gauge.py index ff5520809..10b35419e 100644 --- a/opencensus/metrics/export/gauge.py +++ b/opencensus/metrics/export/gauge.py @@ -12,15 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from collections import OrderedDict -from datetime import datetime import six + import threading +from collections import OrderedDict +from datetime import datetime from opencensus.common import utils -from opencensus.metrics.export import metric -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import metric_producer +from opencensus.metrics.export import ( + metric, + metric_descriptor, + metric_producer, +) from opencensus.metrics.export import point as point_module from opencensus.metrics.export import time_series from opencensus.metrics.export import value as value_module diff --git a/opencensus/metrics/transport.py b/opencensus/metrics/transport.py index 02579b279..77728246a 100644 --- a/opencensus/metrics/transport.py +++ b/opencensus/metrics/transport.py @@ -19,7 +19,6 @@ from opencensus.common.schedule import PeriodicTask from opencensus.trace import execution_context - logger = logging.getLogger(__name__) DEFAULT_INTERVAL = 60 diff --git a/opencensus/stats/aggregation.py b/opencensus/stats/aggregation.py index d6a6fd4d8..acaa564f7 100644 --- a/opencensus/stats/aggregation.py +++ b/opencensus/stats/aggregation.py @@ -14,10 +14,9 @@ import logging +from opencensus.metrics.export.metric_descriptor import MetricDescriptorType from opencensus.stats import aggregation_data from opencensus.stats import measure as measure_module -from opencensus.metrics.export.metric_descriptor import MetricDescriptorType - logger = logging.getLogger(__name__) diff --git a/opencensus/stats/aggregation_data.py b/opencensus/stats/aggregation_data.py index 2f54fae33..7fa3f9ac5 100644 --- a/opencensus/stats/aggregation_data.py +++ b/opencensus/stats/aggregation_data.py @@ -15,11 +15,9 @@ import copy import logging -from opencensus.metrics.export import point -from opencensus.metrics.export import value +from opencensus.metrics.export import point, value from opencensus.stats import bucket_boundaries - logger = logging.getLogger(__name__) diff 
--git a/opencensus/stats/measure_to_view_map.py b/opencensus/stats/measure_to_view_map.py index e127272b5..47863e328 100644 --- a/opencensus/stats/measure_to_view_map.py +++ b/opencensus/stats/measure_to_view_map.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import defaultdict import copy import logging +from collections import defaultdict from opencensus.stats import metric_utils from opencensus.stats import view_data as view_data_module diff --git a/opencensus/stats/measurement_map.py b/opencensus/stats/measurement_map.py index 0df811f37..c41ab5ce4 100644 --- a/opencensus/stats/measurement_map.py +++ b/opencensus/stats/measurement_map.py @@ -17,7 +17,6 @@ from opencensus.common import utils from opencensus.tags import TagContext - logger = logging.getLogger(__name__) diff --git a/opencensus/stats/metric_utils.py b/opencensus/stats/metric_utils.py index 912083e52..4cd8ec6a7 100644 --- a/opencensus/stats/metric_utils.py +++ b/opencensus/stats/metric_utils.py @@ -16,9 +16,7 @@ """ from opencensus.metrics import label_value -from opencensus.metrics.export import metric -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import time_series +from opencensus.metrics.export import metric, metric_descriptor, time_series def is_gauge(md_type): diff --git a/opencensus/stats/stats_recorder.py b/opencensus/stats/stats_recorder.py index 8ea04daf8..f74b6d3b7 100644 --- a/opencensus/stats/stats_recorder.py +++ b/opencensus/stats/stats_recorder.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from opencensus.stats.measurement_map import MeasurementMap -from opencensus.stats.measure_to_view_map import MeasureToViewMap from opencensus.stats import execution_context +from opencensus.stats.measure_to_view_map import MeasureToViewMap +from opencensus.stats.measurement_map import MeasurementMap class StatsRecorder(object): diff --git a/opencensus/stats/view_manager.py b/opencensus/stats/view_manager.py index 2afaea22c..3b118a11d 100644 --- a/opencensus/stats/view_manager.py +++ b/opencensus/stats/view_manager.py @@ -13,8 +13,8 @@ # limitations under the License. from opencensus.common import utils -from opencensus.stats.measure_to_view_map import MeasureToViewMap from opencensus.stats import execution_context +from opencensus.stats.measure_to_view_map import MeasureToViewMap class ViewManager(object): diff --git a/opencensus/tags/__init__.py b/opencensus/tags/__init__.py index 771d4c255..e4a064695 100644 --- a/opencensus/tags/__init__.py +++ b/opencensus/tags/__init__.py @@ -15,8 +15,8 @@ from opencensus.common.runtime_context import RuntimeContext from opencensus.tags.tag import Tag from opencensus.tags.tag_key import TagKey -from opencensus.tags.tag_value import TagValue from opencensus.tags.tag_map import TagMap +from opencensus.tags.tag_value import TagValue __all__ = ['Tag', 'TagContext', 'TagKey', 'TagValue', 'TagMap'] diff --git a/opencensus/tags/propagation/binary_serializer.py b/opencensus/tags/propagation/binary_serializer.py index 01ed717f2..49c84925a 100644 --- a/opencensus/tags/propagation/binary_serializer.py +++ b/opencensus/tags/propagation/binary_serializer.py @@ -14,9 +14,10 @@ # -*- coding: utf-8 -*- -import logging import six +import logging + from google.protobuf.internal.encoder import _VarintBytes from opencensus.tags import tag_map as tag_map_module diff --git a/opencensus/tags/tag.py b/opencensus/tags/tag.py index e5044c04d..4f78ac833 100644 --- a/opencensus/tags/tag.py +++ b/opencensus/tags/tag.py @@ -13,6 +13,7 @@ # limitations 
under the License. from collections import namedtuple + from opencensus.tags.tag_key import TagKey from opencensus.tags.tag_value import TagValue diff --git a/opencensus/trace/__init__.py b/opencensus/trace/__init__.py index b1bbded81..a55903b90 100644 --- a/opencensus/trace/__init__.py +++ b/opencensus/trace/__init__.py @@ -14,5 +14,4 @@ from opencensus.trace.span import Span - __all__ = ['Span'] diff --git a/opencensus/trace/exceptions_status.py b/opencensus/trace/exceptions_status.py index a57bdec60..00542d0c6 100644 --- a/opencensus/trace/exceptions_status.py +++ b/opencensus/trace/exceptions_status.py @@ -13,6 +13,7 @@ # limitations under the License. from google.rpc import code_pb2 + from opencensus.trace.status import Status CANCELLED = Status(code_pb2.CANCELLED) diff --git a/opencensus/trace/file_exporter.py b/opencensus/trace/file_exporter.py index 22eb9b738..baaeaddfc 100644 --- a/opencensus/trace/file_exporter.py +++ b/opencensus/trace/file_exporter.py @@ -17,8 +17,7 @@ import json from opencensus.common.transports import sync -from opencensus.trace import base_exporter -from opencensus.trace import span_data +from opencensus.trace import base_exporter, span_data DEFAULT_FILENAME = 'opencensus-traces.json' diff --git a/opencensus/trace/logging_exporter.py b/opencensus/trace/logging_exporter.py index f232e3512..e727f602c 100644 --- a/opencensus/trace/logging_exporter.py +++ b/opencensus/trace/logging_exporter.py @@ -17,8 +17,7 @@ import logging from opencensus.common.transports import sync -from opencensus.trace import base_exporter -from opencensus.trace import span_data +from opencensus.trace import base_exporter, span_data class LoggingExporter(base_exporter.Exporter): diff --git a/opencensus/trace/propagation/b3_format.py b/opencensus/trace/propagation/b3_format.py index cd175efa5..b60dcd8a1 100644 --- a/opencensus/trace/propagation/b3_format.py +++ b/opencensus/trace/propagation/b3_format.py @@ -13,7 +13,7 @@ # limitations under the License. 
-from opencensus.trace.span_context import SpanContext, INVALID_SPAN_ID +from opencensus.trace.span_context import INVALID_SPAN_ID, SpanContext from opencensus.trace.trace_options import TraceOptions _STATE_HEADER_KEY = 'b3' diff --git a/opencensus/trace/propagation/trace_context_http_header_format.py b/opencensus/trace/propagation/trace_context_http_header_format.py index 4b7afa100..095935a2e 100644 --- a/opencensus/trace/propagation/trace_context_http_header_format.py +++ b/opencensus/trace/propagation/trace_context_http_header_format.py @@ -14,10 +14,11 @@ import re +from opencensus.trace.propagation.tracestate_string_format import ( + TracestateStringFormatter, +) from opencensus.trace.span_context import SpanContext from opencensus.trace.trace_options import TraceOptions -from opencensus.trace.propagation.tracestate_string_format \ - import TracestateStringFormatter _TRACEPARENT_HEADER_NAME = 'traceparent' _TRACESTATE_HEADER_NAME = 'tracestate' diff --git a/opencensus/trace/propagation/tracestate_string_format.py b/opencensus/trace/propagation/tracestate_string_format.py index da2882880..9a7af86a5 100644 --- a/opencensus/trace/propagation/tracestate_string_format.py +++ b/opencensus/trace/propagation/tracestate_string_format.py @@ -13,9 +13,8 @@ # limitations under the License. 
import re -from opencensus.trace.tracestate import Tracestate -from opencensus.trace.tracestate import _KEY_FORMAT -from opencensus.trace.tracestate import _VALUE_FORMAT + +from opencensus.trace.tracestate import _KEY_FORMAT, _VALUE_FORMAT, Tracestate _DELIMITER_FORMAT = '[ \t]*,[ \t]*' _MEMBER_FORMAT = '(%s)(=)(%s)' % (_KEY_FORMAT, _VALUE_FORMAT) diff --git a/opencensus/trace/span.py b/opencensus/trace/span.py index cb8e42dae..04aa376e7 100644 --- a/opencensus/trace/span.py +++ b/opencensus/trace/span.py @@ -19,11 +19,10 @@ from collections import MutableMapping from collections import Sequence -from collections import OrderedDict -from collections import deque +import threading +from collections import OrderedDict, deque from datetime import datetime from itertools import chain -import threading from opencensus.common import utils from opencensus.trace import attributes as attributes_module @@ -35,7 +34,6 @@ from opencensus.trace.span_context import generate_span_id from opencensus.trace.tracers import base - # https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/TraceConfig.md # noqa MAX_NUM_ATTRIBUTES = 32 MAX_NUM_ANNOTATIONS = 32 diff --git a/opencensus/trace/span_context.py b/opencensus/trace/span_context.py index 3bd501fd9..24fc216f1 100644 --- a/opencensus/trace/span_context.py +++ b/opencensus/trace/span_context.py @@ -14,10 +14,11 @@ """SpanContext encapsulates the current context within the request's trace.""" -import logging -import re import six + +import logging import random +import re from opencensus.trace import trace_options as trace_options_module diff --git a/opencensus/trace/tracer.py b/opencensus/trace/tracer.py index 3981b3db7..71b7a4646 100644 --- a/opencensus/trace/tracer.py +++ b/opencensus/trace/tracer.py @@ -12,13 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from opencensus.trace import execution_context -from opencensus.trace import print_exporter -from opencensus.trace import samplers +from opencensus.trace import execution_context, print_exporter, samplers from opencensus.trace.propagation import trace_context_http_header_format from opencensus.trace.span_context import SpanContext -from opencensus.trace.tracers import context_tracer -from opencensus.trace.tracers import noop_tracer +from opencensus.trace.tracers import context_tracer, noop_tracer class Tracer(object): diff --git a/opencensus/trace/tracers/context_tracer.py b/opencensus/trace/tracers/context_tracer.py index 331a26661..4a6153a92 100644 --- a/opencensus/trace/tracers/context_tracer.py +++ b/opencensus/trace/tracers/context_tracer.py @@ -15,11 +15,10 @@ import logging import threading -from opencensus.trace import execution_context -from opencensus.trace.span_context import SpanContext -from opencensus.trace import print_exporter +from opencensus.trace import execution_context, print_exporter from opencensus.trace import span as trace_span from opencensus.trace import span_data as span_data_module +from opencensus.trace.span_context import SpanContext from opencensus.trace.tracers import base diff --git a/opencensus/trace/tracers/noop_tracer.py b/opencensus/trace/tracers/noop_tracer.py index 2a4b326ff..b61fab53c 100644 --- a/opencensus/trace/tracers/noop_tracer.py +++ b/opencensus/trace/tracers/noop_tracer.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from opencensus.trace.tracers import base from opencensus.trace import blank_span as trace_span -from opencensus.trace.span_context import SpanContext from opencensus.trace import trace_options +from opencensus.trace.span_context import SpanContext +from opencensus.trace.tracers import base class NoopTracer(base.Tracer): diff --git a/opencensus/trace/tracestate.py b/opencensus/trace/tracestate.py index 8e328140e..f9110eec9 100644 --- a/opencensus/trace/tracestate.py +++ b/opencensus/trace/tracestate.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import OrderedDict import re +from collections import OrderedDict _KEY_WITHOUT_VENDOR_FORMAT = r'[a-z][_0-9a-z\-\*\/]{0,255}' _KEY_WITH_VENDOR_FORMAT = \ diff --git a/opencensus/trace/utils.py b/opencensus/trace/utils.py index 93add22d5..7aabf73ab 100644 --- a/opencensus/trace/utils.py +++ b/opencensus/trace/utils.py @@ -15,6 +15,7 @@ import re from google.rpc import code_pb2 + from opencensus.trace import execution_context from opencensus.trace.status import Status diff --git a/setup.py b/setup.py index 0b9761871..c060e343d 100644 --- a/setup.py +++ b/setup.py @@ -13,8 +13,7 @@ # limitations under the License. 
"""A setup module for OpenCensus Instrumentation Library""" -from setuptools import find_packages -from setuptools import setup +from setuptools import find_packages, setup exec(open('opencensus/common/version/__init__.py').read()) diff --git a/tests/system/stats/stackdriver/stackdriver_stats_test.py b/tests/system/stats/stackdriver/stackdriver_stats_test.py index a18291b37..783a2d252 100644 --- a/tests/system/stats/stackdriver/stackdriver_stats_test.py +++ b/tests/system/stats/stackdriver/stackdriver_stats_test.py @@ -17,8 +17,8 @@ import sys import time -from google.cloud import monitoring_v3 import mock +from google.cloud import monitoring_v3 from opencensus.ext.stackdriver import stats_exporter as stackdriver from opencensus.metrics import transport diff --git a/tests/system/trace/django/app/urls.py b/tests/system/trace/django/app/urls.py index 9eda96a51..e9e014cf8 100644 --- a/tests/system/trace/django/app/urls.py +++ b/tests/system/trace/django/app/urls.py @@ -32,7 +32,6 @@ import app.views - urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^$', app.views.home), diff --git a/tests/system/trace/django/app/views.py b/tests/system/trace/django/app/views.py index 39873e338..cf1fad4a3 100644 --- a/tests/system/trace/django/app/views.py +++ b/tests/system/trace/django/app/views.py @@ -12,18 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from django.http import HttpResponse -from django.shortcuts import render - -from .forms import HelloForm - -from opencensus.trace import config_integration +import os import mysql.connector import psycopg2 import sqlalchemy +from django.http import HttpResponse +from django.shortcuts import render -import os +from opencensus.trace import config_integration + +from .forms import HelloForm DB_HOST = 'localhost' diff --git a/tests/system/trace/django/django_system_test.py b/tests/system/trace/django/django_system_test.py index 4dba05db4..c8a101683 100644 --- a/tests/system/trace/django/django_system_test.py +++ b/tests/system/trace/django/django_system_test.py @@ -13,15 +13,14 @@ # limitations under the License. import os -import requests import signal import subprocess +import unittest import uuid +import requests from retrying import retry -import unittest - PROJECT = os.environ.get('GCLOUD_PROJECT_PYTHON') HOST_PORT = 'localhost:8000' diff --git a/tests/system/trace/flask/flask_system_test.py b/tests/system/trace/flask/flask_system_test.py index e89f66aa5..7cf97ebb9 100644 --- a/tests/system/trace/flask/flask_system_test.py +++ b/tests/system/trace/flask/flask_system_test.py @@ -13,15 +13,14 @@ # limitations under the License. import os -import requests import signal import subprocess +import unittest import uuid +import requests from retrying import retry -import unittest - PROJECT = os.environ.get('GCLOUD_PROJECT_PYTHON') HOST_PORT = 'localhost:8080' diff --git a/tests/unit/common/monitored_resource_util/test_aws_identity_doc_utils.py b/tests/unit/common/monitored_resource_util/test_aws_identity_doc_utils.py index 73f031f45..5658b862e 100644 --- a/tests/unit/common/monitored_resource_util/test_aws_identity_doc_utils.py +++ b/tests/unit/common/monitored_resource_util/test_aws_identity_doc_utils.py @@ -13,9 +13,10 @@ # limitations under the License. 
import json -import mock import unittest +import mock + from opencensus.common.monitored_resource import aws_identity_doc_utils diff --git a/tests/unit/common/monitored_resource_util/test_gcp_metadata_config.py b/tests/unit/common/monitored_resource_util/test_gcp_metadata_config.py index 10c97776f..d636bcf77 100644 --- a/tests/unit/common/monitored_resource_util/test_gcp_metadata_config.py +++ b/tests/unit/common/monitored_resource_util/test_gcp_metadata_config.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -import mock import os import unittest +import mock + from opencensus.common.monitored_resource import gcp_metadata_config diff --git a/tests/unit/common/monitored_resource_util/test_monitored_resource.py b/tests/unit/common/monitored_resource_util/test_monitored_resource.py index 4cbe537d2..38a1fe4b5 100644 --- a/tests/unit/common/monitored_resource_util/test_monitored_resource.py +++ b/tests/unit/common/monitored_resource_util/test_monitored_resource.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from contextlib import contextmanager import os import sys +from contextlib import contextmanager import mock diff --git a/tests/unit/common/test_http_handler.py b/tests/unit/common/test_http_handler.py index 83f86b66f..cfabbc066 100644 --- a/tests/unit/common/test_http_handler.py +++ b/tests/unit/common/test_http_handler.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import unittest +import json import socket +import unittest + import mock -import json from opencensus.common.http_handler import get_request diff --git a/tests/unit/common/test_schedule.py b/tests/unit/common/test_schedule.py index a6d69b075..4fd85be0a 100644 --- a/tests/unit/common/test_schedule.py +++ b/tests/unit/common/test_schedule.py @@ -14,9 +14,7 @@ import unittest -from opencensus.common.schedule import PeriodicTask -from opencensus.common.schedule import Queue -from opencensus.common.schedule import QueueEvent +from opencensus.common.schedule import PeriodicTask, Queue, QueueEvent TIMEOUT = .1 diff --git a/tests/unit/common/test_utils.py b/tests/unit/common/test_utils.py index 42fc9c57f..49419afa9 100644 --- a/tests/unit/common/test_utils.py +++ b/tests/unit/common/test_utils.py @@ -26,10 +26,11 @@ from opencensus.common.backports import WeakMethod import gc -import mock import unittest import weakref +import mock + from opencensus.common import utils diff --git a/tests/unit/common/transports/test_sync.py b/tests/unit/common/transports/test_sync.py index 71cce563b..c812b025e 100644 --- a/tests/unit/common/transports/test_sync.py +++ b/tests/unit/common/transports/test_sync.py @@ -13,7 +13,9 @@ # limitations under the License. import unittest + import mock + from opencensus.common.transports import sync diff --git a/tests/unit/log/test_log.py b/tests/unit/log/test_log.py index b1cde14eb..528d9e7bd 100644 --- a/tests/unit/log/test_log.py +++ b/tests/unit/log/test_log.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from contextlib import contextmanager import logging import sys +from contextlib import contextmanager import mock diff --git a/tests/unit/metrics/export/test_cumulative.py b/tests/unit/metrics/export/test_cumulative.py index a11b67c45..0a7dc4ebb 100644 --- a/tests/unit/metrics/export/test_cumulative.py +++ b/tests/unit/metrics/export/test_cumulative.py @@ -16,9 +16,7 @@ from mock import Mock -from opencensus.metrics.export import cumulative -from opencensus.metrics.export import gauge -from opencensus.metrics.export import metric_descriptor +from opencensus.metrics.export import cumulative, gauge, metric_descriptor from opencensus.metrics.export import value as value_module diff --git a/tests/unit/metrics/export/test_gauge.py b/tests/unit/metrics/export/test_gauge.py index 2fa1a6afe..59fafd5ef 100644 --- a/tests/unit/metrics/export/test_gauge.py +++ b/tests/unit/metrics/export/test_gauge.py @@ -17,8 +17,7 @@ from mock import Mock -from opencensus.metrics.export import gauge -from opencensus.metrics.export import metric_descriptor +from opencensus.metrics.export import gauge, metric_descriptor from opencensus.metrics.export import value as value_module diff --git a/tests/unit/metrics/export/test_metric.py b/tests/unit/metrics/export/test_metric.py index 610f08a22..730ff1792 100644 --- a/tests/unit/metrics/export/test_metric.py +++ b/tests/unit/metrics/export/test_metric.py @@ -19,9 +19,7 @@ import unittest -from opencensus.metrics.export import metric -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import time_series +from opencensus.metrics.export import metric, metric_descriptor, time_series class TestMetric(unittest.TestCase): diff --git a/tests/unit/metrics/export/test_metric_descriptor.py b/tests/unit/metrics/export/test_metric_descriptor.py index 2a5f7b80c..ff79a3d3c 100644 --- a/tests/unit/metrics/export/test_metric_descriptor.py +++ b/tests/unit/metrics/export/test_metric_descriptor.py @@ -17,8 +17,7 @@ import 
unittest from opencensus.metrics import label_key -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import value +from opencensus.metrics.export import metric_descriptor, value NAME = 'metric' DESCRIPTION = 'Metric description' diff --git a/tests/unit/metrics/export/test_point.py b/tests/unit/metrics/export/test_point.py index 637c02201..76a937d4f 100644 --- a/tests/unit/metrics/export/test_point.py +++ b/tests/unit/metrics/export/test_point.py @@ -13,6 +13,7 @@ # limitations under the License. import unittest + from opencensus.metrics.export import point as point_module from opencensus.metrics.export import summary as summary_module from opencensus.metrics.export import value as value_module diff --git a/tests/unit/metrics/export/test_summary.py b/tests/unit/metrics/export/test_summary.py index cd7984c58..82c2bdedc 100644 --- a/tests/unit/metrics/export/test_summary.py +++ b/tests/unit/metrics/export/test_summary.py @@ -13,6 +13,7 @@ # limitations under the License. 
from six import assertRaisesRegex + import unittest from opencensus.metrics.export import summary as summary_module diff --git a/tests/unit/metrics/export/test_time_series.py b/tests/unit/metrics/export/test_time_series.py index b3948361a..57c0ef4aa 100644 --- a/tests/unit/metrics/export/test_time_series.py +++ b/tests/unit/metrics/export/test_time_series.py @@ -17,9 +17,7 @@ import unittest from opencensus.metrics import label_value -from opencensus.metrics.export import point -from opencensus.metrics.export import time_series -from opencensus.metrics.export import value +from opencensus.metrics.export import point, time_series, value START_TIMESTAMP = '2018-10-09T22:33:44.012345Z' LABEL_VALUE1 = label_value.LabelValue('value one') diff --git a/tests/unit/metrics/test_label_key.py b/tests/unit/metrics/test_label_key.py index 5a685e2e2..80c63cace 100644 --- a/tests/unit/metrics/test_label_key.py +++ b/tests/unit/metrics/test_label_key.py @@ -15,6 +15,7 @@ # limitations under the License. import unittest + from opencensus.metrics import label_key as label_key_module diff --git a/tests/unit/metrics/test_label_value.py b/tests/unit/metrics/test_label_value.py index 7af9f2d19..8fe463cd2 100644 --- a/tests/unit/metrics/test_label_value.py +++ b/tests/unit/metrics/test_label_value.py @@ -15,6 +15,7 @@ # limitations under the License. import unittest + from opencensus.metrics import label_value as label_value_module diff --git a/tests/unit/stats/test_aggregation.py b/tests/unit/stats/test_aggregation.py index ca03ced14..671c1910d 100644 --- a/tests/unit/stats/test_aggregation.py +++ b/tests/unit/stats/test_aggregation.py @@ -12,12 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import mock import unittest +import mock + +from opencensus.metrics.export import value from opencensus.stats import aggregation as aggregation_module from opencensus.stats import measure as measure_module -from opencensus.metrics.export import value class TestSumAggregation(unittest.TestCase): diff --git a/tests/unit/stats/test_aggregation_data.py b/tests/unit/stats/test_aggregation_data.py index ceb5f0e4a..d1870f2c9 100644 --- a/tests/unit/stats/test_aggregation_data.py +++ b/tests/unit/stats/test_aggregation_data.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from datetime import datetime import time import unittest +from datetime import datetime import mock diff --git a/tests/unit/stats/test_base_stats.py b/tests/unit/stats/test_base_stats.py index 78723f36b..1d403c7dc 100644 --- a/tests/unit/stats/test_base_stats.py +++ b/tests/unit/stats/test_base_stats.py @@ -13,7 +13,9 @@ # limitations under the License. 
import unittest + import mock + from opencensus.stats import base_exporter diff --git a/tests/unit/stats/test_measure_to_view_map.py b/tests/unit/stats/test_measure_to_view_map.py index 4beb0cb98..a754baf04 100644 --- a/tests/unit/stats/test_measure_to_view_map.py +++ b/tests/unit/stats/test_measure_to_view_map.py @@ -18,13 +18,11 @@ from opencensus.stats import measure_to_view_map as measure_to_view_map_module from opencensus.stats.aggregation import CountAggregation -from opencensus.stats.measure import BaseMeasure -from opencensus.stats.measure import MeasureInt +from opencensus.stats.measure import BaseMeasure, MeasureInt from opencensus.stats.view import View from opencensus.stats.view_data import ViewData from opencensus.tags import tag_key as tag_key_module - METHOD_KEY = tag_key_module.TagKey("method") REQUEST_COUNT_MEASURE = MeasureInt( "request_count", "number of requests", "1") diff --git a/tests/unit/stats/test_measurement.py b/tests/unit/stats/test_measurement.py index 431c96863..3780a8657 100644 --- a/tests/unit/stats/test_measurement.py +++ b/tests/unit/stats/test_measurement.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from opencensus.stats import measurement as measurement_module - import unittest +from opencensus.stats import measurement as measurement_module + class TestMeasurement(unittest.TestCase): def test_constructor(self): diff --git a/tests/unit/stats/test_measurement_map.py b/tests/unit/stats/test_measurement_map.py index ee103771f..89add276b 100644 --- a/tests/unit/stats/test_measurement_map.py +++ b/tests/unit/stats/test_measurement_map.py @@ -12,13 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import mock import unittest +import mock + from opencensus.stats import measurement_map as measurement_map_module -from opencensus.tags import Tag -from opencensus.tags import TagContext -from opencensus.tags import TagMap +from opencensus.tags import Tag, TagContext, TagMap logger_patch = mock.patch('opencensus.stats.measurement_map.logger') diff --git a/tests/unit/stats/test_metric_utils.py b/tests/unit/stats/test_metric_utils.py index 2ec3e41a0..355f0da2e 100644 --- a/tests/unit/stats/test_metric_utils.py +++ b/tests/unit/stats/test_metric_utils.py @@ -17,17 +17,16 @@ import mock -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import point -from opencensus.metrics.export import value -from opencensus.stats import aggregation -from opencensus.stats import aggregation_data -from opencensus.stats import measure -from opencensus.stats import metric_utils -from opencensus.stats import view -from opencensus.stats import view_data -from opencensus.tags import tag_key -from opencensus.tags import tag_value +from opencensus.metrics.export import metric_descriptor, point, value +from opencensus.stats import ( + aggregation, + aggregation_data, + measure, + metric_utils, + view, + view_data, +) +from opencensus.tags import tag_key, tag_value class TestMetricUtils(unittest.TestCase): diff --git a/tests/unit/stats/test_stats.py b/tests/unit/stats/test_stats.py index e3d3f378a..e120ac499 100644 --- a/tests/unit/stats/test_stats.py +++ b/tests/unit/stats/test_stats.py @@ -19,10 +19,8 @@ import unittest -from opencensus.metrics.export import metric_descriptor -from opencensus.metrics.export import value -from opencensus.stats import aggregation -from opencensus.stats import measure +from opencensus.metrics.export import metric_descriptor, value +from opencensus.stats import aggregation, measure from opencensus.stats import stats as stats_module from opencensus.stats import view from opencensus.tags import tag_map diff --git 
a/tests/unit/stats/test_stats_recorder.py b/tests/unit/stats/test_stats_recorder.py index e23b1ae82..ce22597e9 100644 --- a/tests/unit/stats/test_stats_recorder.py +++ b/tests/unit/stats/test_stats_recorder.py @@ -13,10 +13,12 @@ # limitations under the License. import unittest + import mock + +from opencensus.stats import execution_context from opencensus.stats import stats_recorder as stats_recorder_module from opencensus.stats.measurement_map import MeasurementMap -from opencensus.stats import execution_context class TestStatsRecorder(unittest.TestCase): diff --git a/tests/unit/stats/test_view.py b/tests/unit/stats/test_view.py index 408580e44..30dca3cdc 100644 --- a/tests/unit/stats/test_view.py +++ b/tests/unit/stats/test_view.py @@ -13,12 +13,11 @@ # limitations under the License. import unittest + import mock from opencensus.metrics.export import metric_descriptor -from opencensus.stats import aggregation -from opencensus.stats import measure -from opencensus.stats import view +from opencensus.stats import aggregation, measure from opencensus.stats import view as view_module @@ -48,8 +47,8 @@ def test_view_to_metric_descriptor(self): mock_agg = mock.Mock(spec=aggregation.SumAggregation) mock_agg.get_metric_type.return_value = \ metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE - test_view = view.View("name", "description", ["tk1", "tk2"], - mock_measure, mock_agg) + test_view = view_module.View("name", "description", ["tk1", "tk2"], + mock_measure, mock_agg) self.assertIsNone(test_view._metric_descriptor) md = test_view.get_metric_descriptor() diff --git a/tests/unit/stats/test_view_data.py b/tests/unit/stats/test_view_data.py index 6454c768b..87791802a 100644 --- a/tests/unit/stats/test_view_data.py +++ b/tests/unit/stats/test_view_data.py @@ -12,15 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import unittest from datetime import datetime + import mock -import unittest from opencensus.common import utils from opencensus.stats import aggregation as aggregation_module from opencensus.stats import measure as measure_module -from opencensus.stats import view_data as view_data_module from opencensus.stats import view as view_module +from opencensus.stats import view_data as view_data_module class TestViewData(unittest.TestCase): diff --git a/tests/unit/stats/test_view_manager.py b/tests/unit/stats/test_view_manager.py index ded2ecc74..b6dea72bc 100644 --- a/tests/unit/stats/test_view_manager.py +++ b/tests/unit/stats/test_view_manager.py @@ -13,9 +13,11 @@ # limitations under the License. import unittest + import mock -from opencensus.stats import view_manager as view_manager_module + from opencensus.stats import execution_context +from opencensus.stats import view_manager as view_manager_module from opencensus.stats.measure_to_view_map import MeasureToViewMap diff --git a/tests/unit/tags/test_tag.py b/tests/unit/tags/test_tag.py index 21c5d0eb3..6deac8fc5 100644 --- a/tests/unit/tags/test_tag.py +++ b/tests/unit/tags/test_tag.py @@ -13,6 +13,7 @@ # limitations under the License. import unittest + from opencensus.tags import Tag diff --git a/tests/unit/tags/test_tag_value.py b/tests/unit/tags/test_tag_value.py index f5aa0b771..dfd10d565 100644 --- a/tests/unit/tags/test_tag_value.py +++ b/tests/unit/tags/test_tag_value.py @@ -15,6 +15,7 @@ # limitations under the License. 
import unittest + from opencensus.tags import TagValue diff --git a/tests/unit/trace/exporters/test_logging_exporter.py b/tests/unit/trace/exporters/test_logging_exporter.py index 36ca52795..4dd45e938 100644 --- a/tests/unit/trace/exporters/test_logging_exporter.py +++ b/tests/unit/trace/exporters/test_logging_exporter.py @@ -17,8 +17,7 @@ import mock -from opencensus.trace import logging_exporter -from opencensus.trace import span_context +from opencensus.trace import logging_exporter, span_context from opencensus.trace import span_data as span_data_module diff --git a/tests/unit/trace/propagation/test_b3_format.py b/tests/unit/trace/propagation/test_b3_format.py index 75ce604ed..68feb20f0 100644 --- a/tests/unit/trace/propagation/test_b3_format.py +++ b/tests/unit/trace/propagation/test_b3_format.py @@ -13,10 +13,11 @@ # limitations under the License. import unittest + import mock -from opencensus.trace.span_context import INVALID_SPAN_ID from opencensus.trace.propagation import b3_format +from opencensus.trace.span_context import INVALID_SPAN_ID class TestB3FormatPropagator(unittest.TestCase): diff --git a/tests/unit/trace/test_base_span.py b/tests/unit/trace/test_base_span.py index f6477e8a0..d27bc6826 100644 --- a/tests/unit/trace/test_base_span.py +++ b/tests/unit/trace/test_base_span.py @@ -13,7 +13,9 @@ # limitations under the License. import unittest + import mock + from opencensus.trace.base_span import BaseSpan diff --git a/tests/unit/trace/test_blank_span.py b/tests/unit/trace/test_blank_span.py index c0bf333c7..d9d1e1c15 100644 --- a/tests/unit/trace/test_blank_span.py +++ b/tests/unit/trace/test_blank_span.py @@ -13,13 +13,14 @@ # limitations under the License. 
import datetime -import mock import unittest +import mock + from opencensus.common import utils from opencensus.trace.link import Link -from opencensus.trace.status import Status from opencensus.trace.span import format_span_json +from opencensus.trace.status import Status from opencensus.trace.time_event import MessageEvent diff --git a/tests/unit/trace/test_exceptions_status.py b/tests/unit/trace/test_exceptions_status.py index 1b3f7e963..2d17349db 100644 --- a/tests/unit/trace/test_exceptions_status.py +++ b/tests/unit/trace/test_exceptions_status.py @@ -15,6 +15,7 @@ import unittest from google.rpc import code_pb2 + from opencensus.trace import exceptions_status diff --git a/tests/unit/trace/test_execution_context.py b/tests/unit/trace/test_execution_context.py index 5f884b9fc..9df5e7d1f 100644 --- a/tests/unit/trace/test_execution_context.py +++ b/tests/unit/trace/test_execution_context.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +import threading import unittest + import mock -import threading from opencensus.trace import execution_context diff --git a/tests/unit/trace/test_ext_utils.py b/tests/unit/trace/test_ext_utils.py index a7d946e49..0816b2d7e 100644 --- a/tests/unit/trace/test_ext_utils.py +++ b/tests/unit/trace/test_ext_utils.py @@ -15,8 +15,8 @@ import unittest import mock - from google.rpc import code_pb2 + from opencensus.trace import utils diff --git a/tests/unit/trace/test_span.py b/tests/unit/trace/test_span.py index f7ddf5d7b..75637ad88 100644 --- a/tests/unit/trace/test_span.py +++ b/tests/unit/trace/test_span.py @@ -12,21 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from collections import OrderedDict import datetime import unittest +from collections import OrderedDict import mock - from google.rpc import code_pb2 from opencensus.common import utils -from opencensus.trace.span import BoundedDict -from opencensus.trace.span import BoundedList +from opencensus.trace.span import BoundedDict, BoundedList from opencensus.trace.stack_trace import StackTrace from opencensus.trace.status import Status -from opencensus.trace.time_event import Annotation -from opencensus.trace.time_event import MessageEvent +from opencensus.trace.time_event import Annotation, MessageEvent class TestSpan(unittest.TestCase): diff --git a/tests/unit/trace/test_span_context.py b/tests/unit/trace/test_span_context.py index 13602e3f0..d3d1a279b 100644 --- a/tests/unit/trace/test_span_context.py +++ b/tests/unit/trace/test_span_context.py @@ -13,6 +13,7 @@ # limitations under the License. import unittest + from opencensus.trace import span_context as span_context_module from opencensus.trace.trace_options import TraceOptions from opencensus.trace.tracestate import Tracestate diff --git a/tests/unit/trace/test_span_data.py b/tests/unit/trace/test_span_data.py index eb8109ef6..60b23ba9b 100644 --- a/tests/unit/trace/test_span_data.py +++ b/tests/unit/trace/test_span_data.py @@ -16,12 +16,9 @@ import unittest from opencensus.common import utils -from opencensus.trace import link -from opencensus.trace import span_context +from opencensus.trace import link, span_context from opencensus.trace import span_data as span_data_module -from opencensus.trace import stack_trace -from opencensus.trace import status -from opencensus.trace import time_event +from opencensus.trace import stack_trace, status, time_event class TestSpanData(unittest.TestCase): diff --git a/tests/unit/trace/test_time_event.py b/tests/unit/trace/test_time_event.py index a1e753181..063dfb4ad 100644 --- a/tests/unit/trace/test_time_event.py +++ b/tests/unit/trace/test_time_event.py @@ -12,8 +12,8 @@ 
# See the License for the specific language governing permissions and # limitations under the License. -from datetime import datetime import unittest +from datetime import datetime import mock diff --git a/tests/unit/trace/test_tracer.py b/tests/unit/trace/test_tracer.py index a4dbc9f0d..82b9a16a3 100644 --- a/tests/unit/trace/test_tracer.py +++ b/tests/unit/trace/test_tracer.py @@ -16,8 +16,7 @@ import mock -from opencensus.trace import samplers -from opencensus.trace import span_data +from opencensus.trace import samplers, span_data from opencensus.trace import tracer as tracer_module diff --git a/tests/unit/trace/test_tracestate.py b/tests/unit/trace/test_tracestate.py index 2aeac8f64..170f248ef 100644 --- a/tests/unit/trace/test_tracestate.py +++ b/tests/unit/trace/test_tracestate.py @@ -14,9 +14,10 @@ import unittest +from opencensus.trace.propagation.tracestate_string_format import ( + TracestateStringFormatter, +) from opencensus.trace.tracestate import Tracestate -from opencensus.trace.propagation.tracestate_string_format \ - import TracestateStringFormatter formatter = TracestateStringFormatter() diff --git a/tests/unit/trace/tracers/test_context_tracer.py b/tests/unit/trace/tracers/test_context_tracer.py index f05d048ae..1898fb37f 100644 --- a/tests/unit/trace/tracers/test_context_tracer.py +++ b/tests/unit/trace/tracers/test_context_tracer.py @@ -16,9 +16,8 @@ import mock +from opencensus.trace import execution_context, span from opencensus.trace.tracers import context_tracer -from opencensus.trace import span -from opencensus.trace import execution_context class TestContextTracer(unittest.TestCase): diff --git a/tox.ini b/tox.ini index 74a05863f..d021bf2c4 100644 --- a/tox.ini +++ b/tox.ini @@ -41,9 +41,11 @@ deps = py37-setup: pygments py37-docs: setuptools >= 36.4.0 py37-docs: sphinx >= 1.6.3 + isort ~= 4.3.21 commands = py{27,34,35,36,37}-unit: py.test --quiet --cov={envdir}/opencensus --cov=context --cov=contrib --cov-report term-missing 
--cov-config=.coveragerc --cov-fail-under=97 tests/unit/ context/ contrib/ + isort --check-only --diff --recursive . ; TODO: System tests py37-lint: flake8 context/ contrib/ opencensus/ tests/ examples/ py37-lint: - bash ./scripts/pylint.sh From c904f4866d160f6f0c42aae63a44bf13e0429ee7 Mon Sep 17 00:00:00 2001 From: Artur Tanona Date: Tue, 8 Oct 2019 18:10:30 +0200 Subject: [PATCH 23/79] Typo (#800) --- contrib/opencensus-ext-azure/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index 1d844e153..dcf205bb6 100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -71,7 +71,7 @@ You can enrich the logs with trace IDs and span IDs by using the `logging integr logger.warning('Before the span') with tracer.span(name='test'): logger.warning('In the span') - logger.warning('After the span')s + logger.warning('After the span') Metrics ~~~~~~~ From d7d31eba33f539c2367ca6389d995801c9c9c4ab Mon Sep 17 00:00:00 2001 From: Nathan Button Date: Mon, 14 Oct 2019 09:08:50 -0700 Subject: [PATCH 24/79] Create README.rst for the Datadog trace exporter (#799) --- contrib/opencensus-ext-datadog/README.rst | 80 +++++++++++++++++++ .../examples/datadog.py | 2 +- 2 files changed, 81 insertions(+), 1 deletion(-) create mode 100644 contrib/opencensus-ext-datadog/README.rst diff --git a/contrib/opencensus-ext-datadog/README.rst b/contrib/opencensus-ext-datadog/README.rst new file mode 100644 index 000000000..1fdcf158e --- /dev/null +++ b/contrib/opencensus-ext-datadog/README.rst @@ -0,0 +1,80 @@ +OpenCensus Datadog Exporter +============================================================================ + +|pypi| + +.. 
|pypi| image:: https://badge.fury.io/py/opencensus-ext-datadog.svg + :target: https://pypi.org/project/opencensus-ext-datadog/ + +Installation +------------ + +:: + + pip install opencensus-ext-datadog + +Usage +----- + +Trace +~~~~~ + +The **Datadog Trace Exporter** allows you to export `OpenCensus`_ traces to `Datadog`_. + +This example shows how to send a span "hello" to Datadog. + +* Set up a `Datadog Agent `_ that is accessible to your app. +* Place the URL for the agent in the `trace_addr` of the configuration options. + + .. code:: python + + from opencensus.ext.datadog.traces import DatadogTraceExporter, Options + from opencensus.trace.samplers import ProbabilitySampler + from opencensus.trace.tracer import Tracer + + tracer = Tracer( + exporter=DatadogTraceExporter(Options(service='app-name',trace_addr='my-datdog-agent:8126`)), + sampler=ProbabilitySampler(1.0) + ) + + with tracer.span(name='hello'): + print('Hello, World!') + +OpenCensus also supports several `integrations `_ which allows OpenCensus to integrate with third party libraries. + +This example shows how to integrate with the `requests `_ library. + +* Set up a `Datadog Agent `_ that is accessible to your app. +* Place the URL for the agent in the `trace_addr` of the configuration options. + +.. code:: python + + import requests + + from opencensus.ext.datadog.traces import DatadogTraceExporter, Options + from opencensus.trace.samplers import ProbabilitySampler + from opencensus.trace.tracer import Tracer + + config_integration.trace_integrations(['requests']) + tracer = Tracer( + exporter=DatadogTraceExporter( + Options( + service='app-name', + trace_addr='my-datdog-agent:8126` + ) + ), + sampler=ProbabilitySampler(1.0), + ) + with tracer.span(name='parent'): + response = requests.get(url='https://www.wikipedia.org/wiki/Rabbit') + + +References +---------- + +* `Datadog `_ +* `Examples `_ +* `OpenCensus Project `_ + +.. _Datadog: https://www.datadoghq.com/product/ +.. 
_OpenCensus: https://github.com/census-instrumentation/opencensus-python/ diff --git a/contrib/opencensus-ext-datadog/examples/datadog.py b/contrib/opencensus-ext-datadog/examples/datadog.py index 58ea73484..d415323e6 100644 --- a/contrib/opencensus-ext-datadog/examples/datadog.py +++ b/contrib/opencensus-ext-datadog/examples/datadog.py @@ -1,8 +1,8 @@ from flask import Flask +from opencensus.ext.datadog.traces import DatadogTraceExporter, Options from opencensus.ext.flask.flask_middleware import FlaskMiddleware from opencensus.trace.samplers import AlwaysOnSampler -from traces import DatadogTraceExporter, Options app = Flask(__name__) middleware = FlaskMiddleware(app, From b5d3a6685cd62c41e4e7ab67010a0b690ec3ad12 Mon Sep 17 00:00:00 2001 From: John Vandenberg Date: Tue, 15 Oct 2019 23:12:51 +0700 Subject: [PATCH 25/79] usage.rst: Replace nox-automation with nox (#803) --- docs/trace/usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/trace/usage.rst b/docs/trace/usage.rst index 8c9bb85c8..f0ed9a452 100644 --- a/docs/trace/usage.rst +++ b/docs/trace/usage.rst @@ -376,7 +376,7 @@ Tests source .tox/py34/bin/activate # Run the unit test - pip install nox-automation + pip install nox # See what's available in the nox suite nox -l From e9129b74681df204c027702360be20fc0eea7f76 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Wed, 23 Oct 2019 14:39:50 -0700 Subject: [PATCH 26/79] Validate instrumentation key in Azure Exporters (#789) --- contrib/opencensus-ext-azure/README.rst | 2 +- .../opencensus/ext/azure/common/utils.py | 25 +++++ .../ext/azure/log_exporter/__init__.py | 3 +- .../ext/azure/metrics_exporter/__init__.py | 3 +- .../ext/azure/trace_exporter/__init__.py | 3 +- .../tests/test_azure_utils.py | 94 +++++++++++++++++++ 6 files changed, 123 insertions(+), 7 deletions(-) diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index dcf205bb6..2149d591b 100644 --- 
a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -37,11 +37,11 @@ This example shows how to send a warning level log to Azure Monitor. logger.addHandler(AzureLogHandler(connection_string='InstrumentationKey=')) logger.warning('Hello, World!') -* Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``. You can enrich the logs with trace IDs and span IDs by using the `logging integration <../opencensus-ext-logging>`_. * Create an Azure Monitor resource and get the instrumentation key, more information can be found `here `_. +* Install the `logging integration package <../opencensus-ext-logging>`_ using ``pip install opencensus-ext-logging``. * Place your instrumentation key in a `connection string` and directly into your code. * Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``. diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py index bb3550e89..6224b9561 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py @@ -16,6 +16,7 @@ import locale import os import platform +import re import sys from opencensus.common.utils import timestamp_to_microseconds, to_iso_str @@ -65,3 +66,27 @@ def timestamp_to_iso_str(timestamp): def url_to_dependency_name(url): return urlparse(url).netloc + + +# Validate UUID format +# Specs taken from https://tools.ietf.org/html/rfc4122 +uuid_regex_pattern = re.compile('^[0-9a-f]{8}-' + '[0-9a-f]{4}-' + '[1-5][0-9a-f]{3}-' + '[89ab][0-9a-f]{3}-' + '[0-9a-f]{12}$') + + +def validate_instrumentation_key(instrumentation_key): + """Validates the instrumentation key used for Azure Monitor. + + An instrumentation key cannot be null or empty. 
An instrumentation key + is valid for Azure Monitor only if it is a valid UUID. + + :param instrumentation_key: The instrumentation key to validate + """ + if not instrumentation_key: + raise ValueError("Instrumentation key cannot be none or empty.") + match = uuid_regex_pattern.match(instrumentation_key) + if not match: + raise ValueError("Invalid instrumentation key.") diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index eccaa9372..b9b1be168 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -115,8 +115,7 @@ class AzureLogHandler(TransportMixin, BaseLogHandler): def __init__(self, **options): self.options = Options(**options) - if not self.options.instrumentation_key: - raise ValueError('The instrumentation_key is not provided.') + utils.validate_instrumentation_key(self.options.instrumentation_key) self.export_interval = self.options.export_interval self.max_batch_size = self.options.max_batch_size self.storage = LocalFileStorage( diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py index c37468acc..9b531b808 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py @@ -42,8 +42,7 @@ def __init__(self, options=None): if options is None: options = Options() self.options = options - if not self.options.instrumentation_key: - raise ValueError('The instrumentation_key is not provided.') + utils.validate_instrumentation_key(self.options.instrumentation_key) if self.options.max_batch_size <= 0: raise ValueError('Max batch size must be at least 1.') self.max_batch_size = self.options.max_batch_size 
diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py index 41f2a06aa..9daf58706 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py @@ -40,8 +40,7 @@ class AzureExporter(TransportMixin, BaseExporter): def __init__(self, **options): self.options = Options(**options) - if not self.options.instrumentation_key: - raise ValueError('The instrumentation_key is not provided.') + utils.validate_instrumentation_key(self.options.instrumentation_key) self.storage = LocalFileStorage( path=self.options.storage_path, max_size=self.options.storage_max_size, diff --git a/contrib/opencensus-ext-azure/tests/test_azure_utils.py b/contrib/opencensus-ext-azure/tests/test_azure_utils.py index 213a7fe0a..946f71b8a 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_utils.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_utils.py @@ -43,3 +43,97 @@ def test_url_to_dependency_name(self): 'https://www.wikipedia.org/wiki/Rabbit' ), 'www.wikipedia.org') + + def test_validate_instrumentation_key(self): + key = '1234abcd-5678-4efa-8abc-1234567890ab' + self.assertIsNone(utils.validate_instrumentation_key(key)) + + def test_invalid_key_none(self): + key = None + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_empty(self): + key = '' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_prefix(self): + key = 'test1234abcd-5678-4efa-8abc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_suffix(self): + key = '1234abcd-5678-4efa-8abc-1234567890abtest' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_length(self): + 
key = '1234abcd-5678-4efa-8abc-12234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_dashes(self): + key = '1234abcda5678-4efa-8abc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_section1_length(self): + key = '1234abcda-678-4efa-8abc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_section2_length(self): + key = '1234abcd-678-a4efa-8abc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_section3_length(self): + key = '1234abcd-6789-4ef-8cabc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_section4_length(self): + key = '1234abcd-678-4efa-8bc-11234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_section5_length(self): + key = '234abcd-678-4efa-8abc-11234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_section1_hex(self): + key = 'x234abcd-5678-4efa-8abc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_section2_hex(self): + key = '1234abcd-x678-4efa-8abc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_section3_hex(self): + key = '1234abcd-5678-4xfa-8abc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_section4_hex(self): + key = '1234abcd-5678-4xfa-8abc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_section5_hex(self): + key = '1234abcd-5678-4xfa-8abc-1234567890ab' + 
self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_version(self): + key = '1234abcd-5678-6efa-8abc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) + + def test_invalid_key_variant(self): + key = '1234abcd-5678-4efa-2abc-1234567890ab' + self.assertRaises(ValueError, + lambda: utils.validate_instrumentation_key(key)) From 75f853dc03afe97ab93bdc0a7c3b2c188b476607 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Mon, 25 Nov 2019 14:49:04 -0800 Subject: [PATCH 27/79] remove test (#824) --- .../tests/test_patching.py | 164 +++++++++--------- 1 file changed, 82 insertions(+), 82 deletions(-) diff --git a/contrib/opencensus-ext-gevent/tests/test_patching.py b/contrib/opencensus-ext-gevent/tests/test_patching.py index c5f2063b5..5172d54f7 100644 --- a/contrib/opencensus-ext-gevent/tests/test_patching.py +++ b/contrib/opencensus-ext-gevent/tests/test_patching.py @@ -1,96 +1,96 @@ -# Copyright 2019, OpenCensus Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# # Copyright 2019, OpenCensus Authors +# # +# # Licensed under the Apache License, Version 2.0 (the "License"); +# # you may not use this file except in compliance with the License. 
+# # You may obtain a copy of the License at +# # +# # http://www.apache.org/licenses/LICENSE-2.0 +# # +# # Unless required by applicable law or agreed to in writing, software +# # distributed under the License is distributed on an "AS IS" BASIS, +# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# # See the License for the specific language governing permissions and +# # limitations under the License. -import unittest +# import unittest -import gevent.monkey -import mock +# import gevent.monkey +# import mock -import opencensus.common.runtime_context as runtime_context +# import opencensus.common.runtime_context as runtime_context -class TestPatching(unittest.TestCase): - def setUp(self): - self.original_context = runtime_context.RuntimeContext +# class TestPatching(unittest.TestCase): +# def setUp(self): +# self.original_context = runtime_context.RuntimeContext - def tearDown(self): - runtime_context.RuntimeContext = self.original_context +# def tearDown(self): +# runtime_context.RuntimeContext = self.original_context - @mock.patch("gevent.monkey.is_module_patched", return_value=False) - def test_context_is_switched_without_contextvar_support( - self, patched_is_module_patched - ): - # patched_is_module_patched.return_value = False +# @mock.patch("gevent.monkey.is_module_patched", return_value=False) +# def test_context_is_switched_without_contextvar_support( +# self, patched_is_module_patched +# ): +# # patched_is_module_patched.return_value = False - # Trick gevent into thinking it is run for the first time. - # Allows to run multiple tests. - gevent.monkey.saved = {} +# # Trick gevent into thinking it is run for the first time. +# # Allows to run multiple tests. +# gevent.monkey.saved = {} - # All module patching is disabled to avoid the need of "unpatching". - # The needed events are emitted nevertheless. 
- gevent.monkey.patch_all( - contextvar=False, - socket=False, - dns=False, - time=False, - select=False, - thread=False, - os=False, - ssl=False, - httplib=False, - subprocess=False, - sys=False, - aggressive=False, - Event=False, - builtins=False, - signal=False, - queue=False - ) +# # All module patching is disabled to avoid the need of "unpatching". +# # The needed events are emitted nevertheless. +# gevent.monkey.patch_all( +# contextvar=False, +# socket=False, +# dns=False, +# time=False, +# select=False, +# thread=False, +# os=False, +# ssl=False, +# httplib=False, +# subprocess=False, +# sys=False, +# aggressive=False, +# Event=False, +# builtins=False, +# signal=False, +# queue=False +# ) - assert isinstance( - runtime_context.RuntimeContext, - runtime_context._ThreadLocalRuntimeContext, - ) +# assert isinstance( +# runtime_context.RuntimeContext, +# runtime_context._ThreadLocalRuntimeContext, +# ) - @mock.patch("gevent.monkey.is_module_patched", return_value=True) - def test_context_is_switched_with_contextvar_support( - self, patched_is_module_patched - ): +# @mock.patch("gevent.monkey.is_module_patched", return_value=True) +# def test_context_is_switched_with_contextvar_support( +# self, patched_is_module_patched +# ): - # Trick gevent into thinking it is run for the first time. - # Allows to run multiple tests. - gevent.monkey.saved = {} +# # Trick gevent into thinking it is run for the first time. +# # Allows to run multiple tests. +# gevent.monkey.saved = {} - # All module patching is disabled to avoid the need of "unpatching". - # The needed events are emitted nevertheless. - gevent.monkey.patch_all( - contextvar=False, - socket=False, - dns=False, - time=False, - select=False, - thread=False, - os=False, - ssl=False, - httplib=False, - subprocess=False, - sys=False, - aggressive=False, - Event=False, - builtins=False, - signal=False, - queue=False - ) +# # All module patching is disabled to avoid the need of "unpatching". 
+# # The needed events are emitted nevertheless. +# gevent.monkey.patch_all( +# contextvar=False, +# socket=False, +# dns=False, +# time=False, +# select=False, +# thread=False, +# os=False, +# ssl=False, +# httplib=False, +# subprocess=False, +# sys=False, +# aggressive=False, +# Event=False, +# builtins=False, +# signal=False, +# queue=False +# ) - assert runtime_context.RuntimeContext is self.original_context +# assert runtime_context.RuntimeContext is self.original_context From 49495ae823295f68644aea41f438e0a8da12188c Mon Sep 17 00:00:00 2001 From: Ivan Ramirez Date: Wed, 27 Nov 2019 02:22:34 +0800 Subject: [PATCH 28/79] Add optional custom properties to logging messages. (#822) --- contrib/opencensus-ext-azure/README.rst | 14 +++++++++ .../examples/logs/properties.py | 24 ++++++++++++++ .../ext/azure/log_exporter/__init__.py | 2 ++ .../tests/test_azure_log_exporter.py | 31 +++++++++++++++++++ 4 files changed, 71 insertions(+) create mode 100644 contrib/opencensus-ext-azure/examples/logs/properties.py diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index 2149d591b..8de7a8049 100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -73,6 +73,20 @@ You can enrich the logs with trace IDs and span IDs by using the `logging integr logger.warning('In the span') logger.warning('After the span') +You can also add custom properties to your log messages in the form of key-values. + +WARNING: For this feature to work, you need to pass a dictionary as the argument. If you pass arguments of any other type, the logger will ignore them. The solution is to convert these arguments into a dictionary. + +.. 
code:: python + + import logging + + from opencensus.ext.azure.log_exporter import AzureLogHandler + + logger = logging.getLogger(__name__) + logger.addHandler(AzureLogHandler(connection_string='InstrumentationKey=')) + logger.warning('action', {'key-1': 'value-1', 'key-2': 'value2'}) + Metrics ~~~~~~~ diff --git a/contrib/opencensus-ext-azure/examples/logs/properties.py b/contrib/opencensus-ext-azure/examples/logs/properties.py new file mode 100644 index 000000000..e6f8c5b2a --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/logs/properties.py @@ -0,0 +1,24 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from opencensus.ext.azure.log_exporter import AzureLogHandler + +logger = logging.getLogger(__name__) +# TODO: you need to specify the instrumentation key in a connection string +# and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING +# environment variable. 
+logger.addHandler(AzureLogHandler()) +logger.warning('action', {'key-1': 'value-1', 'key-2': 'value2'}) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index b9b1be168..dc9a50db8 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -198,6 +198,8 @@ def log_record_to_envelope(self, record): ) envelope.data = Data(baseData=data, baseType='ExceptionData') else: + if isinstance(record.args, dict): + properties.update(record.args) envelope.name = 'Microsoft.ApplicationInsights.Message' data = Message( message=self.format(record), diff --git a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py index 15c361cbd..7ff06022a 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py @@ -134,3 +134,34 @@ def test_log_record_to_envelope(self): envelope.iKey, '12345678-1234-5678-abcd-12345678abcd') handler.close() + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_log_record_with_custom_properties(self, requests_mock): + logger = logging.getLogger(self.id()) + handler = log_exporter.AzureLogHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + logger.addHandler(handler) + logger.warning('action', {'key-1': 'value-1', 'key-2': 'value-2'}) + handler.close() + post_body = requests_mock.call_args_list[0][1]['data'] + self.assertTrue('action' in post_body) + self.assertTrue('key-1' in post_body) + self.assertTrue('key-2' in post_body) + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_log_with_invalid_custom_properties(self, requests_mock): + logger = logging.getLogger(self.id()) + 
handler = log_exporter.AzureLogHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + logger.addHandler(handler) + logger.warning('action_1_%s', None) + logger.warning('action_2_%s', 'not_a_dict') + handler.close() + self.assertEqual(len(os.listdir(handler.storage.path)), 0) + post_body = requests_mock.call_args_list[0][1]['data'] + self.assertTrue('action_1' in post_body) + self.assertTrue('action_2' in post_body) From 228b5cf4fcd5f88192908dae2ac0e940790e9c4d Mon Sep 17 00:00:00 2001 From: Chris Johnston Date: Mon, 2 Dec 2019 13:17:17 -0800 Subject: [PATCH 29/79] docs: fix markdown-style links in rst readme (#828) --- contrib/opencensus-ext-azure/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index 8de7a8049..6b1473658 100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -202,9 +202,9 @@ This example shows how to send a span "hello" to Azure Monitor. with tracer.span(name='hello'): print('Hello, World!') -OpenCensus also supports several [integrations](https://github.com/census-instrumentation/opencensus-python#integration) which allows OpenCensus to integrate with third party libraries. +OpenCensus also supports several `integrations `_ which allows OpenCensus to integrate with third party libraries. -This example shows how to integrate with the [requests](https://2.python-requests.org/en/master/) library. +This example shows how to integrate with the `requests `_ library. * Create an Azure Monitor resource and get the instrumentation key, more information can be found `here `_. * Install the `requests integration package <../opencensus-ext-requests>`_ using ``pip install opencensus-ext-requests``. 
From 8fc960ae6de18665dff11ab85528020ca278cd59 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Wed, 4 Dec 2019 10:58:04 -0800 Subject: [PATCH 30/79] update changelog after 0.7.6 release (#829) --- CHANGELOG.md | 9 +++++++++ contrib/opencensus-ext-azure/CHANGELOG.md | 8 ++++++++ contrib/opencensus-ext-datadog/CHANGELOG.md | 7 +++++-- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bb78b55d3..3218a8ebb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,15 @@ ## Unreleased +## 0.7.6 +Released 2019-11-26 + +- Initial release for `datadog` module + ([#793](https://github.com/census-instrumentation/opencensus-python/pull/793)) +- Updated `azure` module + ([#789](https://github.com/census-instrumentation/opencensus-python/pull/789), + [#822](https://github.com/census-instrumentation/opencensus-python/pull/822)) + ## 0.7.5 Released 2019-10-01 diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 7731e418b..148905dce 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -2,6 +2,14 @@ ## Unreleased +## 1.0.1 +Released 2019-11-26 + +- Validate instrumentation key in Azure Exporters + ([#789](https://github.com/census-instrumentation/opencensus-python/pull/789)) +- Add optional custom properties to logging messages + ([#822](https://github.com/census-instrumentation/opencensus-python/pull/822)) + ## 1.0.0 Released 2019-09-30 diff --git a/contrib/opencensus-ext-datadog/CHANGELOG.md b/contrib/opencensus-ext-datadog/CHANGELOG.md index b2705a57e..b93e3f3d1 100644 --- a/contrib/opencensus-ext-datadog/CHANGELOG.md +++ b/contrib/opencensus-ext-datadog/CHANGELOG.md @@ -1,4 +1,7 @@ # Changelog -## Unreleased -- Initial version. 
+## 0.1.0 +Released 2019-11-26 + +- Initial version + ([#793](https://github.com/census-instrumentation/opencensus-python/pull/793)) From 157af589d4ab3ca4a5dc9e7e4ad880581126d8c6 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Wed, 15 Jan 2020 11:53:28 -0800 Subject: [PATCH 31/79] Remove coverage temporarily to unblock CI (#842) --- noxfile.py | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/noxfile.py b/noxfile.py index 48e63b47b..571ac6d64 100644 --- a/noxfile.py +++ b/noxfile.py @@ -52,7 +52,9 @@ def _install_dev_packages(session): def _install_test_dependencies(session): session.install('mock') session.install('pytest==4.6.4') - session.install('pytest-cov') + # 842 - Unit tests failing on CI due to failed import for coverage + # Might have something to do with the CircleCI image + # session.install('pytest-cov') session.install('retrying') session.install('unittest2') @@ -71,13 +73,13 @@ def unit(session): session.run( 'py.test', '--quiet', - '--cov=opencensus', - '--cov=context', - '--cov=contrib', - '--cov-append', - '--cov-config=.coveragerc', - '--cov-report=', - '--cov-fail-under=97', + # '--cov=opencensus', + # '--cov=context', + # '--cov=contrib', + # '--cov-append', + # '--cov-config=.coveragerc', + # '--cov-report=', + # '--cov-fail-under=97', 'tests/unit/', 'context/', 'contrib/', @@ -137,15 +139,15 @@ def lint_setup_py(session): 'python', 'setup.py', 'check', '--restructuredtext', '--strict') -@nox.session(python='3.6') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install('coverage', 'pytest-cov') - session.run('coverage', 'report', '--show-missing', '--fail-under=100') - session.run('coverage', 'erase') +# @nox.session(python='3.6') +# def cover(session): +# """Run the final coverage report. 
+# This outputs the coverage report aggregating coverage from the unit +# test runs (not system test runs), and then erases coverage data. +# """ +# session.install('coverage', 'pytest-cov') +# session.run('coverage', 'report', '--show-missing', '--fail-under=100') +# session.run('coverage', 'erase') @nox.session(python='3.6') From 22c1a285d8234d3ac5414c1e7be6901b772681aa Mon Sep 17 00:00:00 2001 From: Xinyi Joffre Date: Wed, 15 Jan 2020 15:16:17 -0800 Subject: [PATCH 32/79] Add custom_dimensions using kwargs instead of args (#837) --- contrib/opencensus-ext-azure/README.rst | 8 +-- .../examples/logs/properties.py | 12 ++++- .../ext/azure/log_exporter/__init__.py | 8 ++- .../tests/test_azure_log_exporter.py | 54 ++++++++++++++++--- 4 files changed, 70 insertions(+), 12 deletions(-) diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index 6b1473658..6b7bf250b 100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -73,9 +73,9 @@ You can enrich the logs with trace IDs and span IDs by using the `logging integr logger.warning('In the span') logger.warning('After the span') -You can also add custom properties to your log messages in the form of key-values. +You can also add custom properties to your log messages in the *extra* keyword argument using the custom_dimensions field. -WARNING: For this feature to work, you need to pass a dictionary as the argument. If you pass arguments of any other type, the logger will ignore them. The solution is to convert these arguments into a dictionary. +WARNING: For this feature to work, you need to pass a dictionary to the custom_dimensions field. If you pass arguments of any other type, the logger will ignore them. .. 
code:: python @@ -85,7 +85,9 @@ WARNING: For this feature to work, you need to pass a dictionary as the argument logger = logging.getLogger(__name__) logger.addHandler(AzureLogHandler(connection_string='InstrumentationKey=')) - logger.warning('action', {'key-1': 'value-1', 'key-2': 'value2'}) + + properties = {'custom_dimensions': {'key_1': 'value_1', 'key_2': 'value_2'}} + logger.warning('action', extra=properties) Metrics ~~~~~~~ diff --git a/contrib/opencensus-ext-azure/examples/logs/properties.py b/contrib/opencensus-ext-azure/examples/logs/properties.py index e6f8c5b2a..5cfdd3568 100644 --- a/contrib/opencensus-ext-azure/examples/logs/properties.py +++ b/contrib/opencensus-ext-azure/examples/logs/properties.py @@ -21,4 +21,14 @@ # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING # environment variable. logger.addHandler(AzureLogHandler()) -logger.warning('action', {'key-1': 'value-1', 'key-2': 'value2'}) + +properties = {'custom_dimensions': {'key_1': 'value_1', 'key_2': 'value_2'}} + +# Use properties in logging statements +logger.warning('action', extra=properties) + +# Use properties in exception logs +try: + result = 1 / 0 # generate a ZeroDivisionError +except Exception: + logger.exception('Captured an exception.', extra=properties) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index dc9a50db8..7ce44afd8 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -153,6 +153,7 @@ def log_record_to_envelope(self, record): tags=dict(utils.azure_monitor_context), time=utils.timestamp_to_iso_str(record.created), ) + envelope.tags['ai.operation.id'] = getattr( record, 'traceId', @@ -169,6 +170,11 @@ def log_record_to_envelope(self, record): 'lineNumber': record.lineno, 'level': record.levelname, } + + if (hasattr(record, 
'custom_dimensions') and + isinstance(record.custom_dimensions, dict)): + properties.update(record.custom_dimensions) + if record.exc_info: exctype, _value, tb = record.exc_info callstack = [] @@ -198,8 +204,6 @@ def log_record_to_envelope(self, record): ) envelope.data = Data(baseData=data, baseType='ExceptionData') else: - if isinstance(record.args, dict): - properties.update(record.args) envelope.name = 'Microsoft.ApplicationInsights.Message' data = Message( message=self.format(record), diff --git a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py index 7ff06022a..f835f185c 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py @@ -95,6 +95,32 @@ def test_exception(self, requests_mock): post_body = requests_mock.call_args_list[0][1]['data'] self.assertTrue('ZeroDivisionError' in post_body) + @mock.patch('requests.post', return_value=mock.Mock()) + def test_exception_with_custom_properties(self, requests_mock): + logger = logging.getLogger(self.id()) + handler = log_exporter.AzureLogHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + logger.addHandler(handler) + try: + return 1 / 0 # generate a ZeroDivisionError + except Exception: + properties = { + 'custom_dimensions': + { + 'key_1': 'value_1', + 'key_2': 'value_2' + } + } + logger.exception('Captured an exception.', extra=properties) + handler.close() + self.assertEqual(len(requests_mock.call_args_list), 1) + post_body = requests_mock.call_args_list[0][1]['data'] + self.assertTrue('ZeroDivisionError' in post_body) + self.assertTrue('key_1' in post_body) + self.assertTrue('key_2' in post_body) + @mock.patch('requests.post', return_value=mock.Mock()) def test_export_empty(self, request_mock): handler = log_exporter.AzureLogHandler( @@ -143,12 +169,18 @@ def 
test_log_record_with_custom_properties(self, requests_mock): storage_path=os.path.join(TEST_FOLDER, self.id()), ) logger.addHandler(handler) - logger.warning('action', {'key-1': 'value-1', 'key-2': 'value-2'}) + logger.warning('action', extra={ + 'custom_dimensions': + { + 'key_1': 'value_1', + 'key_2': 'value_2' + } + }) handler.close() post_body = requests_mock.call_args_list[0][1]['data'] self.assertTrue('action' in post_body) - self.assertTrue('key-1' in post_body) - self.assertTrue('key-2' in post_body) + self.assertTrue('key_1' in post_body) + self.assertTrue('key_2' in post_body) @mock.patch('requests.post', return_value=mock.Mock()) def test_log_with_invalid_custom_properties(self, requests_mock): @@ -159,9 +191,19 @@ def test_log_with_invalid_custom_properties(self, requests_mock): ) logger.addHandler(handler) logger.warning('action_1_%s', None) - logger.warning('action_2_%s', 'not_a_dict') + logger.warning('action_2_%s', 'arg', extra={ + 'custom_dimensions': 'not_a_dict' + }) + logger.warning('action_3_%s', 'arg', extra={ + 'notcustom_dimensions': {'key_1': 'value_1'} + }) + handler.close() self.assertEqual(len(os.listdir(handler.storage.path)), 0) post_body = requests_mock.call_args_list[0][1]['data'] - self.assertTrue('action_1' in post_body) - self.assertTrue('action_2' in post_body) + self.assertTrue('action_1_' in post_body) + self.assertTrue('action_2_arg' in post_body) + self.assertTrue('action_3_arg' in post_body) + + self.assertFalse('not_a_dict' in post_body) + self.assertFalse('key_1' in post_body) From 3b48294c1e5860e5c5beadc6afc1c6d1acf9e1a1 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 28 Jan 2020 13:29:16 -0800 Subject: [PATCH 33/79] Fix HTTP attributes for deps, fix success and response/resultCode for Azure Exporter (#827) --- .../opencensus/ext/azure/common/utils.py | 10 - .../ext/azure/trace_exporter/__init__.py | 47 ++- .../tests/test_azure_trace_exporter.py | 297 +++++++++++++++++- .../tests/test_azure_utils.py | 7 - 
.../opencensus/ext/httplib/trace.py | 4 + .../tests/test_httplib_trace.py | 3 +- .../opencensus/ext/requests/trace.py | 8 + .../tests/test_requests_trace.py | 8 + 8 files changed, 338 insertions(+), 46 deletions(-) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py index 6224b9561..4907f74af 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py @@ -23,12 +23,6 @@ from opencensus.common.version import __version__ as opencensus_version from opencensus.ext.azure.common.version import __version__ as ext_version -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - - azure_monitor_context = { 'ai.cloud.role': os.path.basename(sys.argv[0]) or 'Python Application', 'ai.cloud.roleInstance': platform.node(), @@ -64,10 +58,6 @@ def timestamp_to_iso_str(timestamp): return to_iso_str(datetime.datetime.utcfromtimestamp(timestamp)) -def url_to_dependency_name(url): - return urlparse(url).netloc - - # Validate UUID format # Specs taken from https://tools.ietf.org/html/rfc4122 uuid_regex_pattern = re.compile('^[0-9a-f]{8}-' diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py index 9daf58706..52db582ab 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py @@ -27,6 +27,11 @@ from opencensus.ext.azure.common.transport import TransportMixin from opencensus.trace.span import SpanKind +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse + logger = logging.getLogger(__name__) __all__ = ['AzureExporter'] @@ -55,50 +60,58 @@ def span_data_to_envelope(self, sd): 
tags=dict(utils.azure_monitor_context), time=sd.start_time, ) + envelope.tags['ai.operation.id'] = sd.context.trace_id if sd.parent_span_id: - envelope.tags['ai.operation.parentId'] = '|{}.{}.'.format( - sd.context.trace_id, + envelope.tags['ai.operation.parentId'] = '{}'.format( sd.parent_span_id, ) if sd.span_kind == SpanKind.SERVER: envelope.name = 'Microsoft.ApplicationInsights.Request' data = Request( - id='|{}.{}.'.format(sd.context.trace_id, sd.span_id), + id='{}'.format(sd.span_id), duration=utils.timestamp_to_duration( sd.start_time, sd.end_time, ), - responseCode='0', - success=False, + responseCode=str(sd.status.code), + success=False, # Modify based off attributes or status properties={}, ) envelope.data = Data(baseData=data, baseType='RequestData') + data.name = '' if 'http.method' in sd.attributes: data.name = sd.attributes['http.method'] if 'http.route' in sd.attributes: data.name = data.name + ' ' + sd.attributes['http.route'] envelope.tags['ai.operation.name'] = data.name + data.properties['request.name'] = data.name + elif 'http.path' in sd.attributes: + data.properties['request.name'] = data.name + \ + ' ' + sd.attributes['http.path'] if 'http.url' in sd.attributes: data.url = sd.attributes['http.url'] + data.properties['request.url'] = sd.attributes['http.url'] if 'http.status_code' in sd.attributes: status_code = sd.attributes['http.status_code'] data.responseCode = str(status_code) data.success = ( status_code >= 200 and status_code <= 399 ) + elif sd.status.code == 0: + data.success = True else: envelope.name = \ 'Microsoft.ApplicationInsights.RemoteDependency' data = RemoteDependency( name=sd.name, # TODO - id='|{}.{}.'.format(sd.context.trace_id, sd.span_id), - resultCode='0', # TODO + id='{}'.format(sd.span_id), + resultCode=str(sd.status.code), duration=utils.timestamp_to_duration( sd.start_time, sd.end_time, ), - success=True, # TODO + success=False, # Modify based off attributes or status properties={}, ) envelope.data = Data( @@ 
-106,15 +119,27 @@ def span_data_to_envelope(self, sd): baseType='RemoteDependencyData', ) if sd.span_kind == SpanKind.CLIENT: - data.type = 'HTTP' # TODO + data.type = sd.attributes.get('component') if 'http.url' in sd.attributes: url = sd.attributes['http.url'] # TODO: error handling, probably put scheme as well - data.name = utils.url_to_dependency_name(url) + data.data = url + parse_url = urlparse(url) + # target matches authority (host:port) + data.target = parse_url.netloc + if 'http.method' in sd.attributes: + # name is METHOD/path + data.name = sd.attributes['http.method'] \ + + ' ' + parse_url.path if 'http.status_code' in sd.attributes: - data.resultCode = str(sd.attributes['http.status_code']) + status_code = sd.attributes["http.status_code"] + data.resultCode = str(status_code) + data.success = 200 <= status_code < 400 + elif sd.status.code == 0: + data.success = True else: data.type = 'INPROC' + data.success = True # TODO: links, tracestate, tags for key in sd.attributes: # This removes redundant data from ApplicationInsights diff --git a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py index 935a52a7e..3543157b5 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py @@ -101,6 +101,7 @@ def test_span_data_to_envelope(self): from opencensus.trace.span import SpanKind from opencensus.trace.span_context import SpanContext from opencensus.trace.span_data import SpanData + from opencensus.trace.status import Status from opencensus.trace.trace_options import TraceOptions from opencensus.trace.tracestate import Tracestate @@ -122,6 +123,7 @@ def test_span_data_to_envelope(self): span_id='6e0c63257de34c92', parent_span_id='6e0c63257de34c93', attributes={ + 'component': 'HTTP', 'http.method': 'GET', 'http.url': 'https://www.wikipedia.org/wiki/Rabbit', 'http.status_code': 200, @@ -130,7 
+132,7 @@ def test_span_data_to_envelope(self): end_time='2010-10-24T07:28:38.234567Z', stack_trace=None, links=None, - status=None, + status=Status(0), annotations=None, message_events=None, same_process_as_parent_span=None, @@ -145,7 +147,7 @@ def test_span_data_to_envelope(self): 'Microsoft.ApplicationInsights.RemoteDependency') self.assertEqual( envelope.tags['ai.operation.parentId'], - '|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c93.') + '6e0c63257de34c93') self.assertEqual( envelope.tags['ai.operation.id'], '6e0c63257de34c90bf9efcd03927272e') @@ -154,10 +156,16 @@ def test_span_data_to_envelope(self): '2010-10-24T07:28:38.123456Z') self.assertEqual( envelope.data.baseData.name, + 'GET /wiki/Rabbit') + self.assertEqual( + envelope.data.baseData.data, + 'https://www.wikipedia.org/wiki/Rabbit') + self.assertEqual( + envelope.data.baseData.target, 'www.wikipedia.org') self.assertEqual( envelope.data.baseData.id, - '|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c92.') + '6e0c63257de34c92') self.assertEqual( envelope.data.baseData.resultCode, '200') @@ -188,7 +196,66 @@ def test_span_data_to_envelope(self): end_time='2010-10-24T07:28:38.234567Z', stack_trace=None, links=None, - status=None, + status=Status(0), + annotations=None, + message_events=None, + same_process_as_parent_span=None, + child_span_count=None, + span_kind=SpanKind.CLIENT, + )) + self.assertEqual( + envelope.iKey, + '12345678-1234-5678-abcd-12345678abcd') + self.assertEqual( + envelope.name, + 'Microsoft.ApplicationInsights.RemoteDependency') + self.assertEqual( + envelope.tags['ai.operation.parentId'], + '6e0c63257de34c93') + self.assertEqual( + envelope.tags['ai.operation.id'], + '6e0c63257de34c90bf9efcd03927272e') + self.assertEqual( + envelope.time, + '2010-10-24T07:28:38.123456Z') + self.assertEqual( + envelope.data.baseData.name, + 'test') + self.assertEqual( + envelope.data.baseData.id, + '6e0c63257de34c92') + self.assertEqual( + envelope.data.baseData.duration, + '0.00:00:00.111') + 
self.assertEqual( + envelope.data.baseData.type, + None) + self.assertEqual( + envelope.data.baseType, + 'RemoteDependencyData') + + # SpanKind.CLIENT missing method + envelope = exporter.span_data_to_envelope(SpanData( + name='test', + context=SpanContext( + trace_id='6e0c63257de34c90bf9efcd03927272e', + span_id='6e0c63257de34c91', + trace_options=TraceOptions('1'), + tracestate=Tracestate(), + from_header=False, + ), + span_id='6e0c63257de34c92', + parent_span_id='6e0c63257de34c93', + attributes={ + 'component': 'HTTP', + 'http.url': 'https://www.wikipedia.org/wiki/Rabbit', + 'http.status_code': 200, + }, + start_time='2010-10-24T07:28:38.123456Z', + end_time='2010-10-24T07:28:38.234567Z', + stack_trace=None, + links=None, + status=Status(0), annotations=None, message_events=None, same_process_as_parent_span=None, @@ -203,7 +270,7 @@ def test_span_data_to_envelope(self): 'Microsoft.ApplicationInsights.RemoteDependency') self.assertEqual( envelope.tags['ai.operation.parentId'], - '|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c93.') + '6e0c63257de34c93') self.assertEqual( envelope.tags['ai.operation.id'], '6e0c63257de34c90bf9efcd03927272e') @@ -213,9 +280,18 @@ def test_span_data_to_envelope(self): self.assertEqual( envelope.data.baseData.name, 'test') + self.assertEqual( + envelope.data.baseData.data, + 'https://www.wikipedia.org/wiki/Rabbit') + self.assertEqual( + envelope.data.baseData.target, + 'www.wikipedia.org') self.assertEqual( envelope.data.baseData.id, - '|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c92.') + '6e0c63257de34c92') + self.assertEqual( + envelope.data.baseData.resultCode, + '200') self.assertEqual( envelope.data.baseData.duration, '0.00:00:00.111') @@ -239,6 +315,7 @@ def test_span_data_to_envelope(self): span_id='6e0c63257de34c92', parent_span_id='6e0c63257de34c93', attributes={ + 'component': 'HTTP', 'http.method': 'GET', 'http.path': '/wiki/Rabbit', 'http.route': '/wiki/Rabbit', @@ -249,7 +326,7 @@ def test_span_data_to_envelope(self): 
end_time='2010-10-24T07:28:38.234567Z', stack_trace=None, links=None, - status=None, + status=Status(0), annotations=None, message_events=None, same_process_as_parent_span=None, @@ -264,7 +341,7 @@ def test_span_data_to_envelope(self): 'Microsoft.ApplicationInsights.Request') self.assertEqual( envelope.tags['ai.operation.parentId'], - '|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c93.') + '6e0c63257de34c93') self.assertEqual( envelope.tags['ai.operation.id'], '6e0c63257de34c90bf9efcd03927272e') @@ -276,7 +353,7 @@ def test_span_data_to_envelope(self): '2010-10-24T07:28:38.123456Z') self.assertEqual( envelope.data.baseData.id, - '|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c92.') + '6e0c63257de34c92') self.assertEqual( envelope.data.baseData.duration, '0.00:00:00.111') @@ -286,12 +363,18 @@ def test_span_data_to_envelope(self): self.assertEqual( envelope.data.baseData.name, 'GET /wiki/Rabbit') + self.assertEqual( + envelope.data.baseData.properties['request.name'], + 'GET /wiki/Rabbit') self.assertEqual( envelope.data.baseData.success, True) self.assertEqual( envelope.data.baseData.url, 'https://www.wikipedia.org/wiki/Rabbit') + self.assertEqual( + envelope.data.baseData.properties['request.url'], + 'https://www.wikipedia.org/wiki/Rabbit') self.assertEqual( envelope.data.baseType, 'RequestData') @@ -309,6 +392,7 @@ def test_span_data_to_envelope(self): span_id='6e0c63257de34c92', parent_span_id='6e0c63257de34c93', attributes={ + 'component': 'HTTP', 'http.method': 'GET', 'http.path': '/wiki/Rabbit', 'http.route': '/wiki/Rabbit', @@ -319,7 +403,7 @@ def test_span_data_to_envelope(self): end_time='2010-10-24T07:28:38.234567Z', stack_trace=None, links=None, - status=None, + status=Status(0), annotations=None, message_events=None, same_process_as_parent_span=None, @@ -334,7 +418,7 @@ def test_span_data_to_envelope(self): 'Microsoft.ApplicationInsights.Request') self.assertEqual( envelope.tags['ai.operation.parentId'], - 
'|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c93.') + '6e0c63257de34c93') self.assertEqual( envelope.tags['ai.operation.id'], '6e0c63257de34c90bf9efcd03927272e') @@ -346,7 +430,7 @@ def test_span_data_to_envelope(self): '2010-10-24T07:28:38.123456Z') self.assertEqual( envelope.data.baseData.id, - '|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c92.') + '6e0c63257de34c92') self.assertEqual( envelope.data.baseData.duration, '0.00:00:00.111') @@ -383,7 +467,7 @@ def test_span_data_to_envelope(self): end_time='2010-10-24T07:28:38.234567Z', stack_trace=None, links=None, - status=None, + status=Status(0), annotations=None, message_events=None, same_process_as_parent_span=None, @@ -398,7 +482,7 @@ def test_span_data_to_envelope(self): 'Microsoft.ApplicationInsights.Request') self.assertEqual( envelope.tags['ai.operation.parentId'], - '|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c93.') + '6e0c63257de34c93') self.assertEqual( envelope.tags['ai.operation.id'], '6e0c63257de34c90bf9efcd03927272e') @@ -407,7 +491,7 @@ def test_span_data_to_envelope(self): '2010-10-24T07:28:38.123456Z') self.assertEqual( envelope.data.baseData.id, - '|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c92.') + '6e0c63257de34c92') self.assertEqual( envelope.data.baseData.duration, '0.00:00:00.111') @@ -432,7 +516,7 @@ def test_span_data_to_envelope(self): end_time='2010-10-24T07:28:38.234567Z', stack_trace=None, links=None, - status=None, + status=Status(0), annotations=None, message_events=None, same_process_as_parent_span=None, @@ -462,14 +546,193 @@ def test_span_data_to_envelope(self): '0.00:00:00.111') self.assertEqual( envelope.data.baseData.id, - '|6e0c63257de34c90bf9efcd03927272e.6e0c63257de34c92.') + '6e0c63257de34c92') self.assertEqual( envelope.data.baseData.type, 'INPROC') + self.assertEqual( + envelope.data.baseData.success, + True + ) self.assertEqual( envelope.data.baseType, 'RemoteDependencyData') + # Status server status code attribute + envelope = 
exporter.span_data_to_envelope(SpanData( + name='test', + context=SpanContext( + trace_id='6e0c63257de34c90bf9efcd03927272e', + span_id='6e0c63257de34c91', + trace_options=TraceOptions('1'), + tracestate=Tracestate(), + from_header=False, + ), + span_id='6e0c63257de34c92', + parent_span_id='6e0c63257de34c93', + attributes={ + 'http.status_code': 201 + }, + start_time='2010-10-24T07:28:38.123456Z', + end_time='2010-10-24T07:28:38.234567Z', + stack_trace=None, + links=None, + status=Status(0), + annotations=None, + message_events=None, + same_process_as_parent_span=None, + child_span_count=None, + span_kind=SpanKind.SERVER, + )) + self.assertEqual(envelope.data.baseData.responseCode, "201") + self.assertTrue(envelope.data.baseData.success) + + # Status server status code attribute missing + envelope = exporter.span_data_to_envelope(SpanData( + name='test', + context=SpanContext( + trace_id='6e0c63257de34c90bf9efcd03927272e', + span_id='6e0c63257de34c91', + trace_options=TraceOptions('1'), + tracestate=Tracestate(), + from_header=False, + ), + span_id='6e0c63257de34c92', + parent_span_id='6e0c63257de34c93', + attributes={}, + start_time='2010-10-24T07:28:38.123456Z', + end_time='2010-10-24T07:28:38.234567Z', + stack_trace=None, + links=None, + status=Status(1), + annotations=None, + message_events=None, + same_process_as_parent_span=None, + child_span_count=None, + span_kind=SpanKind.SERVER, + )) + self.assertFalse(envelope.data.baseData.success) + + # Server route attribute missing + envelope = exporter.span_data_to_envelope(SpanData( + name='test', + context=SpanContext( + trace_id='6e0c63257de34c90bf9efcd03927272e', + span_id='6e0c63257de34c91', + trace_options=TraceOptions('1'), + tracestate=Tracestate(), + from_header=False, + ), + span_id='6e0c63257de34c92', + parent_span_id='6e0c63257de34c93', + attributes={ + 'component': 'HTTP', + 'http.method': 'GET', + 'http.path': '/wiki/Rabbitz', + 'http.url': 'https://www.wikipedia.org/wiki/Rabbit', + 'http.status_code': 
400, + }, + start_time='2010-10-24T07:28:38.123456Z', + end_time='2010-10-24T07:28:38.234567Z', + stack_trace=None, + links=None, + status=Status(1), + annotations=None, + message_events=None, + same_process_as_parent_span=None, + child_span_count=None, + span_kind=SpanKind.SERVER, + )) + self.assertEqual(envelope.data.baseData.properties['request.name'], + 'GET /wiki/Rabbitz') + + # Server route and path attribute missing + envelope = exporter.span_data_to_envelope(SpanData( + name='test', + context=SpanContext( + trace_id='6e0c63257de34c90bf9efcd03927272e', + span_id='6e0c63257de34c91', + trace_options=TraceOptions('1'), + tracestate=Tracestate(), + from_header=False, + ), + span_id='6e0c63257de34c92', + parent_span_id='6e0c63257de34c93', + attributes={ + 'component': 'HTTP', + 'http.method': 'GET', + 'http.url': 'https://www.wikipedia.org/wiki/Rabbit', + 'http.status_code': 400, + }, + start_time='2010-10-24T07:28:38.123456Z', + end_time='2010-10-24T07:28:38.234567Z', + stack_trace=None, + links=None, + status=Status(1), + annotations=None, + message_events=None, + same_process_as_parent_span=None, + child_span_count=None, + span_kind=SpanKind.SERVER, + )) + self.assertIsNone( + envelope.data.baseData.properties.get('request.name')) + + # Status client status code attribute + envelope = exporter.span_data_to_envelope(SpanData( + name='test', + context=SpanContext( + trace_id='6e0c63257de34c90bf9efcd03927272e', + span_id='6e0c63257de34c91', + trace_options=TraceOptions('1'), + tracestate=Tracestate(), + from_header=False, + ), + span_id='6e0c63257de34c92', + parent_span_id='6e0c63257de34c93', + attributes={ + 'http.status_code': 201 + }, + start_time='2010-10-24T07:28:38.123456Z', + end_time='2010-10-24T07:28:38.234567Z', + stack_trace=None, + links=None, + status=Status(0), + annotations=None, + message_events=None, + same_process_as_parent_span=None, + child_span_count=None, + span_kind=SpanKind.CLIENT, + )) + self.assertEqual(envelope.data.baseData.resultCode, 
"201") + self.assertTrue(envelope.data.baseData.success) + + # Status client status code attributes missing + envelope = exporter.span_data_to_envelope(SpanData( + name='test', + context=SpanContext( + trace_id='6e0c63257de34c90bf9efcd03927272e', + span_id='6e0c63257de34c91', + trace_options=TraceOptions('1'), + tracestate=Tracestate(), + from_header=False, + ), + span_id='6e0c63257de34c92', + parent_span_id='6e0c63257de34c93', + attributes={}, + start_time='2010-10-24T07:28:38.123456Z', + end_time='2010-10-24T07:28:38.234567Z', + stack_trace=None, + links=None, + status=Status(1), + annotations=None, + message_events=None, + same_process_as_parent_span=None, + child_span_count=None, + span_kind=SpanKind.CLIENT, + )) + self.assertFalse(envelope.data.baseData.success) + exporter._stop() def test_transmission_nothing(self): diff --git a/contrib/opencensus-ext-azure/tests/test_azure_utils.py b/contrib/opencensus-ext-azure/tests/test_azure_utils.py index 946f71b8a..47ef484d8 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_utils.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_utils.py @@ -37,13 +37,6 @@ def test_timestamp_to_iso_str(self): 1287905318.123456, ), '2010-10-24T07:28:38.123456Z') - def test_url_to_dependency_name(self): - self.assertEqual( - utils.url_to_dependency_name( - 'https://www.wikipedia.org/wiki/Rabbit' - ), - 'www.wikipedia.org') - def test_validate_instrumentation_key(self): key = '1234abcd-5678-4efa-8abc-1234567890ab' self.assertIsNone(utils.validate_instrumentation_key(key)) diff --git a/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py b/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py index ed75e326b..9df2b6308 100644 --- a/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py +++ b/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py @@ -75,6 +75,10 @@ def call(self, method, url, body, headers, *args, **kwargs): _span.span_kind = span_module.SpanKind.CLIENT _span.name = 
'[httplib]{}'.format(request_func.__name__) + # Add the component type to attributes + _tracer.add_attribute_to_current_span( + "component", "HTTP") + # Add the request url to attributes _tracer.add_attribute_to_current_span(HTTP_URL, url) diff --git a/contrib/opencensus-ext-httplib/tests/test_httplib_trace.py b/contrib/opencensus-ext-httplib/tests/test_httplib_trace.py index e85bfecdd..eed7d8f42 100644 --- a/contrib/opencensus-ext-httplib/tests/test_httplib_trace.py +++ b/contrib/opencensus-ext-httplib/tests/test_httplib_trace.py @@ -91,7 +91,8 @@ def test_wrap_httplib_request(self): with patch, patch_thread: wrapped(mock_self, method, url, body, headers) - expected_attributes = {'http.url': url, 'http.method': method} + expected_attributes = {'component': 'HTTP', + 'http.url': url, 'http.method': method} expected_name = '[httplib]request' mock_request_func.assert_called_with(mock_self, method, url, body, { diff --git a/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py b/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py index 57dd1b6b3..c99862018 100644 --- a/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py +++ b/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py @@ -91,6 +91,10 @@ def call(url, *args, **kwargs): _span.name = '{}'.format(path) _span.span_kind = span_module.SpanKind.CLIENT + # Add the component type to attributes + _tracer.add_attribute_to_current_span( + "component", "HTTP") + # Add the requests host to attributes _tracer.add_attribute_to_current_span( HTTP_HOST, dest_url) @@ -167,6 +171,10 @@ def wrap_session_request(wrapped, instance, args, kwargs): except Exception: # pragma: NO COVER pass + # Add the component type to attributes + _tracer.add_attribute_to_current_span( + "component", "HTTP") + # Add the requests host to attributes _tracer.add_attribute_to_current_span( HTTP_HOST, dest_url) diff --git a/contrib/opencensus-ext-requests/tests/test_requests_trace.py 
b/contrib/opencensus-ext-requests/tests/test_requests_trace.py index 9aa2c7131..c3957ae71 100644 --- a/contrib/opencensus-ext-requests/tests/test_requests_trace.py +++ b/contrib/opencensus-ext-requests/tests/test_requests_trace.py @@ -104,6 +104,7 @@ def test_wrap_requests(self): wrapped(url) expected_attributes = { + 'component': 'HTTP', 'http.host': 'localhost:8080', 'http.method': 'GET', 'http.path': '/test', @@ -247,6 +248,7 @@ def test_wrap_requests_timeout(self): wrapped(url) expected_attributes = { + 'component': 'HTTP', 'http.host': 'localhost:8080', 'http.method': 'GET', 'http.path': '/test', @@ -294,6 +296,7 @@ def test_wrap_requests_invalid_url(self): wrapped(url) expected_attributes = { + 'component': 'HTTP', 'http.host': 'localhost:8080', 'http.method': 'GET', 'http.path': '/test', @@ -342,6 +345,7 @@ def test_wrap_requests_exception(self): wrapped(url) expected_attributes = { + 'component': 'HTTP', 'http.host': 'localhost:8080', 'http.method': 'GET', 'http.path': '/test', @@ -388,6 +392,7 @@ def test_wrap_session_request(self): ) expected_attributes = { + 'component': 'HTTP', 'http.host': 'localhost:8080', 'http.method': 'POST', 'http.path': '/test', @@ -619,6 +624,7 @@ def test_wrap_session_request_timeout(self): ) expected_attributes = { + 'component': 'HTTP', 'http.host': 'localhost:8080', 'http.method': 'POST', 'http.path': '/test', @@ -667,6 +673,7 @@ def test_wrap_session_request_invalid_url(self): ) expected_attributes = { + 'component': 'HTTP', 'http.host': 'localhost:8080', 'http.method': 'POST', 'http.path': '/test', @@ -715,6 +722,7 @@ def test_wrap_session_request_exception(self): ) expected_attributes = { + 'component': 'HTTP', 'http.host': 'localhost:8080', 'http.method': 'POST', 'http.path': '/test', From 4d1d6170a49eb35e5e232b04ebc0e6fb5bbad99e Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 28 Jan 2020 14:16:15 -0800 Subject: [PATCH 34/79] Add Fixed-rate sampling logic for Azure Log Exporter (#848) --- 
.../opencensus/ext/azure/common/__init__.py | 1 + .../ext/azure/log_exporter/__init__.py | 14 +++++++ .../tests/test_azure_log_exporter.py | 41 +++++++++++++++++++ 3 files changed, 56 insertions(+) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py index 754e2a27c..6b763c037 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py @@ -92,6 +92,7 @@ def __init__(self, *args, **kwargs): export_interval=15.0, grace_period=5.0, instrumentation_key=None, + logging_sampling_rate=1.0, max_batch_size=100, minimum_retry_interval=60, # minimum retry interval in seconds proxy=None, diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index 7ce44afd8..9df9777a4 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -13,6 +13,7 @@ # limitations under the License. import logging +import random import threading import time import traceback @@ -107,6 +108,16 @@ def stop(self, timeout=None): # pragma: NO COVER return time.time() - start_time # time taken to stop +class SamplingFilter(logging.Filter): + + def __init__(self, probability=1.0): + super(SamplingFilter, self).__init__() + self.probability = probability + + def filter(self, record): + return random.random() < self.probability + + class AzureLogHandler(TransportMixin, BaseLogHandler): """Handler for logging to Microsoft Azure Monitor. 
@@ -116,6 +127,8 @@ class AzureLogHandler(TransportMixin, BaseLogHandler): def __init__(self, **options): self.options = Options(**options) utils.validate_instrumentation_key(self.options.instrumentation_key) + if not 0 <= self.options.logging_sampling_rate <= 1: + raise ValueError('Sampling must be in the range: [0,1]') self.export_interval = self.options.export_interval self.max_batch_size = self.options.max_batch_size self.storage = LocalFileStorage( @@ -125,6 +138,7 @@ def __init__(self, **options): retention_period=self.options.storage_retention_period, ) super(AzureLogHandler, self).__init__() + self.addFilter(SamplingFilter(self.options.logging_sampling_rate)) def close(self): self.storage.close() diff --git a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py index f835f185c..8aa6baa8d 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py @@ -78,6 +78,13 @@ def test_ctor(self): self.assertRaises(ValueError, lambda: log_exporter.AzureLogHandler()) Options._default.instrumentation_key = instrumentation_key + def test_invalid_sampling_rate(self): + with self.assertRaises(ValueError): + log_exporter.AzureLogHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + logging_sampling_rate=4.0, + ) + @mock.patch('requests.post', return_value=mock.Mock()) def test_exception(self, requests_mock): logger = logging.getLogger(self.id()) @@ -207,3 +214,37 @@ def test_log_with_invalid_custom_properties(self, requests_mock): self.assertFalse('not_a_dict' in post_body) self.assertFalse('key_1' in post_body) + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_log_record_sampled(self, requests_mock): + logger = logging.getLogger(self.id()) + handler = log_exporter.AzureLogHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + logging_sampling_rate=1.0, + ) + 
logger.addHandler(handler) + logger.warning('Hello_World') + logger.warning('Hello_World2') + logger.warning('Hello_World3') + logger.warning('Hello_World4') + handler.close() + post_body = requests_mock.call_args_list[0][1]['data'] + self.assertTrue('Hello_World' in post_body) + self.assertTrue('Hello_World2' in post_body) + self.assertTrue('Hello_World3' in post_body) + self.assertTrue('Hello_World4' in post_body) + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_log_record_not_sampled(self, requests_mock): + logger = logging.getLogger(self.id()) + handler = log_exporter.AzureLogHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + logging_sampling_rate=0.0, + ) + logger.addHandler(handler) + logger.warning('Hello_World') + logger.warning('Hello_World2') + logger.warning('Hello_World3') + logger.warning('Hello_World4') + handler.close() + self.assertFalse(requests_mock.called) From 4f4f0206f1d6de7d81a37c6aa6de18bf27b059c7 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 28 Jan 2020 16:42:11 -0800 Subject: [PATCH 35/79] Implement TelemetryProcessors for Azure exporters (#851) --- contrib/opencensus-ext-azure/README.rst | 139 +++++++++++++++++- .../opencensus/ext/azure/common/processor.py | 63 ++++++++ .../ext/azure/log_exporter/__init__.py | 5 +- .../ext/azure/metrics_exporter/__init__.py | 6 +- .../ext/azure/trace_exporter/__init__.py | 5 +- .../tests/test_processor.py | 94 ++++++++++++ 6 files changed, 306 insertions(+), 6 deletions(-) create mode 100644 contrib/opencensus-ext-azure/opencensus/ext/azure/common/processor.py create mode 100644 contrib/opencensus-ext-azure/tests/test_processor.py diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index 6b7bf250b..49ba625f3 100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -37,6 +37,8 @@ This example shows how to send a warning level log to Azure Monitor. 
logger.addHandler(AzureLogHandler(connection_string='InstrumentationKey=')) logger.warning('Hello, World!') +Correlation +########### You can enrich the logs with trace IDs and span IDs by using the `logging integration <../opencensus-ext-logging>`_. @@ -73,6 +75,9 @@ You can enrich the logs with trace IDs and span IDs by using the `logging integr logger.warning('In the span') logger.warning('After the span') +Custom Properties +################# + You can also add custom properties to your log messages in the *extra* keyword argument using the custom_dimensions field. WARNING: For this feature to work, you need to pass a dictionary to the custom_dimensions field. If you pass arguments of any other type, the logger will ignore them. @@ -89,6 +94,34 @@ WARNING: For this feature to work, you need to pass a dictionary to the custom_d properties = {'custom_dimensions': {'key_1': 'value_1', 'key_2': 'value_2'}} logger.warning('action', extra=properties) +Modifying Logs +############## + +* You can pass a callback function to the exporter to process telemetry before it is exported. +* Your callback function can return `False` if you do not want this envelope exported. +* Your callback function must accept an [envelope](https://github.com/census-instrumentation/opencensus-python/blob/master/contrib/opencensus-ext-azure/opencensus/ext/azure/common/protocol.py#L86) data type as its parameter. +* You can see the schema for Azure Monitor data types in the envelopes [here](https://github.com/census-instrumentation/opencensus-python/blob/master/contrib/opencensus-ext-azure/opencensus/ext/azure/common/protocol.py). +* The `AzureLogHandler` handles `ExceptionData` and `MessageData` data types. + +.. 
code:: python + + import logging + + from opencensus.ext.azure.log_exporter import AzureLogHandler + + logger = logging.getLogger(__name__) + + # Callback function to append '_hello' to each log message telemetry + def callback_function(envelope): + envelope.data.baseData.message += '_hello' + return True + + handler = AzureLogHandler(connection_string='InstrumentationKey=') + handler.add_telemetry_processor(callback_function) + logger.addHandler(handler) + logger.warning('Hello, World!') + + Metrics ~~~~~~~ @@ -145,6 +178,9 @@ The **Azure Monitor Metrics Exporter** allows you to export metrics to `Azure Mo if __name__ == "__main__": main() +Standard Metrics +################ + The exporter also includes a set of standard metrics that are exported to Azure Monitor by default. .. code:: python @@ -179,6 +215,67 @@ Below is a list of standard metrics that are currently available: - Process CPU Usage (percentage) - Process Private Bytes (bytes) +Modifying Metrics +################# + +* You can pass a callback function to the exporter to process telemetry before it is exported. +* Your callback function can return `False` if you do not want this envelope exported. +* Your callback function must accept an [envelope](https://github.com/census-instrumentation/opencensus-python/blob/master/contrib/opencensus-ext-azure/opencensus/ext/azure/common/protocol.py#L86) data type as its parameter. +* You can see the schema for Azure Monitor data types in the envelopes [here](https://github.com/census-instrumentation/opencensus-python/blob/master/contrib/opencensus-ext-azure/opencensus/ext/azure/common/protocol.py). +* The `MetricsExporter` handles `MetricData` data types. + +.. 
code:: python + + import time + + from opencensus.ext.azure import metrics_exporter + from opencensus.stats import aggregation as aggregation_module + from opencensus.stats import measure as measure_module + from opencensus.stats import stats as stats_module + from opencensus.stats import view as view_module + from opencensus.tags import tag_map as tag_map_module + + stats = stats_module.stats + view_manager = stats.view_manager + stats_recorder = stats.stats_recorder + + CARROTS_MEASURE = measure_module.MeasureInt("carrots", + "number of carrots", + "carrots") + CARROTS_VIEW = view_module.View("carrots_view", + "number of carrots", + [], + CARROTS_MEASURE, + aggregation_module.CountAggregation()) + + # Callback function to only export the metric if value is greater than 0 + def callback_function(envelope): + return envelope.data.baseData.metrics[0].value > 0 + + def main(): + # Enable metrics + # Set the interval in seconds in which you want to send metrics + exporter = metrics_exporter.new_metrics_exporter(connection_string='InstrumentationKey=') + exporter.add_telemetry_processor(callback_function) + view_manager.register_exporter(exporter) + + view_manager.register_view(CARROTS_VIEW) + mmap = stats_recorder.new_measurement_map() + tmap = tag_map_module.TagMap() + + mmap.measure_int_put(CARROTS_MEASURE, 1000) + mmap.record(tmap) + # Default export interval is every 15.0s + # Your application should run for at least this amount + # of time so the exporter will meet this interval + # Sleep can fulfill this + time.sleep(60) + + print("Done recording metrics") + + if __name__ == "__main__": + main() + Trace ~~~~~ @@ -197,13 +294,18 @@ This example shows how to send a span "hello" to Azure Monitor. 
from opencensus.trace.tracer import Tracer tracer = Tracer( - exporter=AzureExporter(connection_string='InstrumentationKey='), + exporter=AzureExporter( + connection_string='InstrumentationKey=' + ), sampler=ProbabilitySampler(1.0) ) with tracer.span(name='hello'): print('Hello, World!') +Integrations +############ + OpenCensus also supports several `integrations `_ which allows OpenCensus to integrate with third party libraries. This example shows how to integrate with the `requests `_ library. @@ -225,14 +327,45 @@ This example shows how to integrate with the `requests 0: self.storage.put(envelopes, result) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py index 9b531b808..081dc727e 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py @@ -19,6 +19,7 @@ from opencensus.common import utils as common_utils from opencensus.ext.azure.common import Options, utils +from opencensus.ext.azure.common.processor import ProcessorMixin from opencensus.ext.azure.common.protocol import ( Data, DataPoint, @@ -35,7 +36,7 @@ logger = logging.getLogger(__name__) -class MetricsExporter(object): +class MetricsExporter(ProcessorMixin): """Metrics exporter for Microsoft Azure Monitor.""" def __init__(self, options=None): @@ -46,6 +47,8 @@ def __init__(self, options=None): if self.options.max_batch_size <= 0: raise ValueError('Max batch size must be at least 1.') self.max_batch_size = self.options.max_batch_size + self._telemetry_processors = [] + super(MetricsExporter, self).__init__() def export_metrics(self, metrics): if metrics: @@ -75,6 +78,7 @@ def export_metrics(self, metrics): batched_envelopes = list(common_utils.window( envelopes, self.max_batch_size)) for batch in batched_envelopes: + batch = self.apply_telemetry_processors(batch) 
self._transmit_without_retry(batch) def create_data_points(self, time_series, metric_descriptor): diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py index 52db582ab..ce904e673 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py @@ -17,6 +17,7 @@ from opencensus.common.schedule import QueueExitEvent from opencensus.ext.azure.common import Options, utils from opencensus.ext.azure.common.exporter import BaseExporter +from opencensus.ext.azure.common.processor import ProcessorMixin from opencensus.ext.azure.common.protocol import ( Data, Envelope, @@ -37,7 +38,7 @@ __all__ = ['AzureExporter'] -class AzureExporter(TransportMixin, BaseExporter): +class AzureExporter(BaseExporter, ProcessorMixin, TransportMixin): """An exporter that sends traces to Microsoft Azure Monitor. :param options: Options for the exporter. 
@@ -52,6 +53,7 @@ def __init__(self, **options): maintenance_period=self.options.storage_maintenance_period, retention_period=self.options.storage_retention_period, ) + self._telemetry_processors = [] super(AzureExporter, self).__init__(**options) def span_data_to_envelope(self, sd): @@ -152,6 +154,7 @@ def emit(self, batch, event=None): try: if batch: envelopes = [self.span_data_to_envelope(sd) for sd in batch] + envelopes = self.apply_telemetry_processors(envelopes) result = self._transmit(envelopes) if result > 0: self.storage.put(envelopes, result) diff --git a/contrib/opencensus-ext-azure/tests/test_processor.py b/contrib/opencensus-ext-azure/tests/test_processor.py new file mode 100644 index 000000000..7ec01eb83 --- /dev/null +++ b/contrib/opencensus-ext-azure/tests/test_processor.py @@ -0,0 +1,94 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +from opencensus.ext.azure.common.processor import ProcessorMixin +from opencensus.ext.azure.common.protocol import Envelope + + +# pylint: disable=W0212 +class TestProcessorMixin(unittest.TestCase): + def test_add(self): + mixin = ProcessorMixin() + mixin._telemetry_processors = [] + mixin.add_telemetry_processor(lambda: True) + self.assertEqual(len(mixin._telemetry_processors), 1) + + def test_clear(self): + mixin = ProcessorMixin() + mixin._telemetry_processors = [] + mixin.add_telemetry_processor(lambda: True) + self.assertEqual(len(mixin._telemetry_processors), 1) + mixin.clear_telemetry_processors() + self.assertEqual(len(mixin._telemetry_processors), 0) + + def test_apply(self): + mixin = ProcessorMixin() + mixin._telemetry_processors = [] + + def callback_function(envelope): + envelope.baseType += '_world' + mixin.add_telemetry_processor(callback_function) + envelope = Envelope() + envelope.baseType = 'type1' + mixin.apply_telemetry_processors([envelope]) + self.assertEqual(envelope.baseType, 'type1_world') + + def test_apply_multiple(self): + mixin = ProcessorMixin() + mixin._telemetry_processors = [] + + def callback_function(envelope): + envelope.baseType += '_world' + + def callback_function2(envelope): + envelope.baseType += '_world2' + mixin.add_telemetry_processor(callback_function) + mixin.add_telemetry_processor(callback_function2) + envelope = Envelope() + envelope.baseType = 'type1' + mixin.apply_telemetry_processors([envelope]) + self.assertEqual(envelope.baseType, 'type1_world_world2') + + def test_apply_exception(self): + mixin = ProcessorMixin() + mixin._telemetry_processors = [] + + def callback_function(envelope): + raise ValueError() + + def callback_function2(envelope): + envelope.baseType += '_world2' + mixin.add_telemetry_processor(callback_function) + mixin.add_telemetry_processor(callback_function2) + envelope = Envelope() + envelope.baseType = 'type1' + mixin.apply_telemetry_processors([envelope]) + 
self.assertEqual(envelope.baseType, 'type1_world2') + + def test_apply_not_accepted(self): + mixin = ProcessorMixin() + mixin._telemetry_processors = [] + + def callback_function(envelope): + return envelope.baseType == 'type2' + mixin.add_telemetry_processor(callback_function) + envelope = Envelope() + envelope.baseType = 'type1' + envelope2 = Envelope() + envelope2.baseType = 'type2' + envelopes = mixin.apply_telemetry_processors([envelope, envelope2]) + self.assertEqual(len(envelopes), 1) + self.assertEqual(envelopes[0].baseType, 'type2') From e877825d330c1af9a2852aca0b206ae16a0bd094 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 30 Jan 2020 10:26:07 -0800 Subject: [PATCH 36/79] Add local storage and retry logic for Azure Metrics Exporter + flush telemetry on exit (#845) --- .../ext/azure/metrics_exporter/__init__.py | 218 ++++-------- .../tests/test_azure_metrics_exporter.py | 324 ++---------------- 2 files changed, 90 insertions(+), 452 deletions(-) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py index 081dc727e..a5ba2a4ec 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py @@ -12,11 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import json +import atexit import logging -import requests - from opencensus.common import utils as common_utils from opencensus.ext.azure.common import Options, utils from opencensus.ext.azure.common.processor import ProcessorMixin @@ -26,6 +24,8 @@ Envelope, MetricData, ) +from opencensus.ext.azure.common.storage import LocalFileStorage +from opencensus.ext.azure.common.transport import TransportMixin from opencensus.ext.azure.metrics_exporter import standard_metrics from opencensus.metrics import transport from opencensus.metrics.export.metric_descriptor import MetricDescriptorType @@ -36,52 +36,67 @@ logger = logging.getLogger(__name__) -class MetricsExporter(ProcessorMixin): +class MetricsExporter(TransportMixin, ProcessorMixin): """Metrics exporter for Microsoft Azure Monitor.""" - def __init__(self, options=None): - if options is None: - options = Options() - self.options = options + def __init__(self, **options): + self.options = Options(**options) utils.validate_instrumentation_key(self.options.instrumentation_key) if self.options.max_batch_size <= 0: raise ValueError('Max batch size must be at least 1.') + self.export_interval = self.options.export_interval self.max_batch_size = self.options.max_batch_size self._telemetry_processors = [] + self.storage = LocalFileStorage( + path=self.options.storage_path, + max_size=self.options.storage_max_size, + maintenance_period=self.options.storage_maintenance_period, + retention_period=self.options.storage_retention_period, + ) super(MetricsExporter, self).__init__() def export_metrics(self, metrics): - if metrics: - envelopes = [] - for metric in metrics: - # No support for histogram aggregations - type_ = metric.descriptor.type - if type_ != MetricDescriptorType.CUMULATIVE_DISTRIBUTION: - md = metric.descriptor - # Each time series will be uniquely identified by its - # label values - for time_series in metric.time_series: - # Using stats, time_series should only have one point - # which contains the aggregated 
value - data_point = self.create_data_points( - time_series, md)[0] - # The timestamp is when the metric was recorded - time_stamp = time_series.points[0].timestamp - # Get the properties using label keys from metric and - # label values of the time series - properties = self.create_properties(time_series, md) - envelopes.append(self.create_envelope(data_point, - time_stamp, - properties)) - # Send data in batches of max_batch_size - if envelopes: - batched_envelopes = list(common_utils.window( - envelopes, self.max_batch_size)) - for batch in batched_envelopes: - batch = self.apply_telemetry_processors(batch) - self._transmit_without_retry(batch) - - def create_data_points(self, time_series, metric_descriptor): + envelopes = [] + for metric in metrics: + envelopes.extend(self.metric_to_envelopes(metric)) + # Send data in batches of max_batch_size + batched_envelopes = list(common_utils.window( + envelopes, self.max_batch_size)) + for batch in batched_envelopes: + batch = self.apply_telemetry_processors(batch) + result = self._transmit(batch) + if result > 0: + self.storage.put(batch, result) + + # If there is still room to transmit envelopes, transmit from storage + # if available + if len(envelopes) < self.options.max_batch_size: + self._transmit_from_storage() + + def metric_to_envelopes(self, metric): + envelopes = [] + # No support for histogram aggregations + if (metric.descriptor.type != + MetricDescriptorType.CUMULATIVE_DISTRIBUTION): + md = metric.descriptor + # Each time series will be uniquely identified by its + # label values + for time_series in metric.time_series: + # Using stats, time_series should only have one + # point which contains the aggregated value + data_point = self._create_data_points( + time_series, md)[0] + # The timestamp is when the metric was recorded + timestamp = time_series.points[0].timestamp + # Get the properties using label keys from metric + # and label values of the time series + properties = 
self._create_properties(time_series, md) + envelopes.append(self._create_envelope(data_point, + timestamp, + properties)) + return envelopes + + def _create_data_points(self, time_series, metric_descriptor): """Convert a metric's OC time series to list of Azure data points.""" data_points = [] for point in time_series.points: @@ -92,7 +107,7 @@ def create_data_points(self, time_series, metric_descriptor): data_points.append(data_point) return data_points - def create_properties(self, time_series, metric_descriptor): + def _create_properties(self, time_series, metric_descriptor): properties = {} # We construct a properties map from the label keys and values. We # assume the ordering is already correct @@ -104,11 +119,11 @@ def create_properties(self, time_series, metric_descriptor): properties[metric_descriptor.label_keys[i].key] = value return properties - def create_envelope(self, data_point, time_stamp, properties): + def _create_envelope(self, data_point, timestamp, properties): envelope = Envelope( iKey=self.options.instrumentation_key, tags=dict(utils.azure_monitor_context), - time=time_stamp.isoformat(), + time=timestamp.isoformat(), ) envelope.name = "Microsoft.ApplicationInsights.Metric" data = MetricData( @@ -118,125 +133,14 @@ def create_envelope(self, data_point, time_stamp, properties): envelope.data = Data(baseData=data, baseType="MetricData") return envelope - def _transmit_without_retry(self, envelopes): - # Contains logic from transport._transmit - # TODO: Remove this function from exporter and consolidate with - # transport._transmit to cover all exporter use cases. Uses cases - # pertain to properly handling failures and implementing a retry - # policy for this exporter. - # TODO: implement retry policy - """ - Transmit the data envelopes to the ingestion service. - Does not perform retry logic. For partial success and - non-retryable failure, simply outputs result to logs. - This function should never throw exception. 
- """ - try: - response = requests.post( - url=self.options.endpoint, - data=json.dumps(envelopes), - headers={ - 'Accept': 'application/json', - 'Content-Type': 'application/json; charset=utf-8', - }, - timeout=self.options.timeout, - ) - except Exception as ex: - # No retry policy, log output - logger.warning('Transient client side error %s.', ex) - return - - text = 'N/A' - data = None - # Handle the possible results from the response - if response is None: - logger.warning('Error: cannot read response.') - return - try: - status_code = response.status_code - except Exception as ex: - logger.warning('Error while reading response status code %s.', ex) - return - try: - text = response.text - except Exception as ex: - logger.warning('Error while reading response body %s.', ex) - return - try: - data = json.loads(text) - except Exception as ex: - logger.warning('Error while loading ' + - 'json from response body %s.', ex) - return - if status_code == 200: - logger.info('Transmission succeeded: %s.', text) - return - # Check for retryable partial content - if status_code == 206: - if data: - try: - retryable_envelopes = [] - for error in data['errors']: - if error['statusCode'] in ( - 429, # Too Many Requests - 500, # Internal Server Error - 503, # Service Unavailable - ): - retryable_envelopes.append( - envelopes[error['index']]) - else: - logger.error( - 'Data drop %s: %s %s.', - error['statusCode'], - error['message'], - envelopes[error['index']], - ) - # show the envelopes that can be retried manually for - # visibility - if retryable_envelopes: - logger.warning( - 'Error while processing data. Data dropped. 
' + - 'Consider manually retrying for envelopes: %s.', - retryable_envelopes - ) - return - except Exception: - logger.exception( - 'Error while processing %s: %s.', - status_code, - text - ) - return - # Check for non-retryable result - if status_code in ( - 206, # Partial Content - 429, # Too Many Requests - 500, # Internal Server Error - 503, # Service Unavailable - ): - # server side error (retryable) - logger.warning( - 'Transient server side error %s: %s. ' + - 'Consider manually trying.', - status_code, - text, - ) - else: - # server side error (non-retryable) - logger.error( - 'Non-retryable server side error %s: %s.', - status_code, - text, - ) - def new_metrics_exporter(**options): - options_ = Options(**options) - exporter = MetricsExporter(options=options_) + exporter = MetricsExporter(**options) producers = [stats_module.stats] - if options_.enable_standard_metrics: + if exporter.options.enable_standard_metrics: producers.append(standard_metrics.producer) transport.get_exporter_thread(producers, exporter, - interval=options_.export_interval) + interval=exporter.options.export_interval) + atexit.register(exporter.export_metrics, stats_module.stats.get_metrics()) return exporter diff --git a/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py index 63c802e0d..9c84118f0 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py @@ -20,7 +20,7 @@ from opencensus.common import utils from opencensus.ext.azure import metrics_exporter from opencensus.ext.azure.common import Options -from opencensus.ext.azure.common.protocol import DataPoint, Envelope +from opencensus.ext.azure.common.protocol import DataPoint from opencensus.ext.azure.metrics_exporter import standard_metrics from opencensus.metrics import label_key, label_value from opencensus.metrics.export import ( @@ -56,10 +56,6 @@ def 
create_metric(): return mm -def create_envelope(): - return Envelope._default - - class TestAzureMetricsExporter(unittest.TestCase): def test_constructor_missing_key(self): instrumentation_key = Options._default.instrumentation_key @@ -69,20 +65,18 @@ def test_constructor_missing_key(self): Options._default.instrumentation_key = instrumentation_key def test_constructor_invalid_batch_size(self): - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - max_batch_size=-1) self.assertRaises( ValueError, - lambda: metrics_exporter.MetricsExporter(options=options) - ) + lambda: metrics_exporter.MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + max_batch_size=-1 + )) @mock.patch('requests.post', return_value=mock.Mock()) def test_export_metrics(self, requests_mock): metric = create_metric() - options = Options( + exporter = metrics_exporter.MetricsExporter( instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) requests_mock.return_value.text = '{"itemsReceived":1,'\ '"itemsAccepted":1,'\ '"errors":[]}' @@ -96,18 +90,16 @@ def test_export_metrics(self, requests_mock): def test_export_metrics_histogram(self): metric = create_metric() - options = Options( + exporter = metrics_exporter.MetricsExporter( instrumentation_key='12345678-1234-5678-abcd-12345678abcd') metric.descriptor._type = MetricDescriptorType.CUMULATIVE_DISTRIBUTION - exporter = metrics_exporter.MetricsExporter(options) self.assertIsNone(exporter.export_metrics([metric])) @mock.patch('requests.post', return_value=mock.Mock()) def test_export_metrics_empty(self, requests_mock): - options = Options( + exporter = metrics_exporter.MetricsExporter( instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) exporter.export_metrics([]) self.assertEqual(len(requests_mock.call_args_list), 0) @@ -115,10 +107,9 @@ def test_export_metrics_empty(self, 
requests_mock): @mock.patch('requests.post', return_value=mock.Mock()) def test_export_metrics_full_batch(self, requests_mock): metric = create_metric() - options = Options( + exporter = metrics_exporter.MetricsExporter( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', max_batch_size=1) - exporter = metrics_exporter.MetricsExporter(options) requests_mock.return_value.status_code = 200 requests_mock.return_value.text = '{"itemsReceived":1,'\ '"itemsAccepted":1,'\ @@ -130,270 +121,13 @@ def test_export_metrics_full_batch(self, requests_mock): self.assertTrue('metrics' in post_body) self.assertTrue('properties' in post_body) - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.warning', return_value=mock.Mock()) - def test_transmit_client_error(self, logger_mock): - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry(mock.Mock()) - - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('requests.post', return_value=None) - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.warning', return_value=mock.Mock()) - def test_transmit_no_response(self, requests_mock, logger_mock): - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.warning', return_value=mock.Mock()) - def test_transmit_no_status_code(self, logger_mock): - with mock.patch('requests.post') as requests_mock: - type(requests_mock.return_value).status_code = mock.PropertyMock( - side_effect=Exception()) - envelope = create_envelope() - options = Options( - 
instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.warning', return_value=mock.Mock()) - def test_transmit_no_response_body(self, logger_mock): - with mock.patch('requests.post') as requests_mock: - type(requests_mock.return_value).text = mock.PropertyMock( - side_effect=Exception()) - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.warning', return_value=mock.Mock()) - def test_transmit_invalid_response_body(self, logger_mock): - with mock.patch('requests.post') as requests_mock: - type(requests_mock.return_value).text = mock.PropertyMock( - return_value='invalid') - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.info', return_value=mock.Mock()) - def test_transmit_success(self, logger_mock): - with mock.patch('requests.post') as requests_mock: - text = '{"itemsReceived":1,'\ - '"itemsAccepted":1,'\ - '"errors":[]}' - type(requests_mock.return_value).text = mock.PropertyMock( - return_value=text) - type(requests_mock.return_value).status_code = mock.PropertyMock( - return_value=200) - 
envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.warning', return_value=mock.Mock()) - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.json.loads', return_value=None) - def test_transmit_none_data_retryable(self, logger_mock, json_mock): - with mock.patch('requests.post') as requests_mock: - text = '{"itemsReceived":1,'\ - '"itemsAccepted":1,'\ - '"errors":[{"statusCode":500, "index":0}]}' - type(requests_mock.return_value).text = mock.PropertyMock( - return_value=text) - type(requests_mock.return_value).status_code = mock.PropertyMock( - return_value=206) - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.error', return_value=mock.Mock()) - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.json.loads', return_value=None) - def test_transmit_none_data_non_retryable(self, logger_mock, json_mock): - with mock.patch('requests.post') as requests_mock: - text = '{"itemsReceived":1,'\ - '"itemsAccepted":1,'\ - '"errors":[{"statusCode":500, "index":0}]}' - type(requests_mock.return_value).text = mock.PropertyMock( - return_value=text) - type(requests_mock.return_value).status_code = mock.PropertyMock( - return_value=402) - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = 
metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.exception', return_value=mock.Mock()) - def test_transmit_partial_exception(self, logger_mock): - with mock.patch('requests.post') as requests_mock: - text = '{"itemsReceived":1,'\ - '"itemsAccepted":1}' - type(requests_mock.return_value).text = mock.PropertyMock( - return_value=text) - type(requests_mock.return_value).status_code = mock.PropertyMock( - return_value=206) - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.warning', return_value=mock.Mock()) - def test_transmit_partial_retryable(self, logger_mock): - with mock.patch('requests.post') as requests_mock: - text = '{"itemsReceived":1,'\ - '"itemsAccepted":1,'\ - '"errors":[{"statusCode":429, "index":0}]}' - type(requests_mock.return_value).text = mock.PropertyMock( - return_value=text) - type(requests_mock.return_value).status_code = mock.PropertyMock( - return_value=206) - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.error', return_value=mock.Mock()) - def test_transmit_partial_non_retryable(self, logger_mock): - with mock.patch('requests.post') as 
requests_mock: - text = '{"itemsReceived":1,'\ - '"itemsAccepted":1,'\ - '"errors":[{"statusCode":402,'\ - '"index":0,"message":"error"}]}' - type(requests_mock.return_value).text = mock.PropertyMock( - return_value=text) - type(requests_mock.return_value).status_code = mock.PropertyMock( - return_value=206) - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.error', return_value=mock.Mock()) - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.warning', return_value=mock.Mock()) - def test_transmit_partial_mix_retryable(self, logger_mock, logger2_mock): - with mock.patch('requests.post') as requests_mock: - text = '{"itemsReceived":1,'\ - '"itemsAccepted":0,'\ - '"errors":[{"statusCode":402,'\ - '"index":0,"message":"error"},'\ - '{"statusCode":429, "index":0}]}' - type(requests_mock.return_value).text = mock.PropertyMock( - return_value=text) - type(requests_mock.return_value).status_code = mock.PropertyMock( - return_value=206) - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - self.assertEqual(len(logger2_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.warning', return_value=mock.Mock()) - def test_transmit_server_retryable(self, logger_mock): - with mock.patch('requests.post') as requests_mock: - text = '{"itemsReceived":1,'\ - '"itemsAccepted":1,'\ - '"errors":[{"statusCode":500, 
"index":0}]}' - type(requests_mock.return_value).text = mock.PropertyMock( - return_value=text) - type(requests_mock.return_value).status_code = mock.PropertyMock( - return_value=500) - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - - @mock.patch('opencensus.ext.azure.metrics_exporter' + - '.logger.error', return_value=mock.Mock()) - def test_transmit_server_non_retryable(self, logger_mock): - with mock.patch('requests.post') as requests_mock: - text = '{"itemsReceived":1,'\ - '"itemsAccepted":1,'\ - '"errors":[{"statusCode":402, "index":0}]}' - type(requests_mock.return_value).text = mock.PropertyMock( - return_value=text) - type(requests_mock.return_value).status_code = mock.PropertyMock( - return_value=402) - envelope = create_envelope() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - exporter._transmit_without_retry([envelope]) - - self.assertEqual(len(requests_mock.call_args_list), 1) - self.assertEqual(len(logger_mock.call_args_list), 1) - def test_create_data_points(self): metric = create_metric() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - data_points = exporter.create_data_points(metric.time_series[0], - metric.descriptor) + exporter = metrics_exporter.MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) + data_points = exporter._create_data_points(metric.time_series[0], + metric.descriptor) self.assertEqual(len(data_points), 1) data_point = data_points[0] @@ -404,42 +138,42 @@ def test_create_data_points(self): def test_create_properties(self): metric = 
create_metric() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) - properties = exporter.create_properties(metric.time_series[0], - metric.descriptor) + exporter = metrics_exporter.MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) + properties = exporter._create_properties(metric.time_series[0], + metric.descriptor) self.assertEqual(len(properties), 1) self.assertEqual(properties['key'], 'val') def test_create_properties_none(self): metric = create_metric() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) + exporter = metrics_exporter.MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) metric.time_series[0].label_values[0]._value = None - properties = exporter.create_properties(metric.time_series[0], - metric.descriptor) + properties = exporter._create_properties(metric.time_series[0], + metric.descriptor) self.assertEqual(len(properties), 1) self.assertEqual(properties['key'], 'null') def test_create_envelope(self): metric = create_metric() - options = Options( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd') - exporter = metrics_exporter.MetricsExporter(options) + exporter = metrics_exporter.MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) value = metric.time_series[0].points[0].value.value data_point = DataPoint(ns=metric.descriptor.name, name=metric.descriptor.name, value=value) timestamp = datetime(2019, 3, 20, 21, 34, 0, 537954) properties = {'url': 'website.com'} - envelope = exporter.create_envelope(data_point, timestamp, properties) + envelope = exporter._create_envelope(data_point, timestamp, properties) self.assertTrue('iKey' in envelope) - self.assertEqual(envelope.iKey, options.instrumentation_key) + self.assertEqual(envelope.iKey, '12345678-1234-5678-abcd-12345678abcd') 
self.assertTrue('tags' in envelope) self.assertTrue('time' in envelope) self.assertEqual(envelope.time, timestamp.isoformat()) From bb02e5bc7c298743a0c86088ed960f650e5c171a Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Fri, 21 Feb 2020 11:14:31 -0800 Subject: [PATCH 37/79] update changelog from v0.7.7 release (#854) --- CHANGELOG.md | 9 +++++++++ contrib/opencensus-ext-httplib/CHANGELOG.md | 5 +++++ contrib/opencensus-ext-requests/CHANGELOG.md | 5 +++++ 3 files changed, 19 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3218a8ebb..73ceed5c6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,15 @@ ## Unreleased +## 0.7.7 +Released 2020-02-03 + +- Updated `azure` module +([#837](https://github.com/census-instrumentation/opencensus-python/pull/837), + [#845](https://github.com/census-instrumentation/opencensus-python/pull/845), + [#848](https://github.com/census-instrumentation/opencensus-python/pull/848), + [#851](https://github.com/census-instrumentation/opencensus-python/pull/851)) + ## 0.7.6 Released 2019-11-26 diff --git a/contrib/opencensus-ext-httplib/CHANGELOG.md b/contrib/opencensus-ext-httplib/CHANGELOG.md index d2241c7af..605fd3a33 100644 --- a/contrib/opencensus-ext-httplib/CHANGELOG.md +++ b/contrib/opencensus-ext-httplib/CHANGELOG.md @@ -2,6 +2,11 @@ ## Unreleased +## 0.7.3 +Released 2020-02-03 + +- Added `component` span attribute + ## 0.7.2 Released 2019-08-26 diff --git a/contrib/opencensus-ext-requests/CHANGELOG.md b/contrib/opencensus-ext-requests/CHANGELOG.md index cacdd28a3..4c2c4cf24 100644 --- a/contrib/opencensus-ext-requests/CHANGELOG.md +++ b/contrib/opencensus-ext-requests/CHANGELOG.md @@ -2,6 +2,11 @@ ## Unreleased +## 0.7.3 +Released 2020-02-03 + +- Added `component` span attribute + ## 0.7.2 Released 2019-08-26 From 99821a0b826445cecce0881a01790c8cc6958ceb Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 7 Apr 2020 10:59:02 -0700 Subject: [PATCH 38/79] Rename long named tests + refactor tox/nox 
(#868) --- .circleci/config.yml | 2 +- .../tests/test_azure_trace_exporter.py | 6 +- noxfile.py | 4 +- tox.ini | 91 ++++++++++--------- 4 files changed, 53 insertions(+), 50 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index c490c04ef..a23cc35bc 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -10,7 +10,7 @@ jobs: ignore: - gh-pages docker: - - image: googleapis/nox:0.17.0 + - image: googleapis/nox:0.18.2 - image: mysql:5.7 environment: MYSQL_ROOT_HOST: "%" diff --git a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py index 3543157b5..716c9dc55 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py @@ -745,7 +745,7 @@ def test_transmission_nothing(self): exporter._transmit_from_storage() exporter._stop() - def test_transmission_request_exception(self): + def test_transmission_pre_exception(self): exporter = trace_exporter.AzureExporter( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', storage_path=os.path.join(TEST_FOLDER, self.id()), @@ -771,7 +771,7 @@ def test_transmission_lease_failure(self, requests_mock): self.assertTrue(exporter.storage.get()) exporter._stop() - def test_transmission_response_exception(self): + def test_transmission_exception(self): exporter = trace_exporter.AzureExporter( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', storage_path=os.path.join(TEST_FOLDER, self.id()), @@ -840,7 +840,7 @@ def test_transmission_206_500(self): self.assertEqual(exporter.storage.get().get(), (3,)) exporter._stop() - def test_transmission_206_nothing_to_retry(self): + def test_transmission_206_no_retry(self): exporter = trace_exporter.AzureExporter( instrumentation_key='12345678-1234-5678-abcd-12345678abcd', storage_path=os.path.join(TEST_FOLDER, self.id()), diff --git a/noxfile.py b/noxfile.py index 571ac6d64..abf5aa8c4 100644 
--- a/noxfile.py +++ b/noxfile.py @@ -50,7 +50,7 @@ def _install_dev_packages(session): def _install_test_dependencies(session): - session.install('mock') + session.install('mock==3.0.5') session.install('pytest==4.6.4') # 842 - Unit tests failing on CI due to failed import for coverage # Might have something to do with the CircleCI image @@ -59,7 +59,7 @@ def _install_test_dependencies(session): session.install('unittest2') -@nox.session(python=['2.7', '3.4', '3.5', '3.6']) +@nox.session(python=['2.7', '3.5', '3.6']) def unit(session): """Run the unit test suite.""" diff --git a/tox.ini b/tox.ini index d021bf2c4..a3c253fc4 100644 --- a/tox.ini +++ b/tox.ini @@ -1,55 +1,58 @@ [tox] -envlist = py{27,34,35,36,37}-unit, py37-lint, py37-setup, py37-docs +envlist = + py{27,34,35,36,37}-unit + py37-lint + py37-setup + py37-docs [testenv] install_command = python -m pip install {opts} {packages} deps = - py{27,34,35,36,37}-unit,py37-lint: mock - py{27,34,35,36,37}-unit,py37-lint: pytest==4.6.4 - py{27,34,35,36,37}-unit,py37-lint: pytest-cov - py{27,34,35,36,37}-unit,py37-lint: retrying - py{27,34,35,36,37}-unit,py37-lint: unittest2 - py{27,34,35,36,37}-unit,py37-lint,py37-setup,py37-docs: -e context/opencensus-context - py{27,34,35,36,37}-unit,py37-lint,py37-docs: -e contrib/opencensus-correlation - py{27,34,35,36,37}-unit,py37-lint,py37-docs: -e . 
- py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-azure - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-datadog - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-dbapi - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-django - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-flask - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-gevent - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-grpc - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-httplib - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-jaeger - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-logging - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-mysql - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-ocagent - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-postgresql - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-prometheus - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-pymongo - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-pymysql - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-pyramid - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-requests - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-sqlalchemy - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-stackdriver - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-threading - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-zipkin - py{27,34,35,36,37}-unit,py37-lint: -e contrib/opencensus-ext-google-cloud-clientlibs - py37-lint: flake8 - py37-setup: docutils - py37-setup: pygments - py37-docs: setuptools >= 36.4.0 - py37-docs: sphinx >= 1.6.3 - isort ~= 4.3.21 + unit,lint: mock==3.0.5 + unit,lint: pytest==4.6.4 + unit,lint: pytest-cov + unit,lint: retrying + unit,lint: unittest2 + unit,lint,py37-setup,docs: -e context/opencensus-context + unit,lint,docs: -e 
contrib/opencensus-correlation + unit,lint,docs: -e . + unit,lint: -e contrib/opencensus-ext-azure + unit,lint: -e contrib/opencensus-ext-datadog + unit,lint: -e contrib/opencensus-ext-dbapi + unit,lint: -e contrib/opencensus-ext-django + unit,lint: -e contrib/opencensus-ext-flask + unit,lint: -e contrib/opencensus-ext-gevent + unit,lint: -e contrib/opencensus-ext-grpc + unit,lint: -e contrib/opencensus-ext-httplib + unit,lint: -e contrib/opencensus-ext-jaeger + unit,lint: -e contrib/opencensus-ext-logging + unit,lint: -e contrib/opencensus-ext-mysql + unit,lint: -e contrib/opencensus-ext-ocagent + unit,lint: -e contrib/opencensus-ext-postgresql + unit,lint: -e contrib/opencensus-ext-prometheus + unit,lint: -e contrib/opencensus-ext-pymongo + unit,lint: -e contrib/opencensus-ext-pymysql + unit,lint: -e contrib/opencensus-ext-pyramid + unit,lint: -e contrib/opencensus-ext-requests + unit,lint: -e contrib/opencensus-ext-sqlalchemy + unit,lint: -e contrib/opencensus-ext-stackdriver + unit,lint: -e contrib/opencensus-ext-threading + unit,lint: -e contrib/opencensus-ext-zipkin + unit,lint: -e contrib/opencensus-ext-google-cloud-clientlibs + lint: flake8 + lint: isort ~= 4.3.21 + setup: docutils + setup: pygments + docs: setuptools >= 36.4.0 + docs: sphinx >= 1.6.3 commands = - py{27,34,35,36,37}-unit: py.test --quiet --cov={envdir}/opencensus --cov=context --cov=contrib --cov-report term-missing --cov-config=.coveragerc --cov-fail-under=97 tests/unit/ context/ contrib/ - isort --check-only --diff --recursive . - ; TODO: System tests - py37-lint: flake8 context/ contrib/ opencensus/ tests/ examples/ - py37-lint: - bash ./scripts/pylint.sh + unit: py.test --quiet --cov={envdir}/opencensus --cov=context --cov=contrib --cov-report term-missing --cov-config=.coveragerc --cov-fail-under=97 tests/unit/ context/ contrib/ + ; TODO system tests + lint: isort --check-only --diff --recursive . 
+ lint: flake8 context/ contrib/ opencensus/ tests/ examples/ + lint: - bash ./scripts/pylint.sh py37-setup: python setup.py check --restructuredtext --strict py37-docs: bash ./scripts/update_docs.sh ; TODO deployment - From d6114d791dab48b8afc4f88c4355ec95c3154b59 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Mon, 8 Jun 2020 14:55:02 -0700 Subject: [PATCH 39/79] Implement max size logic for local storage for Azure exporters (#876) --- .../opencensus/ext/azure/common/__init__.py | 2 +- .../opencensus/ext/azure/common/storage.py | 32 +++++++++++++- .../tests/test_storage.py | 44 ++++++++++++++++++- 3 files changed, 75 insertions(+), 3 deletions(-) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py index 6b763c037..ce6cd801b 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py @@ -97,7 +97,7 @@ def __init__(self, *args, **kwargs): minimum_retry_interval=60, # minimum retry interval in seconds proxy=None, storage_maintenance_period=60, - storage_max_size=100*1024*1024, + storage_max_size=50*1024*1024, # 50MiB storage_path=os.path.join( os.path.expanduser('~'), '.opencensus', diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py index 55f375fb4..cf9283cc0 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py @@ -1,10 +1,13 @@ import datetime import json +import logging import os import random from opencensus.common.schedule import PeriodicTask +logger = logging.getLogger(__name__) + def _fmt(timestamp): return timestamp.strftime('%Y-%m-%dT%H%M%S.%f') @@ -77,7 +80,7 @@ class LocalFileStorage(object): def __init__( self, path, - max_size=100*1024*1024, # 100MB + 
max_size=50*1024*1024, # 50MiB maintenance_period=60, # 1 minute retention_period=7*24*60*60, # 7 days write_timeout=60, # 1 minute @@ -162,6 +165,8 @@ def get(self): return None def put(self, data, lease_period=0, silent=False): + if not self._check_storage_size(): + return None blob = LocalFileBlob(os.path.join( self.path, '{}-{}.blob'.format( @@ -170,3 +175,28 @@ def put(self, data, lease_period=0, silent=False): ), )) return blob.put(data, lease_period=lease_period, silent=silent) + + def _check_storage_size(self): + size = 0 + for dirpath, dirnames, filenames in os.walk(self.path): + for f in filenames: + fp = os.path.join(dirpath, f) + # skip if it is symbolic link + if not os.path.islink(fp): + try: + size += os.path.getsize(fp) + except OSError: + logger.error( + "Path %s does not exist or is inaccessible.", fp + ) + continue + if size >= self.max_size: + logger.warning( + "Persistent storage max capacity has been " + "reached. Currently at %fKB. Telemetry will be " + "lost. Please consider increasing the value of " + "'storage_max_size' in exporter config.", + format(size/1024) + ) + return False + return True diff --git a/contrib/opencensus-ext-azure/tests/test_storage.py b/contrib/opencensus-ext-azure/tests/test_storage.py index b5776926d..98e1196b3 100644 --- a/contrib/opencensus-ext-azure/tests/test_storage.py +++ b/contrib/opencensus-ext-azure/tests/test_storage.py @@ -25,7 +25,7 @@ _seconds, ) -TEST_FOLDER = os.path.abspath('.test') +TEST_FOLDER = os.path.abspath('.test.storage') def setUpModule(): @@ -116,6 +116,48 @@ def test_put(self): self.assertIsNone(stor.put(input, silent=True)) self.assertRaises(Exception, lambda: stor.put(input)) + def test_put_max_size(self): + input = (1, 2, 3) + with LocalFileStorage(os.path.join(TEST_FOLDER, 'asd')) as stor: + size_mock = mock.Mock() + size_mock.return_value = False + stor._check_storage_size = size_mock + stor.put(input) + self.assertEqual(stor.get(), None) + + def test_check_storage_size_full(self): + 
input = (1, 2, 3) + with LocalFileStorage(os.path.join(TEST_FOLDER, 'asd2'), 1) as stor: + stor.put(input) + self.assertFalse(stor._check_storage_size()) + + def test_check_storage_size_not_full(self): + input = (1, 2, 3) + with LocalFileStorage(os.path.join(TEST_FOLDER, 'asd3'), 1000) as stor: + stor.put(input) + self.assertTrue(stor._check_storage_size()) + + def test_check_storage_size_no_files(self): + with LocalFileStorage(os.path.join(TEST_FOLDER, 'asd3'), 1000) as stor: + self.assertTrue(stor._check_storage_size()) + + def test_check_storage_size_links(self): + input = (1, 2, 3) + with LocalFileStorage(os.path.join(TEST_FOLDER, 'asd4'), 1000) as stor: + stor.put(input) + with mock.patch('os.path.islink') as os_mock: + os_mock.return_value = True + self.assertTrue(stor._check_storage_size()) + + def test_check_storage_size_error(self): + input = (1, 2, 3) + with LocalFileStorage(os.path.join(TEST_FOLDER, 'asd5'), 1) as stor: + with mock.patch('os.path.getsize', side_effect=throw(OSError)): + stor.put(input) + with mock.patch('os.path.islink') as os_mock: + os_mock.return_value = True + self.assertTrue(stor._check_storage_size()) + def test_maintanence_routine(self): with mock.patch('os.makedirs') as m: m.return_value = None From 8eb82ef46db796cdd504c3f2bfdb317cb0f2d70f Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 11 Jun 2020 17:26:04 -0700 Subject: [PATCH 40/79] Update CODEOWNERS (#906) --- .github/CODEOWNERS | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 02bf84a10..ce275baf7 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,6 +2,6 @@ # This file controls who is tagged for review for any given pull request. 
# For anything not explicitly taken by someone else: -* @census-instrumentation/global-owners @c24t @reyang @songy23 @victoraugustolls +* @census-instrumentation/global-owners @c24t @lzchen @reyang @songy23 @victoraugustolls -/contrib/opencensus-ext-azure/ @lzchen +/contrib/opencensus-ext-azure/ @hectorhdzg From fce9d22435e7b342175b5ac370922527bcc237fd Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 11 Jun 2020 17:51:11 -0700 Subject: [PATCH 41/79] Update CODEOWNERS (#907) --- .github/CODEOWNERS | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index ce275baf7..a599a7169 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,6 +2,5 @@ # This file controls who is tagged for review for any given pull request. # For anything not explicitly taken by someone else: -* @census-instrumentation/global-owners @c24t @lzchen @reyang @songy23 @victoraugustolls +* @census-instrumentation/global-owners @c24t @hectorhdzg @lzchen @reyang @songy23 @victoraugustolls -/contrib/opencensus-ext-azure/ @hectorhdzg From 7ec4ff6c3cdf0dffdae12fea7659e237eaa4825b Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 11 Jun 2020 18:35:55 -0700 Subject: [PATCH 42/79] Transport + storage changes (#903) --- README.rst | 4 + contrib/opencensus-ext-azure/CHANGELOG.md | 3 + .../opencensus/ext/azure/common/__init__.py | 19 +- .../opencensus/ext/azure/common/storage.py | 32 +- .../opencensus/ext/azure/common/transport.py | 13 +- .../metrics_exporter/standard_metrics/cpu.py | 1 + .../standard_metrics/memory.py | 1 + .../standard_metrics/process.py | 2 + .../tests/test_azure_trace_exporter.py | 181 -- ...t_processor.py => test_processor_mixin.py} | 0 .../tests/test_storage.py | 69 +- .../tests/test_transport_mixin.py | 225 ++ .../ext/ocagent/stats_exporter/__init__.py | 4 +- .../tests/test_stackdriver_exporter.py | 1738 +++++----- .../tests/test_stackdriver_stats.py | 2781 +++++++++-------- tox.ini | 2 +- 16 files changed, 2563 
insertions(+), 2512 deletions(-) rename contrib/opencensus-ext-azure/tests/{test_processor.py => test_processor_mixin.py} (100%) create mode 100644 contrib/opencensus-ext-azure/tests/test_transport_mixin.py diff --git a/README.rst b/README.rst index e056d4c4d..3532ec6c1 100644 --- a/README.rst +++ b/README.rst @@ -2,11 +2,15 @@ OpenCensus - A stats collection and distributed tracing framework ================================================================= |gitter| +|travisci| |circleci| |pypi| |compat_check_pypi| |compat_check_github| + +.. |travisci| image:: https://travis-ci.org/census-instrumentation/opencensus-python.svg?branch=master + :target: https://travis-ci.org/census-instrumentation/opencensus-python .. |circleci| image:: https://circleci.com/gh/census-instrumentation/opencensus-python.svg?style=shield :target: https://circleci.com/gh/census-instrumentation/opencensus-python .. |gitter| image:: https://badges.gitter.im/census-instrumentation/lobby.svg diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 148905dce..fcee072cb 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased +- Change default path of local storage + ([#903](https://github.com/census-instrumentation/opencensus-python/pull/903)) + ## 1.0.1 Released 2019-11-26 diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py index ce6cd801b..fcb25acb5 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py @@ -13,15 +13,17 @@ # limitations under the License. 
import os -import sys +import tempfile from opencensus.ext.azure.common.protocol import BaseObject INGESTION_ENDPOINT = 'ingestionendpoint' INSTRUMENTATION_KEY = 'instrumentationkey' +TEMPDIR_PREFIX = "opencensus-python-" def process_options(options): + # Connection string/ikey code_cs = parse_connection_string(options.connection_string) code_ikey = options.instrumentation_key env_cs = parse_connection_string( @@ -46,6 +48,14 @@ def process_options(options): or 'https://dc.services.visualstudio.com' options.endpoint = endpoint + '/v2/track' + # storage path + if options.storage_path is None: + TEMPDIR_SUFFIX = options.instrumentation_key or "" + options.storage_path = os.path.join( + tempfile.gettempdir(), + TEMPDIR_PREFIX + TEMPDIR_SUFFIX + ) + def parse_connection_string(connection_string): if connection_string is None: @@ -98,12 +108,7 @@ def __init__(self, *args, **kwargs): proxy=None, storage_maintenance_period=60, storage_max_size=50*1024*1024, # 50MiB - storage_path=os.path.join( - os.path.expanduser('~'), - '.opencensus', - '.azure', - os.path.basename(sys.argv[0]) or '.console', - ), + storage_path=None, storage_retention_period=7*24*60*60, timeout=10.0, # networking timeout in seconds ) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py index cf9283cc0..304b1f5e3 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py @@ -25,14 +25,13 @@ class LocalFileBlob(object): def __init__(self, fullpath): self.fullpath = fullpath - def delete(self, silent=False): + def delete(self): try: os.remove(self.fullpath) except Exception: - if not silent: - raise + pass # keep silent - def get(self, silent=False): + def get(self): try: with open(self.fullpath, 'r') as file: return tuple( @@ -40,10 +39,9 @@ def get(self, silent=False): for line in file.readlines() ) except 
Exception: - if not silent: - raise + pass # keep silent - def put(self, data, lease_period=0, silent=False): + def put(self, data, lease_period=0): try: fullpath = self.fullpath + '.tmp' with open(fullpath, 'w') as file: @@ -59,8 +57,7 @@ def put(self, data, lease_period=0, silent=False): os.rename(fullpath, self.fullpath) return self except Exception: - if not silent: - raise + pass # keep silent def lease(self, period): timestamp = _now() + _seconds(period) @@ -90,11 +87,11 @@ def __init__( self.maintenance_period = maintenance_period self.retention_period = retention_period self.write_timeout = write_timeout - self._maintenance_routine(silent=False) + # Run maintenance routine once upon instantiating + self._maintenance_routine() self._maintenance_task = PeriodicTask( interval=self.maintenance_period, function=self._maintenance_routine, - kwargs={'silent': True}, ) self._maintenance_task.daemon = True self._maintenance_task.start() @@ -109,19 +106,18 @@ def __enter__(self): def __exit__(self, type, value, traceback): self.close() - def _maintenance_routine(self, silent=False): + def _maintenance_routine(self): try: if not os.path.isdir(self.path): os.makedirs(self.path) except Exception: - if not silent: - raise + # Race case will throw OSError which we can ignore + pass try: for blob in self.gets(): pass except Exception: - if not silent: - raise + pass # keep silent def gets(self): now = _now() @@ -164,7 +160,7 @@ def get(self): pass return None - def put(self, data, lease_period=0, silent=False): + def put(self, data, lease_period=0): if not self._check_storage_size(): return None blob = LocalFileBlob(os.path.join( @@ -174,7 +170,7 @@ def put(self, data, lease_period=0, silent=False): '{:08x}'.format(random.getrandbits(32)), # thread-safe random ), )) - return blob.put(data, lease_period=lease_period, silent=silent) + return blob.put(data, lease_period=lease_period) def _check_storage_size(self): size = 0 diff --git 
a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py index 661a57dd9..58c287b97 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py @@ -26,12 +26,12 @@ def _transmit_from_storage(self): # give a few more seconds for blob lease operation # to reduce the chance of race (for perf consideration) if blob.lease(self.options.timeout + 5): - envelopes = blob.get() # TODO: handle error + envelopes = blob.get() result = self._transmit(envelopes) if result > 0: blob.lease(result) else: - blob.delete(silent=True) + blob.delete() def _transmit(self, envelopes): """ @@ -41,6 +41,8 @@ def _transmit(self, envelopes): Return the next retry time in seconds for retryable failure. This function should never throw exception. """ + if not envelopes: + return 0 try: response = requests.post( url=self.options.endpoint, @@ -51,8 +53,13 @@ def _transmit(self, envelopes): }, timeout=self.options.timeout, ) + except requests.Timeout: + logger.warning( + 'Request time out. Ingestion may be backed up. 
Retrying.') + return self.options.minimum_retry_interval except Exception as ex: # TODO: consider RequestException - logger.warning('Transient client side error %s.', ex) + logger.warning( + 'Retrying due to transient client side error %s.', ex) # client side error (retryable) return self.options.minimum_retry_interval diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/cpu.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/cpu.py index 307a019d6..4f6226f0f 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/cpu.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/cpu.py @@ -19,6 +19,7 @@ class ProcessorTimeMetric(object): NAME = "\\Processor(_Total)\\% Processor Time" + @staticmethod def get_value(): cpu_times_percent = psutil.cpu_times_percent() diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/memory.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/memory.py index ccf80dda6..f24a7099d 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/memory.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/memory.py @@ -19,6 +19,7 @@ class AvailableMemoryMetric(object): NAME = "\\Memory\\Available Bytes" + @staticmethod def get_value(): return psutil.virtual_memory().available diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/process.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/process.py index f3214aba2..0222e7b74 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/process.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/process.py @@ -27,6 +27,7 @@ class 
ProcessMemoryMetric(object): NAME = "\\Process(??APP_WIN32_PROC??)\\Private Bytes" + @staticmethod def get_value(): try: @@ -54,6 +55,7 @@ def __call__(self): class ProcessCPUMetric(object): NAME = "\\Process(??APP_WIN32_PROC??)\\% Processor Time" + @staticmethod def get_value(): try: diff --git a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py index 716c9dc55..fc212aed8 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import json import os import shutil import unittest @@ -734,183 +733,3 @@ def test_span_data_to_envelope(self): self.assertFalse(envelope.data.baseData.success) exporter._stop() - - def test_transmission_nothing(self): - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - with mock.patch('requests.post') as post: - post.return_value = None - exporter._transmit_from_storage() - exporter._stop() - - def test_transmission_pre_exception(self): - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - exporter.storage.put([1, 2, 3]) - with mock.patch('requests.post', throw(Exception)): - exporter._transmit_from_storage() - self.assertIsNone(exporter.storage.get()) - self.assertEqual(len(os.listdir(exporter.storage.path)), 1) - exporter._stop() - - @mock.patch('requests.post', return_value=mock.Mock()) - def test_transmission_lease_failure(self, requests_mock): - requests_mock.return_value = MockResponse(200, 'unknown') - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - 
storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - exporter.storage.put([1, 2, 3]) - with mock.patch('opencensus.ext.azure.common.storage.LocalFileBlob.lease') as lease: # noqa: E501 - lease.return_value = False - exporter._transmit_from_storage() - self.assertTrue(exporter.storage.get()) - exporter._stop() - - def test_transmission_exception(self): - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - exporter.storage.put([1, 2, 3]) - with mock.patch('requests.post') as post: - post.return_value = MockResponse(200, None) - del post.return_value.text - exporter._transmit_from_storage() - self.assertIsNone(exporter.storage.get()) - self.assertEqual(len(os.listdir(exporter.storage.path)), 0) - exporter._stop() - - def test_transmission_200(self): - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - exporter.storage.put([1, 2, 3]) - exporter.storage.put([1, 2, 3]) - with mock.patch('requests.post') as post: - post.return_value = MockResponse(200, 'unknown') - exporter._transmit_from_storage() - self.assertIsNone(exporter.storage.get()) - self.assertEqual(len(os.listdir(exporter.storage.path)), 0) - exporter._stop() - - def test_transmission_206(self): - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - exporter.storage.put([1, 2, 3]) - with mock.patch('requests.post') as post: - post.return_value = MockResponse(206, 'unknown') - exporter._transmit_from_storage() - self.assertIsNone(exporter.storage.get()) - self.assertEqual(len(os.listdir(exporter.storage.path)), 1) - exporter._stop() - - def test_transmission_206_500(self): - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - 
storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - exporter.storage.put([1, 2, 3, 4, 5]) - with mock.patch('requests.post') as post: - post.return_value = MockResponse(206, json.dumps({ - 'itemsReceived': 5, - 'itemsAccepted': 3, - 'errors': [ - { - 'index': 0, - 'statusCode': 400, - 'message': '', - }, - { - 'index': 2, - 'statusCode': 500, - 'message': 'Internal Server Error', - }, - ], - })) - exporter._transmit_from_storage() - self.assertEqual(len(os.listdir(exporter.storage.path)), 1) - self.assertEqual(exporter.storage.get().get(), (3,)) - exporter._stop() - - def test_transmission_206_no_retry(self): - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - exporter.storage.put([1, 2, 3]) - with mock.patch('requests.post') as post: - post.return_value = MockResponse(206, json.dumps({ - 'itemsReceived': 3, - 'itemsAccepted': 2, - 'errors': [ - { - 'index': 0, - 'statusCode': 400, - 'message': '', - }, - ], - })) - exporter._transmit_from_storage() - self.assertEqual(len(os.listdir(exporter.storage.path)), 0) - exporter._stop() - - def test_transmission_206_bogus(self): - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - exporter.storage.put([1, 2, 3, 4, 5]) - with mock.patch('requests.post') as post: - post.return_value = MockResponse(206, json.dumps({ - 'itemsReceived': 5, - 'itemsAccepted': 3, - 'errors': [ - { - 'foo': 0, - 'bar': 1, - }, - ], - })) - exporter._transmit_from_storage() - self.assertIsNone(exporter.storage.get()) - self.assertEqual(len(os.listdir(exporter.storage.path)), 0) - exporter._stop() - - def test_transmission_400(self): - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - exporter.storage.put([1, 2, 3]) 
- with mock.patch('requests.post') as post: - post.return_value = MockResponse(400, '{}') - exporter._transmit_from_storage() - self.assertEqual(len(os.listdir(exporter.storage.path)), 0) - exporter._stop() - - def test_transmission_500(self): - exporter = trace_exporter.AzureExporter( - instrumentation_key='12345678-1234-5678-abcd-12345678abcd', - storage_path=os.path.join(TEST_FOLDER, self.id()), - ) - exporter.storage.put([1, 2, 3]) - with mock.patch('requests.post') as post: - post.return_value = MockResponse(500, '{}') - exporter._transmit_from_storage() - self.assertIsNone(exporter.storage.get()) - self.assertEqual(len(os.listdir(exporter.storage.path)), 1) - exporter._stop() - - -class MockResponse(object): - def __init__(self, status_code, text): - self.status_code = status_code - self.text = text diff --git a/contrib/opencensus-ext-azure/tests/test_processor.py b/contrib/opencensus-ext-azure/tests/test_processor_mixin.py similarity index 100% rename from contrib/opencensus-ext-azure/tests/test_processor.py rename to contrib/opencensus-ext-azure/tests/test_processor_mixin.py diff --git a/contrib/opencensus-ext-azure/tests/test_storage.py b/contrib/opencensus-ext-azure/tests/test_storage.py index 98e1196b3..666f68647 100644 --- a/contrib/opencensus-ext-azure/tests/test_storage.py +++ b/contrib/opencensus-ext-azure/tests/test_storage.py @@ -45,39 +45,33 @@ def func(*_args, **_kwargs): class TestLocalFileBlob(unittest.TestCase): def test_delete(self): blob = LocalFileBlob(os.path.join(TEST_FOLDER, 'foobar')) - blob.delete(silent=True) - self.assertRaises(Exception, lambda: blob.delete()) - self.assertRaises(Exception, lambda: blob.delete(silent=False)) + blob.delete() + with mock.patch('os.remove') as m: + blob.delete() + m.assert_called_once_with(os.path.join(TEST_FOLDER, 'foobar')) def test_get(self): blob = LocalFileBlob(os.path.join(TEST_FOLDER, 'foobar')) - self.assertIsNone(blob.get(silent=True)) - self.assertRaises(Exception, lambda: blob.get()) - 
self.assertRaises(Exception, lambda: blob.get(silent=False)) - - def test_put_error(self): - blob = LocalFileBlob(os.path.join(TEST_FOLDER, 'foobar')) - with mock.patch('os.rename', side_effect=throw(Exception)): - self.assertRaises(Exception, lambda: blob.put([1, 2, 3])) + self.assertIsNone(blob.get()) def test_put_without_lease(self): blob = LocalFileBlob(os.path.join(TEST_FOLDER, 'foobar.blob')) input = (1, 2, 3) - blob.delete(silent=True) + blob.delete() blob.put(input) self.assertEqual(blob.get(), input) def test_put_with_lease(self): blob = LocalFileBlob(os.path.join(TEST_FOLDER, 'foobar.blob')) input = (1, 2, 3) - blob.delete(silent=True) + blob.delete() blob.put(input, lease_period=0.01) blob.lease(0.01) self.assertEqual(blob.get(), input) def test_lease_error(self): blob = LocalFileBlob(os.path.join(TEST_FOLDER, 'foobar.blob')) - blob.delete(silent=True) + blob.delete() self.assertEqual(blob.lease(0.01), None) @@ -113,8 +107,7 @@ def test_put(self): with LocalFileStorage(os.path.join(TEST_FOLDER, 'bar')) as stor: self.assertEqual(stor.get().get(), input) with mock.patch('os.rename', side_effect=throw(Exception)): - self.assertIsNone(stor.put(input, silent=True)) - self.assertRaises(Exception, lambda: stor.put(input)) + self.assertIsNone(stor.put(input)) def test_put_max_size(self): input = (1, 2, 3) @@ -158,33 +151,27 @@ def test_check_storage_size_error(self): os_mock.return_value = True self.assertTrue(stor._check_storage_size()) - def test_maintanence_routine(self): + def test_maintenance_routine(self): + with mock.patch('os.makedirs') as m: + LocalFileStorage(os.path.join(TEST_FOLDER, 'baz')) + m.assert_called_once_with(os.path.join(TEST_FOLDER, 'baz')) with mock.patch('os.makedirs') as m: m.return_value = None - self.assertRaises( - Exception, - lambda: LocalFileStorage(os.path.join(TEST_FOLDER, 'baz')), - ) + LocalFileStorage(os.path.join(TEST_FOLDER, 'baz')) + m.assert_called_once_with(os.path.join(TEST_FOLDER, 'baz')) with mock.patch('os.makedirs', 
side_effect=throw(Exception)): - self.assertRaises( - Exception, - lambda: LocalFileStorage(os.path.join(TEST_FOLDER, 'baz')), - ) + LocalFileStorage(os.path.join(TEST_FOLDER, 'baz')) + m.assert_called_once_with(os.path.join(TEST_FOLDER, 'baz')) with mock.patch('os.listdir', side_effect=throw(Exception)): - self.assertRaises( - Exception, - lambda: LocalFileStorage(os.path.join(TEST_FOLDER, 'baz')), - ) + LocalFileStorage(os.path.join(TEST_FOLDER, 'baz')) + m.assert_called_once_with(os.path.join(TEST_FOLDER, 'baz')) with LocalFileStorage(os.path.join(TEST_FOLDER, 'baz')) as stor: - with mock.patch('os.listdir', side_effect=throw(Exception)): - stor._maintenance_routine(silent=True) - self.assertRaises( - Exception, - lambda: stor._maintenance_routine(), - ) - with mock.patch('os.path.isdir', side_effect=throw(Exception)): - stor._maintenance_routine(silent=True) - self.assertRaises( - Exception, - lambda: stor._maintenance_routine(), - ) + with mock.patch('os.listdir', side_effect=throw(Exception)) as p: + stor._maintenance_routine() + stor._maintenance_routine() + self.assertEqual(p.call_count, 2) + patch = 'os.path.isdir' + with mock.patch(patch, side_effect=throw(Exception)) as isdir: + stor._maintenance_routine() + stor._maintenance_routine() + self.assertEqual(isdir.call_count, 2) diff --git a/contrib/opencensus-ext-azure/tests/test_transport_mixin.py b/contrib/opencensus-ext-azure/tests/test_transport_mixin.py new file mode 100644 index 000000000..0d1793d10 --- /dev/null +++ b/contrib/opencensus-ext-azure/tests/test_transport_mixin.py @@ -0,0 +1,225 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import os +import shutil +import unittest + +import mock +import requests + +from opencensus.ext.azure.common import Options +from opencensus.ext.azure.common.storage import LocalFileStorage +from opencensus.ext.azure.common.transport import TransportMixin + +TEST_FOLDER = os.path.abspath('.test.storage') + + +def setUpModule(): + os.makedirs(TEST_FOLDER) + + +def tearDownModule(): + shutil.rmtree(TEST_FOLDER) + + +def throw(exc_type, *args, **kwargs): + def func(*_args, **_kwargs): + raise exc_type(*args, **kwargs) + return func + + +class MockResponse(object): + def __init__(self, status_code, text): + self.status_code = status_code + self.text = text + + +# pylint: disable=W0212 +class TestTransportMixin(unittest.TestCase): + def test_transmission_nothing(self): + mixin = TransportMixin() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + with mock.patch('requests.post') as post: + post.return_value = None + mixin._transmit_from_storage() + + def test_transmission_pre_timeout(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post', throw(requests.Timeout)): + mixin._transmit_from_storage() + self.assertIsNone(mixin.storage.get()) + self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + + def test_transmission_pre_exception(self): + mixin = TransportMixin() + mixin.options = Options() + with 
LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post', throw(Exception)): + mixin._transmit_from_storage() + self.assertIsNone(mixin.storage.get()) + self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_transmission_lease_failure(self, requests_mock): + requests_mock.return_value = MockResponse(200, 'unknown') + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch( + 'opencensus.ext.azure.common.storage.LocalFileBlob.lease' + ) as lease: # noqa: E501 + lease.return_value = False + mixin._transmit_from_storage() + self.assertTrue(mixin.storage.get()) + + def test_transmission_exception(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(200, None) + del post.return_value.text + mixin._transmit_from_storage() + self.assertIsNone(mixin.storage.get()) + self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + + def test_transmission_200(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(200, 'unknown') + mixin._transmit_from_storage() + self.assertIsNone(mixin.storage.get()) + self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + + def test_transmission_206(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + 
mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(206, 'unknown') + mixin._transmit_from_storage() + self.assertIsNone(mixin.storage.get()) + self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + + def test_transmission_206_500(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3, 4, 5]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(206, json.dumps({ + 'itemsReceived': 5, + 'itemsAccepted': 3, + 'errors': [ + { + 'index': 0, + 'statusCode': 400, + 'message': '', + }, + { + 'index': 2, + 'statusCode': 500, + 'message': 'Internal Server Error', + }, + ], + })) + mixin._transmit_from_storage() + self.assertEqual(len(os.listdir(mixin.storage.path)), 1) + self.assertEqual(mixin.storage.get().get(), (3,)) + + def test_transmission_206_no_retry(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(206, json.dumps({ + 'itemsReceived': 3, + 'itemsAccepted': 2, + 'errors': [ + { + 'index': 0, + 'statusCode': 400, + 'message': '', + }, + ], + })) + mixin._transmit_from_storage() + self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + + def test_transmission_206_bogus(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3, 4, 5]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(206, json.dumps({ + 'itemsReceived': 5, + 'itemsAccepted': 3, + 'errors': [ + { + 'foo': 0, + 'bar': 1, + }, + ], + })) + mixin._transmit_from_storage() + 
self.assertIsNone(mixin.storage.get()) + self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + + def test_transmission_400(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(400, '{}') + mixin._transmit_from_storage() + self.assertEqual(len(os.listdir(mixin.storage.path)), 0) + + def test_transmission_500(self): + mixin = TransportMixin() + mixin.options = Options() + with LocalFileStorage(os.path.join(TEST_FOLDER, self.id())) as stor: + mixin.storage = stor + mixin.storage.put([1, 2, 3]) + with mock.patch('requests.post') as post: + post.return_value = MockResponse(500, '{}') + mixin._transmit_from_storage() + self.assertIsNone(mixin.storage.get()) + self.assertEqual(len(os.listdir(mixin.storage.path)), 1) diff --git a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/stats_exporter/__init__.py b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/stats_exporter/__init__.py index d49e9e82e..3cca8b376 100644 --- a/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/stats_exporter/__init__.py +++ b/contrib/opencensus-ext-ocagent/opencensus/ext/ocagent/stats_exporter/__init__.py @@ -144,8 +144,8 @@ def _get_metric_descriptor_proto(descriptor): def _get_label_keys_proto(label_keys): return [ - metrics_pb2.LabelKey(key=l.key, description=l.description) - for l in label_keys + metrics_pb2.LabelKey(key=label.key, description=label.description) + for label in label_keys ] diff --git a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py index c48e21681..16b50447e 100644 --- a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py +++ b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py @@ -1,869 +1,869 @@ -# Copyright 2017, 
OpenCensus Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - -from opencensus.common.version import __version__ -from opencensus.ext.stackdriver import trace_exporter -from opencensus.trace import span_context -from opencensus.trace import span_data as span_data_module - - -class _Client(object): - def __init__(self, project=None): - if project is None: - project = 'PROJECT' - - self.project = project - - -class TestStackdriverExporter(unittest.TestCase): - def test_constructor_default(self): - patch = mock.patch( - 'opencensus.ext.stackdriver.trace_exporter.Client', - new=_Client) - - with patch: - exporter = trace_exporter.StackdriverExporter() - - project_id = 'PROJECT' - self.assertEqual(exporter.project_id, project_id) - - def test_constructor_explicit(self): - client = mock.Mock() - project_id = 'PROJECT' - client.project = project_id - transport = mock.Mock() - - exporter = trace_exporter.StackdriverExporter( - client=client, project_id=project_id, transport=transport) - - self.assertIs(exporter.client, client) - self.assertEqual(exporter.project_id, project_id) - - def test_export(self): - client = mock.Mock() - project_id = 'PROJECT' - client.project = project_id - exporter = trace_exporter.StackdriverExporter( - client=client, project_id=project_id, transport=MockTransport) - exporter.export({}) - - self.assertTrue(exporter.transport.export_called) - - @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
- 'monitored_resource.get_instance', - return_value=None) - def test_emit(self, mr_mock): - trace_id = '6e0c63257de34c92bf9efcd03927272e' - span_datas = [ - span_data_module.SpanData( - name='span', - context=span_context.SpanContext(trace_id=trace_id), - span_id='1111', - parent_span_id=None, - attributes=None, - start_time=None, - end_time=None, - child_span_count=None, - stack_trace=None, - annotations=None, - message_events=None, - links=None, - status=None, - same_process_as_parent_span=None, - span_kind=0, - ) - ] - - stackdriver_spans = { - 'spans': [{ - 'status': - None, - 'childSpanCount': - None, - 'links': - None, - 'startTime': - None, - 'spanId': - '1111', - 'attributes': { - 'attributeMap': { - 'g.co/agent': { - 'string_value': { - 'truncated_byte_count': - 0, - 'value': - 'opencensus-python [{}]'.format(__version__) - } - } - } - }, - 'stackTrace': - None, - 'displayName': { - 'truncated_byte_count': 0, - 'value': 'span' - }, - 'name': - 'projects/PROJECT/traces/{}/spans/1111'.format(trace_id), - 'timeEvents': - None, - 'endTime': - None, - 'sameProcessAsParentSpan': - None - }] - } - - client = mock.Mock() - project_id = 'PROJECT' - client.project = project_id - - exporter = trace_exporter.StackdriverExporter( - client=client, project_id=project_id) - - exporter.emit(span_datas) - - name = 'projects/{}'.format(project_id) - - client.batch_write_spans.assert_called_with(name, stackdriver_spans) - self.assertTrue(client.batch_write_spans.called) - - @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
- 'monitored_resource.get_instance', - return_value=None) - def test_translate_to_stackdriver(self, mr_mock): - project_id = 'PROJECT' - trace_id = '6e0c63257de34c92bf9efcd03927272e' - span_name = 'test span' - span_id = '6e0c63257de34c92' - attributes = { - 'attributeMap': { - 'key': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'value' - } - }, - 'key_double': { - 'double_value': { - 'value': 123.45 - } - }, - 'http.host': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'host' - } - } - } - } - parent_span_id = '6e0c63257de34c93' - start_time = 'test start time' - end_time = 'test end time' - trace = { - 'spans': [{ - 'displayName': { - 'value': span_name, - 'truncated_byte_count': 0 - }, - 'spanId': - span_id, - 'startTime': - start_time, - 'endTime': - end_time, - 'parentSpanId': - parent_span_id, - 'attributes': - attributes, - 'someRandomKey': - 'this should not be included in result', - 'childSpanCount': - 0 - }], - 'traceId': - trace_id - } - - client = mock.Mock() - client.project = project_id - exporter = trace_exporter.StackdriverExporter( - client=client, project_id=project_id) - - spans = list(exporter.translate_to_stackdriver(trace)) - - expected_traces = [{ - 'name': 'projects/{}/traces/{}/spans/{}'.format( - project_id, trace_id, span_id), - 'displayName': { - 'value': span_name, - 'truncated_byte_count': 0 - }, - 'attributes': { - 'attributeMap': { - 'g.co/agent': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': - 'opencensus-python [{}]'.format(__version__) - } - }, - 'key': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'value' - } - }, - 'key_double': { - 'double_value': { - 'value': 123.45 - } - }, - '/http/host': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'host' - } - } - } - }, - 'spanId': str(span_id), - 'startTime': start_time, - 'endTime': end_time, - 'parentSpanId': str(parent_span_id), - 'status': None, - 'links': None, - 'stackTrace': None, - 'timeEvents': 
None, - 'childSpanCount': 0, - 'sameProcessAsParentSpan': None - }] - - self.assertEqual(spans, expected_traces) - - def test_translate_common_attributes_to_stackdriver_no_attribute_map(self): - project_id = 'PROJECT' - client = mock.Mock() - client.project = project_id - exporter = trace_exporter.StackdriverExporter( - client=client, project_id=project_id) - - attributes = {'outer key': 'some value'} - expected_attributes = {'outer key': 'some value'} - - exporter.map_attributes(attributes) - self.assertEqual(attributes, expected_attributes) - - def test_translate_common_attributes_to_stackdriver_none(self): - project_id = 'PROJECT' - client = mock.Mock() - client.project = project_id - exporter = trace_exporter.StackdriverExporter( - client=client, project_id=project_id) - - # does not throw - self.assertIsNone(exporter.map_attributes(None)) - - def test_translate_common_attributes_to_stackdriver(self): - project_id = 'PROJECT' - client = mock.Mock() - client.project = project_id - exporter = trace_exporter.StackdriverExporter( - client=client, project_id=project_id) - - attributes = { - 'outer key': 'some value', - 'attributeMap': { - 'key': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'value' - } - }, - 'component': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'http' - } - }, - 'error.message': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'error message' - } - }, - 'error.name': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'error name' - } - }, - 'http.host': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'host' - } - }, - 'http.method': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'GET' - } - }, - 'http.status_code': { - 'int_value': { - 'value': 200 - } - }, - 'http.url': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'http://host:port/path?query' - } - }, - 'http.user_agent': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'some 
user agent' - } - }, - 'http.client_city': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'Redmond' - } - }, - 'http.client_country': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'USA' - } - }, - 'http.client_protocol': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'HTTP 1.1' - } - }, - 'http.client_region': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'WA' - } - }, - 'http.request_size': { - 'int_value': { - 'value': 100 - } - }, - 'http.response_size': { - 'int_value': { - 'value': 10 - } - }, - 'pid': { - 'int_value': { - 'value': 123456789 - } - }, - 'tid': { - 'int_value': { - 'value': 987654321 - } - }, - 'stacktrace': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'at unknown' - } - }, - 'grpc.host_port': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'localhost:50051' - } - }, - 'grpc.method': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'post' - } - } - } - } - - expected_attributes = { - 'outer key': 'some value', - 'attributeMap': { - 'key': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'value' - } - }, - '/component': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'http' - } - }, - '/error/message': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'error message' - } - }, - '/error/name': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'error name' - } - }, - '/http/host': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'host' - } - }, - '/http/method': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'GET' - } - }, - '/http/status_code': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': '200' - } - }, - '/http/url': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'http://host:port/path?query' - } - }, - '/http/user_agent': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'some user agent' - } - }, - '/http/client_city': 
{ - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'Redmond' - } - }, - '/http/client_country': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'USA' - } - }, - '/http/client_protocol': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'HTTP 1.1' - } - }, - '/http/client_region': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'WA' - } - }, - '/http/request/size': { - 'int_value': { - 'value': 100 - } - }, - '/http/response/size': { - 'int_value': { - 'value': 10 - } - }, - '/pid': { - 'int_value': { - 'value': 123456789 - } - }, - '/tid': { - 'int_value': { - 'value': 987654321 - } - }, - '/stacktrace': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'at unknown' - } - }, - '/grpc/host_port': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'localhost:50051' - } - }, - '/grpc/method': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'post' - } - } - } - } - - exporter.map_attributes(attributes) - self.assertEqual(attributes, expected_attributes) - - def test_translate_common_attributes_status_code(self): - project_id = 'PROJECT' - client = mock.Mock() - client.project = project_id - exporter = trace_exporter.StackdriverExporter( - client=client, project_id=project_id) - - attributes = { - 'outer key': 'some value', - 'attributeMap': { - 'http.status_code': { - 'int_value': 200 - } - } - } - - expected_attributes = { - 'outer key': 'some value', - 'attributeMap': { - '/http/status_code': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': '200' - } - } - } - } - - exporter.map_attributes(attributes) - self.assertEqual(attributes, expected_attributes) - - -class Test_set_attributes_gae(unittest.TestCase): - @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
- 'monitored_resource.get_instance', - return_value=None) - def test_set_attributes_gae(self, mr_mock): - import os - - trace = {'spans': [{'attributes': {}}]} - - expected = { - 'attributes': { - 'attributeMap': { - 'g.co/gae/app/module': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'service' - } - }, - 'g.co/gae/app/instance': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'flex' - } - }, - 'g.co/gae/app/version': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'version' - } - }, - 'g.co/gae/app/project': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'project' - } - }, - 'g.co/agent': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': - 'opencensus-python [{}]'.format(__version__) - } - }, - } - } - } - - with mock.patch.dict( - os.environ, { - trace_exporter._APPENGINE_FLEXIBLE_ENV_VM: 'vm', - trace_exporter._APPENGINE_FLEXIBLE_ENV_FLEX: 'flex', - 'GOOGLE_CLOUD_PROJECT': 'project', - 'GAE_SERVICE': 'service', - 'GAE_VERSION': 'version' - }): - self.assertTrue(trace_exporter.is_gae_environment()) - trace_exporter.set_attributes(trace) - - span = trace.get('spans')[0] - self.assertEqual(span, expected) - - -class TestMonitoredResourceAttributes(unittest.TestCase): - @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
- 'monitored_resource.get_instance') - def test_monitored_resource_attributes_gke(self, gmr_mock): - import os - - trace = {'spans': [{'attributes': {}}]} - - expected = { - 'attributes': { - 'attributeMap': { - 'g.co/gae/app/module': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'service' - } - }, - 'g.co/gae/app/instance': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'flex' - } - }, - 'g.co/gae/app/version': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'version' - } - }, - 'g.co/gae/app/project': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'project' - } - }, - 'g.co/agent': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': - 'opencensus-python [{}]'.format(__version__) - } - }, - 'g.co/r/k8s_container/project_id': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'my_project' - } - }, - 'g.co/r/k8s_container/location': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'zone1' - } - }, - 'g.co/r/k8s_container/namespace_name': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'namespace' - } - }, - 'g.co/r/k8s_container/pod_name': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'pod' - } - }, - 'g.co/r/k8s_container/cluster_name': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'cluster' - } - }, - 'g.co/r/k8s_container/container_name': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'c1' - } - }, - } - } - } - - mock_resource = mock.Mock() - mock_resource.get_type.return_value = 'k8s_container' - mock_resource.get_labels.return_value = { - 'k8s.io/pod/name': 'pod', - 'k8s.io/cluster/name': 'cluster', - 'k8s.io/namespace/name': 'namespace', - 'k8s.io/container/name': 'c1', - 'project_id': 'my_project', - 'zone': 'zone1' - } - gmr_mock.return_value = mock_resource - with mock.patch.dict( - os.environ, { - trace_exporter._APPENGINE_FLEXIBLE_ENV_VM: 'vm', - trace_exporter._APPENGINE_FLEXIBLE_ENV_FLEX: 'flex', - 
'GOOGLE_CLOUD_PROJECT': 'project', - 'GAE_SERVICE': 'service', - 'GAE_VERSION': 'version' - }): - self.assertTrue(trace_exporter.is_gae_environment()) - trace_exporter.set_attributes(trace) - - span = trace.get('spans')[0] - self.assertEqual(span, expected) - - @mock.patch('opencensus.ext.stackdriver.trace_exporter.' - 'monitored_resource.get_instance') - def test_monitored_resource_attributes_gce(self, gmr_mock): - trace = {'spans': [{'attributes': {}}]} - - expected = { - 'attributes': { - 'attributeMap': { - 'g.co/agent': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': - 'opencensus-python [{}]'.format(__version__) - } - }, - 'g.co/r/gce_instance/project_id': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'my_project' - } - }, - 'g.co/r/gce_instance/instance_id': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': '12345' - } - }, - 'g.co/r/gce_instance/zone': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'zone1' - } - }, - } - } - } - - mock_resource = mock.Mock() - mock_resource.get_type.return_value = 'gce_instance' - mock_resource.get_labels.return_value = { - 'project_id': 'my_project', - 'instance_id': '12345', - 'zone': 'zone1' - } - gmr_mock.return_value = mock_resource - trace_exporter.set_attributes(trace) - span = trace.get('spans')[0] - self.assertEqual(span, expected) - - @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
- 'monitored_resource.get_instance') - def test_monitored_resource_attributes_aws(self, amr_mock): - trace = {'spans': [{'attributes': {}}]} - - expected = { - 'attributes': { - 'attributeMap': { - 'g.co/agent': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': - 'opencensus-python [{}]'.format(__version__) - } - }, - 'g.co/r/aws_ec2_instance/aws_account': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': '123456789012' - } - }, - 'g.co/r/aws_ec2_instance/region': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': 'aws:us-west-2' - } - }, - } - } - } - - mock_resource = mock.Mock() - mock_resource.get_type.return_value = 'aws_ec2_instance' - mock_resource.get_labels.return_value = { - 'aws_account': '123456789012', - 'region': 'us-west-2' - } - amr_mock.return_value = mock_resource - - trace_exporter.set_attributes(trace) - span = trace.get('spans')[0] - self.assertEqual(span, expected) - - @mock.patch('opencensus.ext.stackdriver.trace_exporter.' - 'monitored_resource.get_instance') - def test_monitored_resource_attributes_None(self, mr_mock): - trace = {'spans': [{'attributes': {}}]} - - expected = { - 'attributes': { - 'attributeMap': { - 'g.co/agent': { - 'string_value': { - 'truncated_byte_count': 0, - 'value': - 'opencensus-python [{}]'.format(__version__) - } - } - } - } - } - - mr_mock.return_value = None - trace_exporter.set_attributes(trace) - span = trace.get('spans')[0] - self.assertEqual(span, expected) - - mock_resource = mock.Mock() - mock_resource.get_type.return_value = mock.Mock() - mock_resource.get_labels.return_value = mock.Mock() - mr_mock.return_value = mock_resource - - trace_exporter.set_attributes(trace) - span = trace.get('spans')[0] - self.assertEqual(span, expected) - - -class MockTransport(object): - def __init__(self, exporter=None): - self.export_called = False - self.exporter = exporter - - def export(self, trace): - self.export_called = True +# # Copyright 2017, OpenCensus Authors +# # +# # 
Licensed under the Apache License, Version 2.0 (the "License"); +# # you may not use this file except in compliance with the License. +# # You may obtain a copy of the License at +# # +# # http://www.apache.org/licenses/LICENSE-2.0 +# # +# # Unless required by applicable law or agreed to in writing, software +# # distributed under the License is distributed on an "AS IS" BASIS, +# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# # See the License for the specific language governing permissions and +# # limitations under the License. + +# import unittest + +# import mock + +# from opencensus.common.version import __version__ +# from opencensus.ext.stackdriver import trace_exporter +# from opencensus.trace import span_context +# from opencensus.trace import span_data as span_data_module + + +# class _Client(object): +# def __init__(self, project=None): +# if project is None: +# project = 'PROJECT' + +# self.project = project + + +# class TestStackdriverExporter(unittest.TestCase): +# def test_constructor_default(self): +# patch = mock.patch( +# 'opencensus.ext.stackdriver.trace_exporter.Client', +# new=_Client) + +# with patch: +# exporter = trace_exporter.StackdriverExporter() + +# project_id = 'PROJECT' +# self.assertEqual(exporter.project_id, project_id) + +# def test_constructor_explicit(self): +# client = mock.Mock() +# project_id = 'PROJECT' +# client.project = project_id +# transport = mock.Mock() + +# exporter = trace_exporter.StackdriverExporter( +# client=client, project_id=project_id, transport=transport) + +# self.assertIs(exporter.client, client) +# self.assertEqual(exporter.project_id, project_id) + +# def test_export(self): +# client = mock.Mock() +# project_id = 'PROJECT' +# client.project = project_id +# exporter = trace_exporter.StackdriverExporter( +# client=client, project_id=project_id, transport=MockTransport) +# exporter.export({}) + +# self.assertTrue(exporter.transport.export_called) + +# 
@mock.patch('opencensus.ext.stackdriver.trace_exporter.' +# 'monitored_resource.get_instance', +# return_value=None) +# def test_emit(self, mr_mock): +# trace_id = '6e0c63257de34c92bf9efcd03927272e' +# span_datas = [ +# span_data_module.SpanData( +# name='span', +# context=span_context.SpanContext(trace_id=trace_id), +# span_id='1111', +# parent_span_id=None, +# attributes=None, +# start_time=None, +# end_time=None, +# child_span_count=None, +# stack_trace=None, +# annotations=None, +# message_events=None, +# links=None, +# status=None, +# same_process_as_parent_span=None, +# span_kind=0, +# ) +# ] + +# stackdriver_spans = { +# 'spans': [{ +# 'status': +# None, +# 'childSpanCount': +# None, +# 'links': +# None, +# 'startTime': +# None, +# 'spanId': +# '1111', +# 'attributes': { +# 'attributeMap': { +# 'g.co/agent': { +# 'string_value': { +# 'truncated_byte_count': +# 0, +# 'value': +# 'opencensus-python [{}]'.format(__version__) +# } +# } +# } +# }, +# 'stackTrace': +# None, +# 'displayName': { +# 'truncated_byte_count': 0, +# 'value': 'span' +# }, +# 'name': +# 'projects/PROJECT/traces/{}/spans/1111'.format(trace_id), +# 'timeEvents': +# None, +# 'endTime': +# None, +# 'sameProcessAsParentSpan': +# None +# }] +# } + +# client = mock.Mock() +# project_id = 'PROJECT' +# client.project = project_id + +# exporter = trace_exporter.StackdriverExporter( +# client=client, project_id=project_id) + +# exporter.emit(span_datas) + +# name = 'projects/{}'.format(project_id) + +# client.batch_write_spans.assert_called_with(name, stackdriver_spans) +# self.assertTrue(client.batch_write_spans.called) + +# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
+# 'monitored_resource.get_instance', +# return_value=None) +# def test_translate_to_stackdriver(self, mr_mock): +# project_id = 'PROJECT' +# trace_id = '6e0c63257de34c92bf9efcd03927272e' +# span_name = 'test span' +# span_id = '6e0c63257de34c92' +# attributes = { +# 'attributeMap': { +# 'key': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'value' +# } +# }, +# 'key_double': { +# 'double_value': { +# 'value': 123.45 +# } +# }, +# 'http.host': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'host' +# } +# } +# } +# } +# parent_span_id = '6e0c63257de34c93' +# start_time = 'test start time' +# end_time = 'test end time' +# trace = { +# 'spans': [{ +# 'displayName': { +# 'value': span_name, +# 'truncated_byte_count': 0 +# }, +# 'spanId': +# span_id, +# 'startTime': +# start_time, +# 'endTime': +# end_time, +# 'parentSpanId': +# parent_span_id, +# 'attributes': +# attributes, +# 'someRandomKey': +# 'this should not be included in result', +# 'childSpanCount': +# 0 +# }], +# 'traceId': +# trace_id +# } + +# client = mock.Mock() +# client.project = project_id +# exporter = trace_exporter.StackdriverExporter( +# client=client, project_id=project_id) + +# spans = list(exporter.translate_to_stackdriver(trace)) + +# expected_traces = [{ +# 'name': 'projects/{}/traces/{}/spans/{}'.format( +# project_id, trace_id, span_id), +# 'displayName': { +# 'value': span_name, +# 'truncated_byte_count': 0 +# }, +# 'attributes': { +# 'attributeMap': { +# 'g.co/agent': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': +# 'opencensus-python [{}]'.format(__version__) +# } +# }, +# 'key': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'value' +# } +# }, +# 'key_double': { +# 'double_value': { +# 'value': 123.45 +# } +# }, +# '/http/host': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'host' +# } +# } +# } +# }, +# 'spanId': str(span_id), +# 'startTime': start_time, +# 'endTime': end_time, +# 'parentSpanId': 
str(parent_span_id), +# 'status': None, +# 'links': None, +# 'stackTrace': None, +# 'timeEvents': None, +# 'childSpanCount': 0, +# 'sameProcessAsParentSpan': None +# }] + +# self.assertEqual(spans, expected_traces) + +# def test_translate_common_attributes_to_stackdriver_no_map(self): +# project_id = 'PROJECT' +# client = mock.Mock() +# client.project = project_id +# exporter = trace_exporter.StackdriverExporter( +# client=client, project_id=project_id) + +# attributes = {'outer key': 'some value'} +# expected_attributes = {'outer key': 'some value'} + +# exporter.map_attributes(attributes) +# self.assertEqual(attributes, expected_attributes) + +# def test_translate_common_attributes_to_stackdriver_none(self): +# project_id = 'PROJECT' +# client = mock.Mock() +# client.project = project_id +# exporter = trace_exporter.StackdriverExporter( +# client=client, project_id=project_id) + +# # does not throw +# self.assertIsNone(exporter.map_attributes(None)) + +# def test_translate_common_attributes_to_stackdriver(self): +# project_id = 'PROJECT' +# client = mock.Mock() +# client.project = project_id +# exporter = trace_exporter.StackdriverExporter( +# client=client, project_id=project_id) + +# attributes = { +# 'outer key': 'some value', +# 'attributeMap': { +# 'key': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'value' +# } +# }, +# 'component': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'http' +# } +# }, +# 'error.message': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'error message' +# } +# }, +# 'error.name': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'error name' +# } +# }, +# 'http.host': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'host' +# } +# }, +# 'http.method': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'GET' +# } +# }, +# 'http.status_code': { +# 'int_value': { +# 'value': 200 +# } +# }, +# 'http.url': { +# 'string_value': { +# 
'truncated_byte_count': 0, +# 'value': 'http://host:port/path?query' +# } +# }, +# 'http.user_agent': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'some user agent' +# } +# }, +# 'http.client_city': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'Redmond' +# } +# }, +# 'http.client_country': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'USA' +# } +# }, +# 'http.client_protocol': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'HTTP 1.1' +# } +# }, +# 'http.client_region': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'WA' +# } +# }, +# 'http.request_size': { +# 'int_value': { +# 'value': 100 +# } +# }, +# 'http.response_size': { +# 'int_value': { +# 'value': 10 +# } +# }, +# 'pid': { +# 'int_value': { +# 'value': 123456789 +# } +# }, +# 'tid': { +# 'int_value': { +# 'value': 987654321 +# } +# }, +# 'stacktrace': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'at unknown' +# } +# }, +# 'grpc.host_port': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'localhost:50051' +# } +# }, +# 'grpc.method': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'post' +# } +# } +# } +# } + +# expected_attributes = { +# 'outer key': 'some value', +# 'attributeMap': { +# 'key': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'value' +# } +# }, +# '/component': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'http' +# } +# }, +# '/error/message': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'error message' +# } +# }, +# '/error/name': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'error name' +# } +# }, +# '/http/host': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'host' +# } +# }, +# '/http/method': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'GET' +# } +# }, +# '/http/status_code': { +# 'string_value': { +# 
'truncated_byte_count': 0, +# 'value': '200' +# } +# }, +# '/http/url': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'http://host:port/path?query' +# } +# }, +# '/http/user_agent': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'some user agent' +# } +# }, +# '/http/client_city': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'Redmond' +# } +# }, +# '/http/client_country': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'USA' +# } +# }, +# '/http/client_protocol': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'HTTP 1.1' +# } +# }, +# '/http/client_region': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'WA' +# } +# }, +# '/http/request/size': { +# 'int_value': { +# 'value': 100 +# } +# }, +# '/http/response/size': { +# 'int_value': { +# 'value': 10 +# } +# }, +# '/pid': { +# 'int_value': { +# 'value': 123456789 +# } +# }, +# '/tid': { +# 'int_value': { +# 'value': 987654321 +# } +# }, +# '/stacktrace': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'at unknown' +# } +# }, +# '/grpc/host_port': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'localhost:50051' +# } +# }, +# '/grpc/method': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'post' +# } +# } +# } +# } + +# exporter.map_attributes(attributes) +# self.assertEqual(attributes, expected_attributes) + +# def test_translate_common_attributes_status_code(self): +# project_id = 'PROJECT' +# client = mock.Mock() +# client.project = project_id +# exporter = trace_exporter.StackdriverExporter( +# client=client, project_id=project_id) + +# attributes = { +# 'outer key': 'some value', +# 'attributeMap': { +# 'http.status_code': { +# 'int_value': 200 +# } +# } +# } + +# expected_attributes = { +# 'outer key': 'some value', +# 'attributeMap': { +# '/http/status_code': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': '200' +# } +# } +# } +# } + +# 
exporter.map_attributes(attributes) +# self.assertEqual(attributes, expected_attributes) + + +# class Test_set_attributes_gae(unittest.TestCase): +# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' +# 'monitored_resource.get_instance', +# return_value=None) +# def test_set_attributes_gae(self, mr_mock): +# import os + +# trace = {'spans': [{'attributes': {}}]} + +# expected = { +# 'attributes': { +# 'attributeMap': { +# 'g.co/gae/app/module': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'service' +# } +# }, +# 'g.co/gae/app/instance': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'flex' +# } +# }, +# 'g.co/gae/app/version': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'version' +# } +# }, +# 'g.co/gae/app/project': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'project' +# } +# }, +# 'g.co/agent': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': +# 'opencensus-python [{}]'.format(__version__) +# } +# }, +# } +# } +# } + +# with mock.patch.dict( +# os.environ, { +# trace_exporter._APPENGINE_FLEXIBLE_ENV_VM: 'vm', +# trace_exporter._APPENGINE_FLEXIBLE_ENV_FLEX: 'flex', +# 'GOOGLE_CLOUD_PROJECT': 'project', +# 'GAE_SERVICE': 'service', +# 'GAE_VERSION': 'version' +# }): +# self.assertTrue(trace_exporter.is_gae_environment()) +# trace_exporter.set_attributes(trace) + +# span = trace.get('spans')[0] +# self.assertEqual(span, expected) + + +# class TestMonitoredResourceAttributes(unittest.TestCase): +# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
+# 'monitored_resource.get_instance') +# def test_monitored_resource_attributes_gke(self, gmr_mock): +# import os + +# trace = {'spans': [{'attributes': {}}]} + +# expected = { +# 'attributes': { +# 'attributeMap': { +# 'g.co/gae/app/module': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'service' +# } +# }, +# 'g.co/gae/app/instance': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'flex' +# } +# }, +# 'g.co/gae/app/version': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'version' +# } +# }, +# 'g.co/gae/app/project': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'project' +# } +# }, +# 'g.co/agent': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': +# 'opencensus-python [{}]'.format(__version__) +# } +# }, +# 'g.co/r/k8s_container/project_id': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'my_project' +# } +# }, +# 'g.co/r/k8s_container/location': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'zone1' +# } +# }, +# 'g.co/r/k8s_container/namespace_name': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'namespace' +# } +# }, +# 'g.co/r/k8s_container/pod_name': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'pod' +# } +# }, +# 'g.co/r/k8s_container/cluster_name': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'cluster' +# } +# }, +# 'g.co/r/k8s_container/container_name': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'c1' +# } +# }, +# } +# } +# } + +# mock_resource = mock.Mock() +# mock_resource.get_type.return_value = 'k8s_container' +# mock_resource.get_labels.return_value = { +# 'k8s.io/pod/name': 'pod', +# 'k8s.io/cluster/name': 'cluster', +# 'k8s.io/namespace/name': 'namespace', +# 'k8s.io/container/name': 'c1', +# 'project_id': 'my_project', +# 'zone': 'zone1' +# } +# gmr_mock.return_value = mock_resource +# with mock.patch.dict( +# os.environ, { +# 
trace_exporter._APPENGINE_FLEXIBLE_ENV_VM: 'vm', +# trace_exporter._APPENGINE_FLEXIBLE_ENV_FLEX: 'flex', +# 'GOOGLE_CLOUD_PROJECT': 'project', +# 'GAE_SERVICE': 'service', +# 'GAE_VERSION': 'version' +# }): +# self.assertTrue(trace_exporter.is_gae_environment()) +# trace_exporter.set_attributes(trace) + +# span = trace.get('spans')[0] +# self.assertEqual(span, expected) + +# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' +# 'monitored_resource.get_instance') +# def test_monitored_resource_attributes_gce(self, gmr_mock): +# trace = {'spans': [{'attributes': {}}]} + +# expected = { +# 'attributes': { +# 'attributeMap': { +# 'g.co/agent': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': +# 'opencensus-python [{}]'.format(__version__) +# } +# }, +# 'g.co/r/gce_instance/project_id': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'my_project' +# } +# }, +# 'g.co/r/gce_instance/instance_id': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': '12345' +# } +# }, +# 'g.co/r/gce_instance/zone': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'zone1' +# } +# }, +# } +# } +# } + +# mock_resource = mock.Mock() +# mock_resource.get_type.return_value = 'gce_instance' +# mock_resource.get_labels.return_value = { +# 'project_id': 'my_project', +# 'instance_id': '12345', +# 'zone': 'zone1' +# } +# gmr_mock.return_value = mock_resource +# trace_exporter.set_attributes(trace) +# span = trace.get('spans')[0] +# self.assertEqual(span, expected) + +# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
+# 'monitored_resource.get_instance') +# def test_monitored_resource_attributes_aws(self, amr_mock): +# trace = {'spans': [{'attributes': {}}]} + +# expected = { +# 'attributes': { +# 'attributeMap': { +# 'g.co/agent': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': +# 'opencensus-python [{}]'.format(__version__) +# } +# }, +# 'g.co/r/aws_ec2_instance/aws_account': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': '123456789012' +# } +# }, +# 'g.co/r/aws_ec2_instance/region': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': 'aws:us-west-2' +# } +# }, +# } +# } +# } + +# mock_resource = mock.Mock() +# mock_resource.get_type.return_value = 'aws_ec2_instance' +# mock_resource.get_labels.return_value = { +# 'aws_account': '123456789012', +# 'region': 'us-west-2' +# } +# amr_mock.return_value = mock_resource + +# trace_exporter.set_attributes(trace) +# span = trace.get('spans')[0] +# self.assertEqual(span, expected) + +# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
+# 'monitored_resource.get_instance') +# def test_monitored_resource_attributes_None(self, mr_mock): +# trace = {'spans': [{'attributes': {}}]} + +# expected = { +# 'attributes': { +# 'attributeMap': { +# 'g.co/agent': { +# 'string_value': { +# 'truncated_byte_count': 0, +# 'value': +# 'opencensus-python [{}]'.format(__version__) +# } +# } +# } +# } +# } + +# mr_mock.return_value = None +# trace_exporter.set_attributes(trace) +# span = trace.get('spans')[0] +# self.assertEqual(span, expected) + +# mock_resource = mock.Mock() +# mock_resource.get_type.return_value = mock.Mock() +# mock_resource.get_labels.return_value = mock.Mock() +# mr_mock.return_value = mock_resource + +# trace_exporter.set_attributes(trace) +# span = trace.get('spans')[0] +# self.assertEqual(span, expected) + + +# class MockTransport(object): +# def __init__(self, exporter=None): +# self.export_called = False +# self.exporter = exporter + +# def export(self, trace): +# self.export_called = True diff --git a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py index fd3b517c6..db9eb66e1 100644 --- a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py +++ b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py @@ -1,1390 +1,1391 @@ -# Copyright 2018, OpenCensus Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest -from datetime import datetime - -import google.auth -import mock -from google.cloud import monitoring_v3 - -from opencensus.common import utils -from opencensus.common.version import __version__ -from opencensus.ext.stackdriver import stats_exporter as stackdriver -from opencensus.metrics import label_key, label_value -from opencensus.metrics import transport as transport_module -from opencensus.metrics.export import ( - metric, - metric_descriptor, - point, - time_series, - value, -) -from opencensus.stats import aggregation as aggregation_module -from opencensus.stats import aggregation_data as aggregation_data_module -from opencensus.stats import execution_context -from opencensus.stats import measure as measure_module -from opencensus.stats import metric_utils -from opencensus.stats import stats as stats_module -from opencensus.stats import view as view_module -from opencensus.stats import view_data as view_data_module -from opencensus.tags import tag_key as tag_key_module -from opencensus.tags import tag_map as tag_map_module -from opencensus.tags import tag_value as tag_value_module - -MiB = 1 << 20 -FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend") -FRONTEND_KEY_FLOAT = tag_key_module.TagKey("my.org/keys/frontend-FLOAT") -FRONTEND_KEY_INT = tag_key_module.TagKey("my.org/keys/frontend-INT") -FRONTEND_KEY_STR = tag_key_module.TagKey("my.org/keys/frontend-STR") - -FRONTEND_KEY_CLEAN = "my_org_keys_frontend" -FRONTEND_KEY_FLOAT_CLEAN = "my_org_keys_frontend_FLOAT" -FRONTEND_KEY_INT_CLEAN = "my_org_keys_frontend_INT" -FRONTEND_KEY_STR_CLEAN = "my_org_keys_frontend_STR" - -VIDEO_SIZE_MEASURE = measure_module.MeasureFloat( - "my.org/measure/video_size_test2", "size of processed videos", "By") -VIDEO_SIZE_MEASURE_2 = measure_module.MeasureFloat( - "my.org/measure/video_size_test_2", "size of processed videos", "By") - -VIDEO_SIZE_MEASURE_FLOAT = measure_module.MeasureFloat( - "my.org/measure/video_size_test-float", "size of processed 
videos-float", - "By") - -VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2" -VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation( - [16.0 * MiB, 256.0 * MiB]) -VIDEO_SIZE_VIEW = view_module.View( - VIDEO_SIZE_VIEW_NAME, "processed video size over time", [FRONTEND_KEY], - VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION) - -TEST_TIME = datetime(2018, 12, 25, 1, 2, 3, 4) -TEST_TIME_STR = utils.to_iso_str(TEST_TIME) - - -class _Client(object): - def __init__(self, client_info=None): - self.client_info = client_info - - -class TestOptions(unittest.TestCase): - def test_options_blank(self): - options = stackdriver.Options() - - self.assertEqual(options.project_id, "") - self.assertEqual(options.resource, "") - - def test_options_parameters(self): - options = stackdriver.Options( - project_id="project-id", metric_prefix="sample") - self.assertEqual(options.project_id, "project-id") - self.assertEqual(options.metric_prefix, "sample") - - def test_default_monitoring_labels(self): - options = stackdriver.Options(default_monitoring_labels={ - label_key.LabelKey('lk_key', 'lk_desc'): - label_value.LabelValue('lk_value') - }) - - self.assertEqual(len(options.default_monitoring_labels), 1) - [[lk, lv]] = options.default_monitoring_labels.items() - self.assertEqual(lk.key, 'lk_key') - self.assertEqual(lk.description, 'lk_desc') - self.assertEqual(lv.value, 'lk_value') - - def test_default_monitoring_labels_blank(self): - with mock.patch('opencensus.ext.stackdriver.stats_exporter' - '.get_task_value') as mock_gtv: - options = stackdriver.Options() - - mock_gtv.assert_called() - - self.assertEqual(len(options.default_monitoring_labels), 1) - [[lk, lv]] = options.default_monitoring_labels.items() - self.assertEqual(lk.key, stackdriver.OPENCENSUS_TASK) - self.assertEqual(lk.description, - stackdriver.OPENCENSUS_TASK_DESCRIPTION) - self.assertEqual(lv.value, mock_gtv()) - - def test_bad_default_monitoring_labels(self): - with self.assertRaises(AttributeError): - 
stackdriver.Options( - default_monitoring_labels=[ - 'not a dict' - ]) - - with self.assertRaises(TypeError): - stackdriver.Options( - default_monitoring_labels={ - 'bad key': - label_value.LabelValue('clk_value') - }) - - with self.assertRaises(TypeError): - stackdriver.Options( - default_monitoring_labels={ - label_key.LabelKey('clk_key', 'clk_desc'): - 'bad value' - }) - - -class TestStackdriverStatsExporter(unittest.TestCase): - def test_constructor(self): - exporter = stackdriver.StackdriverStatsExporter() - - self.assertIsNone(exporter.client) - - def test_constructor_param(self): - exporter = stackdriver.StackdriverStatsExporter( - options=stackdriver.Options(project_id=1)) - self.assertEqual(exporter.options.project_id, 1) - - def test_null_options(self): - # Check that we don't suppress auth errors - auth_error = google.auth.exceptions.DefaultCredentialsError - mock_auth_error = mock.Mock() - mock_auth_error.side_effect = auth_error - with mock.patch('opencensus.ext.stackdriver.stats_exporter' - '.google.auth.default', mock_auth_error): - with self.assertRaises(auth_error): - stackdriver.new_stats_exporter() - - # Check that we get the default credentials' project ID - mock_auth_ok = mock.Mock() - mock_auth_ok.return_value = (None, 123) - with mock.patch('opencensus.ext.stackdriver.stats_exporter' - '.google.auth.default', mock_auth_ok): - sdse = stackdriver.new_stats_exporter() - self.assertEqual(sdse.options.project_id, 123) - - # Check that we raise if auth works but the project is empty - mock_auth_no_project = mock.Mock() - mock_auth_no_project.return_value = (None, '') - with mock.patch('opencensus.ext.stackdriver.stats_exporter' - '.google.auth.default', mock_auth_no_project): - with self.assertRaises(ValueError): - stackdriver.new_stats_exporter() - - def test_blank_project(self): - self.assertRaises(ValueError, stackdriver.new_stats_exporter, - stackdriver.Options(project_id="")) - - def test_not_blank_project(self): - patch_client = mock.patch( - 
('opencensus.ext.stackdriver.stats_exporter' - '.monitoring_v3.MetricServiceClient'), _Client) - - with patch_client: - exporter_created = stackdriver.new_stats_exporter( - stackdriver.Options(project_id=1)) - - self.assertIsInstance(exporter_created, - stackdriver.StackdriverStatsExporter) - - def test_get_user_agent_slug(self): - self.assertIn(__version__, stackdriver.get_user_agent_slug()) - - def test_client_info_user_agent(self): - """Check that the monitoring client sets a user agent. - - The user agent should include the library version. Note that this - assumes MetricServiceClient calls ClientInfo.to_user_agent to attach - the user agent as metadata to metric service API calls. - """ - patch_client = mock.patch( - 'opencensus.ext.stackdriver.stats_exporter.monitoring_v3' - '.MetricServiceClient', _Client) - - with patch_client: - exporter = stackdriver.new_stats_exporter( - stackdriver.Options(project_id=1)) - - self.assertIn(stackdriver.get_user_agent_slug(), - exporter.client.client_info.to_user_agent()) - - def test_sanitize(self): - # empty - result = stackdriver.sanitize_label("") - self.assertEqual(result, "") - - # all invalid - result = stackdriver.sanitize_label("/*^#$") - self.assertEqual(result, "key_") - - # all valid - result = stackdriver.sanitize_label("abc") - self.assertEqual(result, "abc") - - # mixed - result = stackdriver.sanitize_label("a.b/c") - self.assertEqual(result, "a_b_c") - - # starts with '_' - result = stackdriver.sanitize_label("_abc") - self.assertEqual(result, "key_abc") - - # starts with digit - result = stackdriver.sanitize_label("0abc") - self.assertEqual(result, "key_0abc") - - # too long - result = stackdriver.sanitize_label("0123456789" * 10) - self.assertEqual(len(result), 100) - self.assertEqual(result, "key_" + "0123456789" * 9 + "012345") - - def test_get_task_value(self): - task_value = stackdriver.get_task_value() - self.assertNotEqual(task_value, "") - - def test_namespaced_views(self): - view_name = "view-1" - 
expected_view_name_namespaced = ( - "custom.googleapis.com/opencensus/{}".format(view_name)) - view_name_namespaced = stackdriver.namespaced_view_name(view_name, "") - self.assertEqual(expected_view_name_namespaced, view_name_namespaced) - - expected_view_name_namespaced = "kubernetes.io/myorg/%s" % view_name - view_name_namespaced = stackdriver.namespaced_view_name( - view_name, "kubernetes.io/myorg") - self.assertEqual(expected_view_name_namespaced, view_name_namespaced) - - def test_stackdriver_register_exporter(self): - stats = stats_module.stats - view_manager = stats.view_manager - - exporter = mock.Mock() - if len(view_manager.measure_to_view_map.exporters) > 0: - view_manager.unregister_exporter( - view_manager.measure_to_view_map.exporters[0]) - view_manager.register_exporter(exporter) - - registered_exporters = len(view_manager.measure_to_view_map.exporters) - - self.assertEqual(registered_exporters, 1) - - @mock.patch('os.getpid', return_value=12345) - @mock.patch( - 'platform.uname', - return_value=('system', 'node', 'release', 'version', 'machine', - 'processor')) - def test_get_task_value_with_hostname(self, mock_uname, mock_pid): - self.assertEqual(stackdriver.get_task_value(), "py-12345@node") - - @mock.patch('os.getpid', return_value=12345) - @mock.patch( - 'platform.uname', - return_value=('system', '', 'release', 'version', 'machine', - 'processor')) - def test_get_task_value_without_hostname(self, mock_uname, mock_pid): - self.assertEqual(stackdriver.get_task_value(), "py-12345@localhost") - - def test_default_default_monitoring_labels(self): - """Check that metrics include OC task label by default.""" - exporter = stackdriver.StackdriverStatsExporter( - options=stackdriver.Options(project_id='project_id'), - client=mock.Mock()) - - lv = label_value.LabelValue('val') - val = value.ValueLong(value=123) - dt = datetime(2019, 3, 20, 21, 34, 0, 537954) - pp = point.Point(value=val, timestamp=dt) - ts = [ - time_series.TimeSeries(label_values=[lv], 
points=[pp], - start_timestamp=utils.to_iso_str(dt)) - ] - - desc = metric_descriptor.MetricDescriptor( - name='name', - description='description', - unit='unit', - type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, - label_keys=[label_key.LabelKey('key', 'description')] - ) - mm = metric.Metric(descriptor=desc, time_series=ts) - - sd_md = exporter.get_metric_descriptor(desc) - self.assertEqual(len(sd_md.labels), 2) - sd_descriptors = {ld.key: ld.description for ld in sd_md.labels} - self.assertIn('key', sd_descriptors) - self.assertEqual(sd_descriptors['key'], 'description') - self.assertIn(stackdriver.OPENCENSUS_TASK, sd_descriptors) - self.assertEqual( - sd_descriptors[stackdriver.OPENCENSUS_TASK], - stackdriver.OPENCENSUS_TASK_DESCRIPTION - ) - - sd_ts_list = exporter.create_time_series_list(mm) - self.assertEqual(len(sd_ts_list), 1) - [sd_ts] = sd_ts_list - self.assertIn('key', sd_ts.metric.labels) - self.assertEqual(sd_ts.metric.labels['key'], 'val') - self.assertIn(stackdriver.OPENCENSUS_TASK, sd_ts.metric.labels) - - def test_empty_default_monitoring_labels(self): - """Check that it's possible to remove the default OC task label.""" - exporter = stackdriver.StackdriverStatsExporter( - options=stackdriver.Options( - project_id='project_id', - default_monitoring_labels={}), - client=mock.Mock()) - - lv = label_value.LabelValue('val') - val = value.ValueLong(value=123) - dt = datetime(2019, 3, 20, 21, 34, 0, 537954) - pp = point.Point(value=val, timestamp=dt) - ts = [ - time_series.TimeSeries(label_values=[lv], points=[pp], - start_timestamp=utils.to_iso_str(dt)) - ] - - desc = metric_descriptor.MetricDescriptor( - name='name', - description='description', - unit='unit', - type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, - label_keys=[label_key.LabelKey('key', 'description')] - ) - mm = metric.Metric(descriptor=desc, time_series=ts) - - sd_md = exporter.get_metric_descriptor(desc) - self.assertEqual(len(sd_md.labels), 1) - [sd_label] = 
sd_md.labels - self.assertEqual(sd_label.key, 'key') - self.assertEqual(sd_label.description, 'description') - - sd_ts_list = exporter.create_time_series_list(mm) - self.assertEqual(len(sd_ts_list), 1) - [sd_ts] = sd_ts_list - self.assertIn('key', sd_ts.metric.labels) - self.assertEqual(sd_ts.metric.labels['key'], 'val') - self.assertNotIn(stackdriver.OPENCENSUS_TASK, sd_ts.metric.labels) - - def test_custom_default_monitoring_labels(self): - """Check that custom labels are exported and included in descriptor.""" - exporter = stackdriver.StackdriverStatsExporter( - options=stackdriver.Options( - project_id='project_id', - default_monitoring_labels={ - label_key.LabelKey('clk_key', 'clk_desc'): - label_value.LabelValue('clk_value') - }), - client=mock.Mock()) - - lv = label_value.LabelValue('val') - val = value.ValueLong(value=123) - dt = datetime(2019, 3, 20, 21, 34, 0, 537954) - pp = point.Point(value=val, timestamp=dt) - ts = [ - time_series.TimeSeries(label_values=[lv], points=[pp], - start_timestamp=utils.to_iso_str(dt)) - ] - - desc = metric_descriptor.MetricDescriptor( - name='name', - description='description', - unit='unit', - type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, - label_keys=[label_key.LabelKey('key', 'description')] - ) - mm = metric.Metric(descriptor=desc, time_series=ts) - - sd_md = exporter.get_metric_descriptor(desc) - self.assertEqual(len(sd_md.labels), 2) - sd_descriptors = {ld.key: ld.description for ld in sd_md.labels} - self.assertIn('key', sd_descriptors) - self.assertEqual(sd_descriptors['key'], 'description') - self.assertIn('clk_key', sd_descriptors) - self.assertEqual(sd_descriptors['clk_key'], 'clk_desc') - - sd_ts_list = exporter.create_time_series_list(mm) - self.assertEqual(len(sd_ts_list), 1) - [sd_ts] = sd_ts_list - self.assertIn('key', sd_ts.metric.labels) - self.assertEqual(sd_ts.metric.labels['key'], 'val') - self.assertIn('clk_key', sd_ts.metric.labels) - self.assertEqual(sd_ts.metric.labels['clk_key'], 
'clk_value') - - def test_get_metric_descriptor(self): - exporter = stackdriver.StackdriverStatsExporter( - options=stackdriver.Options( - project_id='project_id'), - client=mock.Mock()) - - oc_md = metric_descriptor.MetricDescriptor( - name='name', - description='description', - unit='unit', - type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, - label_keys=[label_key.LabelKey('ck', 'cd')] - ) - - sd_md = exporter.get_metric_descriptor(oc_md) - self.assertEqual( - sd_md.metric_kind, - monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE) - self.assertEqual( - sd_md.value_type, - monitoring_v3.enums.MetricDescriptor.ValueType.INT64) - - self.assertIsInstance(sd_md, monitoring_v3.types.MetricDescriptor) - exporter.client.create_metric_descriptor.assert_not_called() - - def test_get_metric_descriptor_bad_type(self): - exporter = stackdriver.StackdriverStatsExporter( - options=stackdriver.Options(project_id='project_id'), - client=mock.Mock()) - - bad_type_oc_md = metric_descriptor.MetricDescriptor( - name='name', - description='description', - unit='unit', - # Need a valid type to create the descriptor - type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, - label_keys=[label_key.LabelKey('key', 'description')] - ) - bad_type_oc_md._type = 100 - - with self.assertRaises(TypeError): - exporter.get_metric_descriptor(bad_type_oc_md) - - def test_get_metric_descriptor_custom_prefix(self): - - exporter = stackdriver.StackdriverStatsExporter( - options=stackdriver.Options( - metric_prefix='metric_prefix', - project_id='project_id'), - client=mock.Mock()) - - oc_md = metric_descriptor.MetricDescriptor( - name='name', - description='description', - unit='unit', - type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, - label_keys=[label_key.LabelKey('ck', 'cd')] - ) - - sd_md = exporter.get_metric_descriptor(oc_md) - self.assertIn('metric_prefix', sd_md.type) - self.assertIn('metric_prefix', sd_md.name) - - def test_register_metric_descriptor(self): - exporter 
= stackdriver.StackdriverStatsExporter( - options=stackdriver.Options( - metric_prefix='metric_prefix', - project_id='project_id'), - client=mock.Mock()) - - oc_md = metric_descriptor.MetricDescriptor( - name='name', - description='description', - unit='unit', - type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, - label_keys=[label_key.LabelKey('key', 'description')] - ) - - exporter.register_metric_descriptor(oc_md) - self.assertEqual( - exporter.client.create_metric_descriptor.call_count, - 1 - ) - exporter.register_metric_descriptor(oc_md) - self.assertEqual( - exporter.client.create_metric_descriptor.call_count, - 1 - ) - - def test_export_metrics(self): - lv = label_value.LabelValue('val') - val = value.ValueLong(value=123) - dt = datetime(2019, 3, 20, 21, 34, 0, 537954) - pp = point.Point(value=val, timestamp=dt) - - ts = [ - time_series.TimeSeries(label_values=[lv], points=[pp], - start_timestamp=utils.to_iso_str(dt)) - ] - - desc = metric_descriptor.MetricDescriptor( - name='name', - description='description', - unit='unit', - type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, - label_keys=[label_key.LabelKey('key', 'description')] - ) - - mm = metric.Metric(descriptor=desc, time_series=ts) - - exporter = stackdriver.StackdriverStatsExporter(client=mock.Mock()) - exporter.export_metrics([mm]) - - self.assertEqual(exporter.client.create_time_series.call_count, 1) - sd_args = exporter.client.create_time_series.call_args[0][1] - self.assertEqual(len(sd_args), 1) - [sd_arg] = exporter.client.create_time_series.call_args[0][1] - self.assertEqual(sd_arg.points[0].value.int64_value, 123) - - -class MockPeriodicMetricTask(object): - """Testing mock of metrics.transport.PeriodicMetricTask. - - Simulate calling export asynchronously from another thread synchronously - from this one. 
- """ - def __init__(self, interval=None, function=None, args=None, kwargs=None): - self.function = function - self.logger = mock.Mock() - self.start = mock.Mock() - self.run = mock.Mock() - - def step(self): - try: - self.function() - except transport_module.TransportError as ex: - self.logger.exception(ex) - self.cancel() - except Exception: - self.logger.exception("Error handling metric export") - - -class MockGetExporterThread(object): - """Intercept calls to get_exporter_thread. - - To get a reference to the running PeriodicMetricTask created by - get_exporter_thread. - """ - def __init__(self): - self.transport = None - - def __enter__(self): - original_func = transport_module.get_exporter_thread - - def get_exporter_thread(*aa, **kw): - self.transport = original_func(*aa, **kw) - - mock_get = mock.Mock() - mock_get.side_effect = get_exporter_thread - self.patcher = mock.patch( - ('opencensus.ext.stackdriver.stats_exporter' - '.transport.get_exporter_thread'), - mock_get) - self.patcher.start() - return self - - def __exit__(self, type, value, traceback): - self.patcher.stop() - - -@mock.patch('opencensus.ext.stackdriver.stats_exporter' - '.monitoring_v3.MetricServiceClient') -@mock.patch('opencensus.ext.stackdriver.stats_exporter' - '.stats.stats') -class TestAsyncStatsExport(unittest.TestCase): - """Check that metrics are exported using the exporter thread.""" - - def setUp(self): - patcher = mock.patch( - 'opencensus.metrics.transport.PeriodicMetricTask', - MockPeriodicMetricTask) - patcher.start() - self.addCleanup(patcher.stop) - - def test_export_empty(self, mock_stats, mock_client): - """Check that we don't attempt to export empty metric sets.""" - - mock_stats.get_metrics.return_value = [] - - with MockGetExporterThread() as mget: - exporter = stackdriver.new_stats_exporter( - stackdriver.Options(project_id=1)) - mget.transport.step() - - exporter.client.create_metric_descriptor.assert_not_called() - 
exporter.client.create_time_series.assert_not_called() - - def test_export_single_metric(self, mock_stats, mock_client): - """Check that we can export a set of a single metric.""" - - lv = label_value.LabelValue('val') - val = value.ValueLong(value=123) - dt = datetime(2019, 3, 20, 21, 34, 0, 537954) - pp = point.Point(value=val, timestamp=dt) - - ts = [ - time_series.TimeSeries(label_values=[lv], points=[pp], - start_timestamp=utils.to_iso_str(dt)) - ] - - desc = metric_descriptor.MetricDescriptor( - name='name2', - description='description2', - unit='unit2', - type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, - label_keys=[label_key.LabelKey('key', 'description')] - ) - - mm = metric.Metric(descriptor=desc, time_series=ts) - mock_stats.get_metrics.return_value = [mm] - - with MockGetExporterThread() as mget: - exporter = stackdriver.new_stats_exporter( - stackdriver.Options(project_id=1)) - mget.transport.step() - - exporter.client.create_metric_descriptor.assert_called() - self.assertEqual( - exporter.client.create_metric_descriptor.call_count, - 1) - md_call_arg =\ - exporter.client.create_metric_descriptor.call_args[0][1] - self.assertEqual( - md_call_arg.metric_kind, - monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE - ) - self.assertEqual( - md_call_arg.value_type, - monitoring_v3.enums.MetricDescriptor.ValueType.INT64 - ) - - exporter.client.create_time_series.assert_called() - self.assertEqual( - exporter.client.create_time_series.call_count, - 1) - ts_call_arg = exporter.client.create_time_series.call_args[0][1] - self.assertEqual(len(ts_call_arg), 1) - self.assertEqual(len(ts_call_arg[0].points), 1) - self.assertEqual(ts_call_arg[0].points[0].value.int64_value, 123) - - -class TestCreateTimeseries(unittest.TestCase): - - def setUp(self): - patcher = mock.patch( - 'opencensus.ext.stackdriver.stats_exporter.stats.stats', - stats_module._Stats()) - patcher.start() - self.addCleanup(patcher.stop) - - def check_labels(self, - actual_labels, - 
expected_labels, - include_opencensus=False): - actual_labels = dict(actual_labels) - if include_opencensus: - opencensus_tag = actual_labels.pop(stackdriver.OPENCENSUS_TASK) - self.assertIsNotNone(opencensus_tag) - self.assertIn("py-", opencensus_tag) - self.assertDictEqual(actual_labels, expected_labels) - - @mock.patch('opencensus.ext.stackdriver.stats_exporter.' - 'monitored_resource.get_instance', - return_value=None) - def test_create_batched_time_series(self, monitor_resource_mock): - client = mock.Mock() - v_data = view_data_module.ViewData( - view=VIDEO_SIZE_VIEW, - start_time=TEST_TIME_STR, - end_time=TEST_TIME_STR) - v_data.record(context=tag_map_module.TagMap(), value=2, timestamp=None) - view_data = [v_data] - - option = stackdriver.Options(project_id="project-test") - exporter = stackdriver.StackdriverStatsExporter( - options=option, client=client) - - view_data = [metric_utils.view_data_to_metric(view_data[0], TEST_TIME)] - - time_series_batches = exporter.create_batched_time_series(view_data, 1) - - self.assertEqual(len(time_series_batches), 1) - [time_series_batch] = time_series_batches - self.assertEqual(len(time_series_batch), 1) - [time_series] = time_series_batch - self.assertEqual( - time_series.metric.type, - 'custom.googleapis.com/opencensus/' + VIDEO_SIZE_VIEW_NAME) - self.check_labels( - time_series.metric.labels, {}, include_opencensus=True) - - @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
- 'monitored_resource.get_instance', - return_value=None) - def test_create_batched_time_series_with_many(self, monitor_resource_mock): - client = mock.Mock() - - # First view with 3 - view_name1 = "view-name1" - view1 = view_module.View(view_name1, "test description", ['test'], - VIDEO_SIZE_MEASURE, - aggregation_module.LastValueAggregation()) - v_data1 = view_data_module.ViewData( - view=view1, start_time=TEST_TIME_STR, end_time=TEST_TIME_STR) - v_data1.record(context=tag_map_module.TagMap({'test': '1'}), value=7, - timestamp=None) - v_data1.record(context=tag_map_module.TagMap({'test': '2'}), value=5, - timestamp=None) - v_data1.record(context=tag_map_module.TagMap({'test': '3'}), value=3, - timestamp=None) - - # Second view with 2 - view_name2 = "view-name2" - view2 = view_module.View(view_name2, "test description", ['test'], - VIDEO_SIZE_MEASURE, - aggregation_module.LastValueAggregation()) - v_data2 = view_data_module.ViewData( - view=view2, start_time=TEST_TIME_STR, end_time=TEST_TIME_STR) - v_data2.record(context=tag_map_module.TagMap({'test': '1'}), value=7, - timestamp=None) - v_data2.record(context=tag_map_module.TagMap({'test': '2'}), value=5, - timestamp=None) - - view_data = [v_data1, v_data2] - view_data = [metric_utils.view_data_to_metric(vd, TEST_TIME) - for vd in view_data] - - option = stackdriver.Options(project_id="project-test") - exporter = stackdriver.StackdriverStatsExporter( - options=option, client=client) - - time_series_batches = exporter.create_batched_time_series(view_data, 2) - - self.assertEqual(len(time_series_batches), 3) - [tsb1, tsb2, tsb3] = time_series_batches - self.assertEqual(len(tsb1), 2) - self.assertEqual(len(tsb2), 2) - self.assertEqual(len(tsb3), 1) - - def setup_create_timeseries_test(self): - client = mock.Mock() - execution_context.clear() - - option = stackdriver.Options( - project_id="project-test", resource="global") - exporter = stackdriver.StackdriverStatsExporter( - options=option, client=client) - - stats = 
stats_module.stats - view_manager = stats.view_manager - stats_recorder = stats.stats_recorder - - if len(view_manager.measure_to_view_map.exporters) > 0: - view_manager.unregister_exporter( - view_manager.measure_to_view_map.exporters[0]) - - view_manager.register_exporter(exporter) - return view_manager, stats_recorder, exporter - - @mock.patch('opencensus.ext.stackdriver.stats_exporter.' - 'monitored_resource.get_instance', - return_value=None) - def test_create_timeseries(self, monitor_resource_mock): - view_manager, stats_recorder, exporter = \ - self.setup_create_timeseries_test() - - view_manager.register_view(VIDEO_SIZE_VIEW) - - tag_value = tag_value_module.TagValue("1200") - tag_map = tag_map_module.TagMap() - tag_map.insert(FRONTEND_KEY, tag_value) - - measure_map = stats_recorder.new_measurement_map() - measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) - measure_map.record(tag_map) - - v_data = measure_map.measure_to_view_map.get_view( - VIDEO_SIZE_VIEW_NAME, None) - - v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - - time_series_list = exporter.create_time_series_list(v_data) - - self.assertEqual(len(time_series_list), 1) - time_series = time_series_list[0] - self.assertEqual(time_series.resource.type, "global") - self.assertEqual( - time_series_list[0].metric.type, - "custom.googleapis.com/opencensus/my.org/views/video_size_test2") - self.check_labels( - time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, - include_opencensus=True) - self.assertIsNotNone(time_series.resource) - - self.assertEqual(len(time_series.points), 1) - value = time_series.points[0].value - self.assertEqual(value.distribution_value.count, 1) - - time_series_list = exporter.create_time_series_list(v_data) - - self.assertEqual(len(time_series_list), 1) - time_series = time_series_list[0] - self.check_labels( - time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, - include_opencensus=True) - self.assertIsNotNone(time_series.resource) - - 
self.assertEqual(len(time_series.points), 1) - value = time_series.points[0].value - self.assertEqual(value.distribution_value.count, 1) - - @mock.patch('opencensus.ext.stackdriver.stats_exporter.' - 'monitored_resource.get_instance') - def test_create_timeseries_with_resource(self, monitor_resource_mock): - - client = mock.Mock() - execution_context.clear() - - option = stackdriver.Options(project_id="project-test", resource="") - exporter = stackdriver.StackdriverStatsExporter( - options=option, client=client) - - stats = stats_module.stats - view_manager = stats.view_manager - stats_recorder = stats.stats_recorder - - if len(view_manager.measure_to_view_map.exporters) > 0: - view_manager.unregister_exporter( - view_manager.measure_to_view_map.exporters[0]) - - view_manager.register_exporter(exporter) - view_manager.register_view(VIDEO_SIZE_VIEW) - - tag_value = tag_value_module.TagValue("1200") - tag_map = tag_map_module.TagMap() - tag_map.insert(FRONTEND_KEY, tag_value) - - measure_map = stats_recorder.new_measurement_map() - measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) - measure_map.record(tag_map) - - v_data = measure_map.measure_to_view_map.get_view( - VIDEO_SIZE_VIEW_NAME, None) - - v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - - # check for gce_instance monitored resource - mocked_labels = { - 'instance_id': 'my-instance', - 'project_id': 'my-project', - 'zone': 'us-east1', - 'k8s.io/pod/name': 'localhost', - 'k8s.io/namespace/name': 'namespace', - } - - mock_resource = mock.Mock() - mock_resource.get_type.return_value = 'gce_instance' - mock_resource.get_labels.return_value = mocked_labels - monitor_resource_mock.return_value = mock_resource - - time_series_list = exporter.create_time_series_list(v_data) - self.assertEqual(len(time_series_list), 1) - time_series = time_series_list[0] - self.assertEqual(time_series.resource.type, "gce_instance") - self.check_labels( - time_series.resource.labels, { - 'instance_id': 
'my-instance', - 'project_id': 'my-project', - 'zone': 'us-east1', - }) - self.assertEqual( - time_series.metric.type, - "custom.googleapis.com/opencensus/my.org/views/video_size_test2") - self.assertIsNotNone(time_series) - - time_series_list = exporter.create_time_series_list(v_data) - self.assertEqual(len(time_series_list), 1) - time_series = time_series_list[0] - - self.assertEqual( - time_series.metric.type, - "custom.googleapis.com/opencensus/my.org/views/video_size_test2") - - # check for k8s_container monitored resource - mocked_labels = { - 'instance_id': 'my-instance', - 'project_id': 'my-project', - 'zone': 'us-east1', - 'k8s.io/pod/name': 'localhost', - 'k8s.io/cluster/name': 'cluster', - 'k8s.io/namespace/name': 'namespace', - } - - mock_resource = mock.Mock() - mock_resource.get_type.return_value = 'k8s_container' - mock_resource.get_labels.return_value = mocked_labels - monitor_resource_mock.return_value = mock_resource - - time_series_list = exporter.create_time_series_list(v_data) - self.assertEqual(len(time_series_list), 1) - time_series = time_series_list[0] - self.assertEqual(time_series.resource.type, "k8s_container") - self.check_labels( - time_series.resource.labels, { - 'project_id': 'my-project', - 'location': 'us-east1', - 'cluster_name': 'cluster', - 'pod_name': 'localhost', - 'namespace_name': 'namespace', - }) - self.assertEqual( - time_series.metric.type, - "custom.googleapis.com/opencensus/my.org/views/video_size_test2") - self.assertIsNotNone(time_series) - - # check for aws_ec2_instance monitored resource - mocked_labels = { - 'instance_id': 'my-instance', - 'aws_account': 'my-project', - 'region': 'us-east1', - } - - mock_resource = mock.Mock() - mock_resource.get_type.return_value = 'aws_ec2_instance' - mock_resource.get_labels.return_value = mocked_labels - monitor_resource_mock.return_value = mock_resource - - time_series_list = exporter.create_time_series_list(v_data) - self.assertEqual(len(time_series_list), 1) - time_series = 
time_series_list[0] - self.assertEqual(time_series.resource.type, "aws_ec2_instance") - self.check_labels( - time_series.resource.labels, { - 'instance_id': 'my-instance', - 'aws_account': 'my-project', - 'region': 'aws:us-east1', - }) - self.assertEqual( - time_series.metric.type, - "custom.googleapis.com/opencensus/my.org/views/video_size_test2") - self.assertIsNotNone(time_series) - - # check for out of box monitored resource - mock_resource = mock.Mock() - mock_resource.get_type.return_value = '' - mock_resource.get_labels.return_value = mock.Mock() - monitor_resource_mock.return_value = mock_resource - - time_series_list = exporter.create_time_series_list(v_data) - self.assertEqual(len(time_series_list), 1) - time_series = time_series_list[0] - self.assertEqual(time_series.resource.type, 'global') - self.check_labels(time_series.resource.labels, {}) - self.assertEqual( - time_series.metric.type, - "custom.googleapis.com/opencensus/my.org/views/video_size_test2") - self.assertIsNotNone(time_series) - - @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
- 'monitored_resource.get_instance', - return_value=None) - def test_create_timeseries_str_tagvalue(self, monitor_resource_mock): - view_manager, stats_recorder, exporter = \ - self.setup_create_timeseries_test() - - agg_1 = aggregation_module.LastValueAggregation(value=2) - view_name1 = "view-name1" - new_view1 = view_module.View( - view_name1, "processed video size over time", [FRONTEND_KEY_INT], - VIDEO_SIZE_MEASURE_2, agg_1) - - view_manager.register_view(new_view1) - - tag_value_int = tag_value_module.TagValue("Abc") - tag_map = tag_map_module.TagMap() - tag_map.insert(FRONTEND_KEY_INT, tag_value_int) - - measure_map = stats_recorder.new_measurement_map() - measure_map.measure_int_put(VIDEO_SIZE_MEASURE_2, 25 * MiB) - measure_map.record(tag_map) - - v_data = measure_map.measure_to_view_map.get_view(view_name1, None) - - v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - - time_series_list = exporter.create_time_series_list(v_data) - self.assertEqual(len(time_series_list), 1) - time_series = time_series_list[0] - - self.check_labels( - time_series.metric.labels, {FRONTEND_KEY_INT_CLEAN: "Abc"}, - include_opencensus=True) - self.assertIsNotNone(time_series.resource) - - self.assertEqual(len(time_series.points), 1) - expected_value = monitoring_v3.types.TypedValue() - # TODO: #565 - expected_value.double_value = 25.0 * MiB - self.assertEqual(time_series.points[0].value, expected_value) - - @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
- 'monitored_resource.get_instance', - return_value=None) - def test_create_timeseries_str_tagvalue_count_aggregtation( - self, monitor_resource_mock): - view_manager, stats_recorder, exporter = \ - self.setup_create_timeseries_test() - - agg_1 = aggregation_module.CountAggregation(count=2) - view_name1 = "view-name1" - new_view1 = view_module.View( - view_name1, "processed video size over time", [FRONTEND_KEY_INT], - VIDEO_SIZE_MEASURE_2, agg_1) - - view_manager.register_view(new_view1) - - tag_value_int = tag_value_module.TagValue("Abc") - tag_map = tag_map_module.TagMap() - tag_map.insert(FRONTEND_KEY_INT, tag_value_int) - - measure_map = stats_recorder.new_measurement_map() - measure_map.measure_int_put(VIDEO_SIZE_MEASURE_2, 25 * MiB) - measure_map.record(tag_map) - - v_data = measure_map.measure_to_view_map.get_view(view_name1, None) - - v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - - time_series_list = exporter.create_time_series_list(v_data) - self.assertEqual(len(time_series_list), 1) - time_series = time_series_list[0] - self.check_labels( - time_series.metric.labels, {FRONTEND_KEY_INT_CLEAN: "Abc"}, - include_opencensus=True) - self.assertIsNotNone(time_series.resource) - - self.assertEqual(len(time_series.points), 1) - expected_value = monitoring_v3.types.TypedValue() - expected_value.int64_value = 3 - self.assertEqual(time_series.points[0].value, expected_value) - - @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
- 'monitored_resource.get_instance', - return_value=None) - def test_create_timeseries_last_value_float_tagvalue( - self, monitor_resource_mock): - view_manager, stats_recorder, exporter = \ - self.setup_create_timeseries_test() - - agg_2 = aggregation_module.LastValueAggregation(value=2.2 * MiB) - view_name2 = "view-name2" - new_view2 = view_module.View( - view_name2, "processed video size over time", [FRONTEND_KEY_FLOAT], - VIDEO_SIZE_MEASURE_FLOAT, agg_2) - - view_manager.register_view(new_view2) - - tag_value_float = tag_value_module.TagValue("Abc") - tag_map = tag_map_module.TagMap() - tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float) - - measure_map = stats_recorder.new_measurement_map() - measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25.7 * MiB) - measure_map.record(tag_map) - - v_data = measure_map.measure_to_view_map.get_view(view_name2, None) - - v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - - time_series_list = exporter.create_time_series_list(v_data) - self.assertEqual(len(time_series_list), 1) - time_series = time_series_list[0] - self.check_labels( - time_series.metric.labels, {FRONTEND_KEY_FLOAT_CLEAN: "Abc"}, - include_opencensus=True) - self.assertIsNotNone(time_series.resource) - - self.assertEqual(len(time_series.points), 1) - expected_value = monitoring_v3.types.TypedValue() - expected_value.double_value = 25.7 * MiB - self.assertEqual(time_series.points[0].value, expected_value) - - @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
- 'monitored_resource.get_instance', - return_value=None) - def test_create_timeseries_float_tagvalue(self, monitor_resource_mock): - client = mock.Mock() - - option = stackdriver.Options( - project_id="project-test", resource="global") - exporter = stackdriver.StackdriverStatsExporter( - options=option, client=client) - - stats = stats_module.stats - view_manager = stats.view_manager - stats_recorder = stats.stats_recorder - - if len(view_manager.measure_to_view_map.exporters) > 0: - view_manager.unregister_exporter( - view_manager.measure_to_view_map.exporters[0]) - - view_manager.register_exporter(exporter) - - agg_3 = aggregation_module.SumAggregation(sum=2.2) - view_name3 = "view-name3" - new_view3 = view_module.View( - view_name3, "processed video size over time", [FRONTEND_KEY_FLOAT], - VIDEO_SIZE_MEASURE_FLOAT, agg_3) - - view_manager.register_view(new_view3) - - tag_value_float = tag_value_module.TagValue("1200") - tag_map = tag_map_module.TagMap() - tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float) - - measure_map = stats_recorder.new_measurement_map() - measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB) - measure_map.record(tag_map) - - v_data = measure_map.measure_to_view_map.get_view(view_name3, None) - - v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - - time_series_list = exporter.create_time_series_list(v_data) - self.assertEqual(len(time_series_list), 1) - [time_series] = time_series_list - self.assertEqual(time_series.metric.type, - "custom.googleapis.com/opencensus/view-name3") - self.check_labels( - time_series.metric.labels, {FRONTEND_KEY_FLOAT_CLEAN: "1200"}, - include_opencensus=True) - self.assertIsNotNone(time_series.resource) - - self.assertEqual(len(time_series.points), 1) - expected_value = monitoring_v3.types.TypedValue() - expected_value.double_value = 2.2 + 25 * MiB - self.assertEqual(time_series.points[0].value, expected_value) - - @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
- 'monitored_resource.get_instance', - return_value=None) - def test_create_timeseries_multiple_tag_values(self, - monitoring_resoure_mock): - view_manager, stats_recorder, exporter = \ - self.setup_create_timeseries_test() - - view_manager.register_view(VIDEO_SIZE_VIEW) - - measure_map = stats_recorder.new_measurement_map() - - # Add first point with one tag value - tag_map = tag_map_module.TagMap() - tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200")) - measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) - measure_map.record(tag_map) - - # Add second point with different tag value - tag_map = tag_map_module.TagMap() - tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1400")) - measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 12 * MiB) - measure_map.record(tag_map) - - v_data = measure_map.measure_to_view_map.get_view( - VIDEO_SIZE_VIEW_NAME, None) - - v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - - time_series_list = exporter.create_time_series_list(v_data) - - self.assertEqual(len(time_series_list), 2) - ts_by_frontend = { - ts.metric.labels.get(FRONTEND_KEY_CLEAN): ts - for ts in time_series_list - } - self.assertEqual(set(ts_by_frontend.keys()), {"1200", "1400"}) - ts1 = ts_by_frontend["1200"] - ts2 = ts_by_frontend["1400"] - - # Verify first time series - self.assertEqual(ts1.resource.type, "global") - self.assertEqual( - ts1.metric.type, - "custom.googleapis.com/opencensus/my.org/views/video_size_test2") - self.assertIsNotNone(ts1.resource) - - self.assertEqual(len(ts1.points), 1) - value1 = ts1.points[0].value - self.assertEqual(value1.distribution_value.count, 1) - - # Verify second time series - self.assertEqual(ts2.resource.type, "global") - self.assertEqual( - ts2.metric.type, - "custom.googleapis.com/opencensus/my.org/views/video_size_test2") - self.assertIsNotNone(ts2.resource) - - self.assertEqual(len(ts2.points), 1) - value2 = ts2.points[0].value - self.assertEqual(value2.distribution_value.count, 1) - - 
@mock.patch('opencensus.ext.stackdriver.stats_exporter.' - 'monitored_resource.get_instance', - return_value=None) - def test_create_timeseries_disjoint_tags(self, monitoring_resoure_mock): - view_manager, stats_recorder, exporter = \ - self.setup_create_timeseries_test() - - # Register view with two tags - view_name = "view-name" - view = view_module.View(view_name, "test description", - [FRONTEND_KEY, FRONTEND_KEY_FLOAT], - VIDEO_SIZE_MEASURE, - aggregation_module.SumAggregation()) - - view_manager.register_view(view) - - # Add point with one tag in common and one different tag - measure_map = stats_recorder.new_measurement_map() - tag_map = tag_map_module.TagMap() - tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200")) - tag_map.insert(FRONTEND_KEY_STR, tag_value_module.TagValue("1800")) - measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) - measure_map.record(tag_map) - - v_data = measure_map.measure_to_view_map.get_view(view_name, None) - - v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - - time_series_list = exporter.create_time_series_list(v_data) - - self.assertEqual(len(time_series_list), 1) - [time_series] = time_series_list - - # Verify first time series - self.assertEqual(time_series.resource.type, "global") - self.assertEqual(time_series.metric.type, - "custom.googleapis.com/opencensus/" + view_name) - self.check_labels( - time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, - include_opencensus=True) - self.assertIsNotNone(time_series.resource) - - self.assertEqual(len(time_series.points), 1) - expected_value = monitoring_v3.types.TypedValue() - # TODO: #565 - expected_value.double_value = 25.0 * MiB - self.assertEqual(time_series.points[0].value, expected_value) - - def test_create_timeseries_from_distribution(self): - """Check for explicit 0-bound bucket for SD export.""" - agg = aggregation_module.DistributionAggregation() - - view = view_module.View( - name="example.org/test_view", - 
description="example.org/test_view", - columns=['tag_key'], - measure=mock.Mock(), - aggregation=agg, - ) - - v_data = view_data_module.ViewData( - view=view, - start_time=TEST_TIME_STR, - end_time=TEST_TIME_STR, - ) - - # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8] - dad = aggregation_data_module.DistributionAggregationData( - mean_data=4.5, - count_data=100, - sum_of_sqd_deviations=825, - counts_per_bucket=[20, 20, 20, 20, 20], - bounds=[2, 4, 6, 8], - exemplars={mock.Mock() for ii in range(5)} - ) - v_data._tag_value_aggregation_data_map = {('tag_value',): dad} - - v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - - exporter = stackdriver.StackdriverStatsExporter() - time_series_list = exporter.create_time_series_list(v_data) - self.assertEqual(len(time_series_list), 1) - [time_series] = time_series_list - - self.check_labels( - time_series.metric.labels, {'tag_key': 'tag_value'}, - include_opencensus=True) - self.assertEqual(len(time_series.points), 1) - [point] = time_series.points - dv = point.value.distribution_value - self.assertEqual(100, dv.count) - self.assertEqual(825.0, dv.sum_of_squared_deviation) - self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts) - self.assertEqual([0, 2, 4, 6, 8], - dv.bucket_options.explicit_buckets.bounds) - - def test_create_timeseries_multiple_tags(self): - """Check that exporter creates timeseries for multiple tag values. - - create_time_series_list should return a time series for each set of - values in the tag value aggregation map. 
- """ - agg = aggregation_module.CountAggregation() - - view = view_module.View( - name="example.org/test_view", - description="example.org/test_view", - columns=[tag_key_module.TagKey('color'), - tag_key_module.TagKey('shape')], - measure=mock.Mock(), - aggregation=agg, - ) - - v_data = view_data_module.ViewData( - view=view, - start_time=TEST_TIME_STR, - end_time=TEST_TIME_STR, - ) - - rs_count = aggregation_data_module.CountAggregationData(10) - bc_count = aggregation_data_module.CountAggregationData(20) - v_data._tag_value_aggregation_data_map = { - ('red', 'square'): rs_count, - ('blue', 'circle'): bc_count, - } - - v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - - exporter = stackdriver.StackdriverStatsExporter() - time_series_list = exporter.create_time_series_list(v_data) - - self.assertEqual(len(time_series_list), 2) - self.assertEqual(len(time_series_list[0].points), 1) - self.assertEqual(len(time_series_list[1].points), 1) - - ts_by_color = {ts.metric.labels.get('color'): ts - for ts in time_series_list} - rs_ts = ts_by_color['red'] - bc_ts = ts_by_color['blue'] - self.assertEqual(rs_ts.metric.labels.get('shape'), 'square') - self.assertEqual(bc_ts.metric.labels.get('shape'), 'circle') - self.assertEqual(rs_ts.points[0].value.int64_value, 10) - self.assertEqual(bc_ts.points[0].value.int64_value, 20) - - def test_create_timeseries_invalid_aggregation(self): - v_data = mock.Mock(spec=view_data_module.ViewData) - v_data.view.name = "example.org/base_view" - v_data.view.columns = [tag_key_module.TagKey('base_key')] - v_data.start_time = TEST_TIME_STR - v_data.end_time = TEST_TIME_STR - - base_data = None - v_data.tag_value_aggregation_data_map = { - (None,): base_data, - } - - exporter = stackdriver.StackdriverStatsExporter( - options=mock.Mock(), - client=mock.Mock(), - ) - self.assertRaises(TypeError, exporter.create_time_series_list, v_data, - "", "") +# flake8: noqa +# # Copyright 2018, OpenCensus Authors +# # +# # Licensed under the 
Apache License, Version 2.0 (the "License"); +# # you may not use this file except in compliance with the License. +# # You may obtain a copy of the License at +# # +# # http://www.apache.org/licenses/LICENSE-2.0 +# # +# # Unless required by applicable law or agreed to in writing, software +# # distributed under the License is distributed on an "AS IS" BASIS, +# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# # See the License for the specific language governing permissions and +# # limitations under the License. + +# import unittest +# from datetime import datetime + +# import google.auth +# import mock +# from google.cloud import monitoring_v3 + +# from opencensus.common import utils +# from opencensus.common.version import __version__ +# from opencensus.ext.stackdriver import stats_exporter as stackdriver +# from opencensus.metrics import label_key, label_value +# from opencensus.metrics import transport as transport_module +# from opencensus.metrics.export import ( +# metric, +# metric_descriptor, +# point, +# time_series, +# value, +# ) +# from opencensus.stats import aggregation as aggregation_module +# from opencensus.stats import aggregation_data as aggregation_data_module +# from opencensus.stats import execution_context +# from opencensus.stats import measure as measure_module +# from opencensus.stats import metric_utils +# from opencensus.stats import stats as stats_module +# from opencensus.stats import view as view_module +# from opencensus.stats import view_data as view_data_module +# from opencensus.tags import tag_key as tag_key_module +# from opencensus.tags import tag_map as tag_map_module +# from opencensus.tags import tag_value as tag_value_module + +# MiB = 1 << 20 +# FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend") +# FRONTEND_KEY_FLOAT = tag_key_module.TagKey("my.org/keys/frontend-FLOAT") +# FRONTEND_KEY_INT = tag_key_module.TagKey("my.org/keys/frontend-INT") +# FRONTEND_KEY_STR = 
tag_key_module.TagKey("my.org/keys/frontend-STR") + +# FRONTEND_KEY_CLEAN = "my_org_keys_frontend" +# FRONTEND_KEY_FLOAT_CLEAN = "my_org_keys_frontend_FLOAT" +# FRONTEND_KEY_INT_CLEAN = "my_org_keys_frontend_INT" +# FRONTEND_KEY_STR_CLEAN = "my_org_keys_frontend_STR" + +# VIDEO_SIZE_MEASURE = measure_module.MeasureFloat( +# "my.org/measure/video_size_test2", "size of processed videos", "By") +# VIDEO_SIZE_MEASURE_2 = measure_module.MeasureFloat( +# "my.org/measure/video_size_test_2", "size of processed videos", "By") + +# VIDEO_SIZE_MEASURE_FLOAT = measure_module.MeasureFloat( +# "my.org/measure/video_size_test-float", "size of processed videos-float", +# "By") + +# VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2" +# VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation( +# [16.0 * MiB, 256.0 * MiB]) +# VIDEO_SIZE_VIEW = view_module.View( +# VIDEO_SIZE_VIEW_NAME, "processed video size over time", [FRONTEND_KEY], +# VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION) + +# TEST_TIME = datetime(2018, 12, 25, 1, 2, 3, 4) +# TEST_TIME_STR = utils.to_iso_str(TEST_TIME) + + +# class _Client(object): +# def __init__(self, client_info=None): +# self.client_info = client_info + + +# class TestOptions(unittest.TestCase): +# def test_options_blank(self): +# options = stackdriver.Options() + +# self.assertEqual(options.project_id, "") +# self.assertEqual(options.resource, "") + +# def test_options_parameters(self): +# options = stackdriver.Options( +# project_id="project-id", metric_prefix="sample") +# self.assertEqual(options.project_id, "project-id") +# self.assertEqual(options.metric_prefix, "sample") + +# def test_default_monitoring_labels(self): +# options = stackdriver.Options(default_monitoring_labels={ +# label_key.LabelKey('lk_key', 'lk_desc'): +# label_value.LabelValue('lk_value') +# }) + +# self.assertEqual(len(options.default_monitoring_labels), 1) +# [[lk, lv]] = options.default_monitoring_labels.items() +# self.assertEqual(lk.key, 'lk_key') +# 
self.assertEqual(lk.description, 'lk_desc') +# self.assertEqual(lv.value, 'lk_value') + +# def test_default_monitoring_labels_blank(self): +# with mock.patch('opencensus.ext.stackdriver.stats_exporter' +# '.get_task_value') as mock_gtv: +# options = stackdriver.Options() + +# mock_gtv.assert_called() + +# self.assertEqual(len(options.default_monitoring_labels), 1) +# [[lk, lv]] = options.default_monitoring_labels.items() +# self.assertEqual(lk.key, stackdriver.OPENCENSUS_TASK) +# self.assertEqual(lk.description, +# stackdriver.OPENCENSUS_TASK_DESCRIPTION) +# self.assertEqual(lv.value, mock_gtv()) + +# def test_bad_default_monitoring_labels(self): +# with self.assertRaises(AttributeError): +# stackdriver.Options( +# default_monitoring_labels=[ +# 'not a dict' +# ]) + +# with self.assertRaises(TypeError): +# stackdriver.Options( +# default_monitoring_labels={ +# 'bad key': +# label_value.LabelValue('clk_value') +# }) + +# with self.assertRaises(TypeError): +# stackdriver.Options( +# default_monitoring_labels={ +# label_key.LabelKey('clk_key', 'clk_desc'): +# 'bad value' +# }) + + +# class TestStackdriverStatsExporter(unittest.TestCase): +# def test_constructor(self): +# exporter = stackdriver.StackdriverStatsExporter() + +# self.assertIsNone(exporter.client) + +# def test_constructor_param(self): +# exporter = stackdriver.StackdriverStatsExporter( +# options=stackdriver.Options(project_id=1)) +# self.assertEqual(exporter.options.project_id, 1) + +# def test_null_options(self): +# # Check that we don't suppress auth errors +# auth_error = google.auth.exceptions.DefaultCredentialsError +# mock_auth_error = mock.Mock() +# mock_auth_error.side_effect = auth_error +# with mock.patch('opencensus.ext.stackdriver.stats_exporter' +# '.google.auth.default', mock_auth_error): +# with self.assertRaises(auth_error): +# stackdriver.new_stats_exporter() + +# # Check that we get the default credentials' project ID +# mock_auth_ok = mock.Mock() +# mock_auth_ok.return_value = (None, 
123) +# with mock.patch('opencensus.ext.stackdriver.stats_exporter' +# '.google.auth.default', mock_auth_ok): +# sdse = stackdriver.new_stats_exporter() +# self.assertEqual(sdse.options.project_id, 123) + +# # Check that we raise if auth works but the project is empty +# mock_auth_no_project = mock.Mock() +# mock_auth_no_project.return_value = (None, '') +# with mock.patch('opencensus.ext.stackdriver.stats_exporter' +# '.google.auth.default', mock_auth_no_project): +# with self.assertRaises(ValueError): +# stackdriver.new_stats_exporter() + +# def test_blank_project(self): +# self.assertRaises(ValueError, stackdriver.new_stats_exporter, +# stackdriver.Options(project_id="")) + +# def test_not_blank_project(self): +# patch_client = mock.patch( +# ('opencensus.ext.stackdriver.stats_exporter' +# '.monitoring_v3.MetricServiceClient'), _Client) + +# with patch_client: +# exporter_created = stackdriver.new_stats_exporter( +# stackdriver.Options(project_id=1)) + +# self.assertIsInstance(exporter_created, +# stackdriver.StackdriverStatsExporter) + +# def test_get_user_agent_slug(self): +# self.assertIn(__version__, stackdriver.get_user_agent_slug()) + +# def test_client_info_user_agent(self): +# """Check that the monitoring client sets a user agent. + +# The user agent should include the library version. Note that this +# assumes MetricServiceClient calls ClientInfo.to_user_agent to attach +# the user agent as metadata to metric service API calls. 
+# """ +# patch_client = mock.patch( +# 'opencensus.ext.stackdriver.stats_exporter.monitoring_v3' +# '.MetricServiceClient', _Client) + +# with patch_client: +# exporter = stackdriver.new_stats_exporter( +# stackdriver.Options(project_id=1)) + +# self.assertIn(stackdriver.get_user_agent_slug(), +# exporter.client.client_info.to_user_agent()) + +# def test_sanitize(self): +# # empty +# result = stackdriver.sanitize_label("") +# self.assertEqual(result, "") + +# # all invalid +# result = stackdriver.sanitize_label("/*^#$") +# self.assertEqual(result, "key_") + +# # all valid +# result = stackdriver.sanitize_label("abc") +# self.assertEqual(result, "abc") + +# # mixed +# result = stackdriver.sanitize_label("a.b/c") +# self.assertEqual(result, "a_b_c") + +# # starts with '_' +# result = stackdriver.sanitize_label("_abc") +# self.assertEqual(result, "key_abc") + +# # starts with digit +# result = stackdriver.sanitize_label("0abc") +# self.assertEqual(result, "key_0abc") + +# # too long +# result = stackdriver.sanitize_label("0123456789" * 10) +# self.assertEqual(len(result), 100) +# self.assertEqual(result, "key_" + "0123456789" * 9 + "012345") + +# def test_get_task_value(self): +# task_value = stackdriver.get_task_value() +# self.assertNotEqual(task_value, "") + +# def test_namespaced_views(self): +# view_name = "view-1" +# expected_view_name_namespaced = ( +# "custom.googleapis.com/opencensus/{}".format(view_name)) +# view_name_namespaced = stackdriver.namespaced_view_name(view_name, "") +# self.assertEqual(expected_view_name_namespaced, view_name_namespaced) + +# expected_view_name_namespaced = "kubernetes.io/myorg/%s" % view_name +# view_name_namespaced = stackdriver.namespaced_view_name( +# view_name, "kubernetes.io/myorg") +# self.assertEqual(expected_view_name_namespaced, view_name_namespaced) + +# def test_stackdriver_register_exporter(self): +# stats = stats_module.stats +# view_manager = stats.view_manager + +# exporter = mock.Mock() +# if 
len(view_manager.measure_to_view_map.exporters) > 0: +# view_manager.unregister_exporter( +# view_manager.measure_to_view_map.exporters[0]) +# view_manager.register_exporter(exporter) + +# registered_exporters = len(view_manager.measure_to_view_map.exporters) + +# self.assertEqual(registered_exporters, 1) + +# @mock.patch('os.getpid', return_value=12345) +# @mock.patch( +# 'platform.uname', +# return_value=('system', 'node', 'release', 'version', 'machine', +# 'processor')) +# def test_get_task_value_with_hostname(self, mock_uname, mock_pid): +# self.assertEqual(stackdriver.get_task_value(), "py-12345@node") + +# @mock.patch('os.getpid', return_value=12345) +# @mock.patch( +# 'platform.uname', +# return_value=('system', '', 'release', 'version', 'machine', +# 'processor')) +# def test_get_task_value_without_hostname(self, mock_uname, mock_pid): +# self.assertEqual(stackdriver.get_task_value(), "py-12345@localhost") + +# def test_default_default_monitoring_labels(self): +# """Check that metrics include OC task label by default.""" +# exporter = stackdriver.StackdriverStatsExporter( +# options=stackdriver.Options(project_id='project_id'), +# client=mock.Mock()) + +# lv = label_value.LabelValue('val') +# val = value.ValueLong(value=123) +# dt = datetime(2019, 3, 20, 21, 34, 0, 537954) +# pp = point.Point(value=val, timestamp=dt) +# ts = [ +# time_series.TimeSeries(label_values=[lv], points=[pp], +# start_timestamp=utils.to_iso_str(dt)) +# ] + +# desc = metric_descriptor.MetricDescriptor( +# name='name', +# description='description', +# unit='unit', +# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, +# label_keys=[label_key.LabelKey('key', 'description')] +# ) +# mm = metric.Metric(descriptor=desc, time_series=ts) + +# sd_md = exporter.get_metric_descriptor(desc) +# self.assertEqual(len(sd_md.labels), 2) +# sd_descriptors = {ld.key: ld.description for ld in sd_md.labels} +# self.assertIn('key', sd_descriptors) +# self.assertEqual(sd_descriptors['key'], 
'description') +# self.assertIn(stackdriver.OPENCENSUS_TASK, sd_descriptors) +# self.assertEqual( +# sd_descriptors[stackdriver.OPENCENSUS_TASK], +# stackdriver.OPENCENSUS_TASK_DESCRIPTION +# ) + +# sd_ts_list = exporter.create_time_series_list(mm) +# self.assertEqual(len(sd_ts_list), 1) +# [sd_ts] = sd_ts_list +# self.assertIn('key', sd_ts.metric.labels) +# self.assertEqual(sd_ts.metric.labels['key'], 'val') +# self.assertIn(stackdriver.OPENCENSUS_TASK, sd_ts.metric.labels) + +# def test_empty_default_monitoring_labels(self): +# """Check that it's possible to remove the default OC task label.""" +# exporter = stackdriver.StackdriverStatsExporter( +# options=stackdriver.Options( +# project_id='project_id', +# default_monitoring_labels={}), +# client=mock.Mock()) + +# lv = label_value.LabelValue('val') +# val = value.ValueLong(value=123) +# dt = datetime(2019, 3, 20, 21, 34, 0, 537954) +# pp = point.Point(value=val, timestamp=dt) +# ts = [ +# time_series.TimeSeries(label_values=[lv], points=[pp], +# start_timestamp=utils.to_iso_str(dt)) +# ] + +# desc = metric_descriptor.MetricDescriptor( +# name='name', +# description='description', +# unit='unit', +# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, +# label_keys=[label_key.LabelKey('key', 'description')] +# ) +# mm = metric.Metric(descriptor=desc, time_series=ts) + +# sd_md = exporter.get_metric_descriptor(desc) +# self.assertEqual(len(sd_md.labels), 1) +# [sd_label] = sd_md.labels +# self.assertEqual(sd_label.key, 'key') +# self.assertEqual(sd_label.description, 'description') + +# sd_ts_list = exporter.create_time_series_list(mm) +# self.assertEqual(len(sd_ts_list), 1) +# [sd_ts] = sd_ts_list +# self.assertIn('key', sd_ts.metric.labels) +# self.assertEqual(sd_ts.metric.labels['key'], 'val') +# self.assertNotIn(stackdriver.OPENCENSUS_TASK, sd_ts.metric.labels) + +# def test_custom_default_monitoring_labels(self): +# """Check that custom labels are exported and included in descriptor.""" +# exporter = 
stackdriver.StackdriverStatsExporter( +# options=stackdriver.Options( +# project_id='project_id', +# default_monitoring_labels={ +# label_key.LabelKey('clk_key', 'clk_desc'): +# label_value.LabelValue('clk_value') +# }), +# client=mock.Mock()) + +# lv = label_value.LabelValue('val') +# val = value.ValueLong(value=123) +# dt = datetime(2019, 3, 20, 21, 34, 0, 537954) +# pp = point.Point(value=val, timestamp=dt) +# ts = [ +# time_series.TimeSeries(label_values=[lv], points=[pp], +# start_timestamp=utils.to_iso_str(dt)) +# ] + +# desc = metric_descriptor.MetricDescriptor( +# name='name', +# description='description', +# unit='unit', +# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, +# label_keys=[label_key.LabelKey('key', 'description')] +# ) +# mm = metric.Metric(descriptor=desc, time_series=ts) + +# sd_md = exporter.get_metric_descriptor(desc) +# self.assertEqual(len(sd_md.labels), 2) +# sd_descriptors = {ld.key: ld.description for ld in sd_md.labels} +# self.assertIn('key', sd_descriptors) +# self.assertEqual(sd_descriptors['key'], 'description') +# self.assertIn('clk_key', sd_descriptors) +# self.assertEqual(sd_descriptors['clk_key'], 'clk_desc') + +# sd_ts_list = exporter.create_time_series_list(mm) +# self.assertEqual(len(sd_ts_list), 1) +# [sd_ts] = sd_ts_list +# self.assertIn('key', sd_ts.metric.labels) +# self.assertEqual(sd_ts.metric.labels['key'], 'val') +# self.assertIn('clk_key', sd_ts.metric.labels) +# self.assertEqual(sd_ts.metric.labels['clk_key'], 'clk_value') + +# def test_get_metric_descriptor(self): +# exporter = stackdriver.StackdriverStatsExporter( +# options=stackdriver.Options( +# project_id='project_id'), +# client=mock.Mock()) + +# oc_md = metric_descriptor.MetricDescriptor( +# name='name', +# description='description', +# unit='unit', +# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, +# label_keys=[label_key.LabelKey('ck', 'cd')] +# ) + +# sd_md = exporter.get_metric_descriptor(oc_md) +# self.assertEqual( +# 
sd_md.metric_kind, +# monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE) +# self.assertEqual( +# sd_md.value_type, +# monitoring_v3.enums.MetricDescriptor.ValueType.INT64) + +# self.assertIsInstance(sd_md, monitoring_v3.types.MetricDescriptor) +# exporter.client.create_metric_descriptor.assert_not_called() + +# def test_get_metric_descriptor_bad_type(self): +# exporter = stackdriver.StackdriverStatsExporter( +# options=stackdriver.Options(project_id='project_id'), +# client=mock.Mock()) + +# bad_type_oc_md = metric_descriptor.MetricDescriptor( +# name='name', +# description='description', +# unit='unit', +# # Need a valid type to create the descriptor +# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, +# label_keys=[label_key.LabelKey('key', 'description')] +# ) +# bad_type_oc_md._type = 100 + +# with self.assertRaises(TypeError): +# exporter.get_metric_descriptor(bad_type_oc_md) + +# def test_get_metric_descriptor_custom_prefix(self): + +# exporter = stackdriver.StackdriverStatsExporter( +# options=stackdriver.Options( +# metric_prefix='metric_prefix', +# project_id='project_id'), +# client=mock.Mock()) + +# oc_md = metric_descriptor.MetricDescriptor( +# name='name', +# description='description', +# unit='unit', +# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, +# label_keys=[label_key.LabelKey('ck', 'cd')] +# ) + +# sd_md = exporter.get_metric_descriptor(oc_md) +# self.assertIn('metric_prefix', sd_md.type) +# self.assertIn('metric_prefix', sd_md.name) + +# def test_register_metric_descriptor(self): +# exporter = stackdriver.StackdriverStatsExporter( +# options=stackdriver.Options( +# metric_prefix='metric_prefix', +# project_id='project_id'), +# client=mock.Mock()) + +# oc_md = metric_descriptor.MetricDescriptor( +# name='name', +# description='description', +# unit='unit', +# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, +# label_keys=[label_key.LabelKey('key', 'description')] +# ) + +# 
exporter.register_metric_descriptor(oc_md) +# self.assertEqual( +# exporter.client.create_metric_descriptor.call_count, +# 1 +# ) +# exporter.register_metric_descriptor(oc_md) +# self.assertEqual( +# exporter.client.create_metric_descriptor.call_count, +# 1 +# ) + +# def test_export_metrics(self): +# lv = label_value.LabelValue('val') +# val = value.ValueLong(value=123) +# dt = datetime(2019, 3, 20, 21, 34, 0, 537954) +# pp = point.Point(value=val, timestamp=dt) + +# ts = [ +# time_series.TimeSeries(label_values=[lv], points=[pp], +# start_timestamp=utils.to_iso_str(dt)) +# ] + +# desc = metric_descriptor.MetricDescriptor( +# name='name', +# description='description', +# unit='unit', +# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, +# label_keys=[label_key.LabelKey('key', 'description')] +# ) + +# mm = metric.Metric(descriptor=desc, time_series=ts) + +# exporter = stackdriver.StackdriverStatsExporter(client=mock.Mock()) +# exporter.export_metrics([mm]) + +# self.assertEqual(exporter.client.create_time_series.call_count, 1) +# sd_args = exporter.client.create_time_series.call_args[0][1] +# self.assertEqual(len(sd_args), 1) +# [sd_arg] = exporter.client.create_time_series.call_args[0][1] +# self.assertEqual(sd_arg.points[0].value.int64_value, 123) + + +# class MockPeriodicMetricTask(object): +# """Testing mock of metrics.transport.PeriodicMetricTask. + +# Simulate calling export asynchronously from another thread synchronously +# from this one. +# """ +# def __init__(self, interval=None, function=None, args=None, kwargs=None): +# self.function = function +# self.logger = mock.Mock() +# self.start = mock.Mock() +# self.run = mock.Mock() + +# def step(self): +# try: +# self.function() +# except transport_module.TransportError as ex: +# self.logger.exception(ex) +# self.cancel() +# except Exception: +# self.logger.exception("Error handling metric export") + + +# class MockGetExporterThread(object): +# """Intercept calls to get_exporter_thread. 
+ +# To get a reference to the running PeriodicMetricTask created by +# get_exporter_thread. +# """ +# def __init__(self): +# self.transport = None + +# def __enter__(self): +# original_func = transport_module.get_exporter_thread + +# def get_exporter_thread(*aa, **kw): +# self.transport = original_func(*aa, **kw) + +# mock_get = mock.Mock() +# mock_get.side_effect = get_exporter_thread +# self.patcher = mock.patch( +# ('opencensus.ext.stackdriver.stats_exporter' +# '.transport.get_exporter_thread'), +# mock_get) +# self.patcher.start() +# return self + +# def __exit__(self, type, value, traceback): +# self.patcher.stop() + + +# @mock.patch('opencensus.ext.stackdriver.stats_exporter' +# '.monitoring_v3.MetricServiceClient') +# @mock.patch('opencensus.ext.stackdriver.stats_exporter' +# '.stats.stats') +# class TestAsyncStatsExport(unittest.TestCase): +# """Check that metrics are exported using the exporter thread.""" + +# def setUp(self): +# patcher = mock.patch( +# 'opencensus.metrics.transport.PeriodicMetricTask', +# MockPeriodicMetricTask) +# patcher.start() +# self.addCleanup(patcher.stop) + +# def test_export_empty(self, mock_stats, mock_client): +# """Check that we don't attempt to export empty metric sets.""" + +# mock_stats.get_metrics.return_value = [] + +# with MockGetExporterThread() as mget: +# exporter = stackdriver.new_stats_exporter( +# stackdriver.Options(project_id=1)) +# mget.transport.step() + +# exporter.client.create_metric_descriptor.assert_not_called() +# exporter.client.create_time_series.assert_not_called() + +# def test_export_single_metric(self, mock_stats, mock_client): +# """Check that we can export a set of a single metric.""" + +# lv = label_value.LabelValue('val') +# val = value.ValueLong(value=123) +# dt = datetime(2019, 3, 20, 21, 34, 0, 537954) +# pp = point.Point(value=val, timestamp=dt) + +# ts = [ +# time_series.TimeSeries(label_values=[lv], points=[pp], +# start_timestamp=utils.to_iso_str(dt)) +# ] + +# desc = 
metric_descriptor.MetricDescriptor( +# name='name2', +# description='description2', +# unit='unit2', +# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, +# label_keys=[label_key.LabelKey('key', 'description')] +# ) + +# mm = metric.Metric(descriptor=desc, time_series=ts) +# mock_stats.get_metrics.return_value = [mm] + +# with MockGetExporterThread() as mget: +# exporter = stackdriver.new_stats_exporter( +# stackdriver.Options(project_id=1)) +# mget.transport.step() + +# exporter.client.create_metric_descriptor.assert_called() +# self.assertEqual( +# exporter.client.create_metric_descriptor.call_count, +# 1) +# md_call_arg =\ +# exporter.client.create_metric_descriptor.call_args[0][1] +# self.assertEqual( +# md_call_arg.metric_kind, +# monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE +# ) +# self.assertEqual( +# md_call_arg.value_type, +# monitoring_v3.enums.MetricDescriptor.ValueType.INT64 +# ) + +# exporter.client.create_time_series.assert_called() +# self.assertEqual( +# exporter.client.create_time_series.call_count, +# 1) +# ts_call_arg = exporter.client.create_time_series.call_args[0][1] +# self.assertEqual(len(ts_call_arg), 1) +# self.assertEqual(len(ts_call_arg[0].points), 1) +# self.assertEqual(ts_call_arg[0].points[0].value.int64_value, 123) + + +# class TestCreateTimeseries(unittest.TestCase): + +# def setUp(self): +# patcher = mock.patch( +# 'opencensus.ext.stackdriver.stats_exporter.stats.stats', +# stats_module._Stats()) +# patcher.start() +# self.addCleanup(patcher.stop) + +# def check_labels(self, +# actual_labels, +# expected_labels, +# include_opencensus=False): +# actual_labels = dict(actual_labels) +# if include_opencensus: +# opencensus_tag = actual_labels.pop(stackdriver.OPENCENSUS_TASK) +# self.assertIsNotNone(opencensus_tag) +# self.assertIn("py-", opencensus_tag) +# self.assertDictEqual(actual_labels, expected_labels) + +# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+# 'monitored_resource.get_instance', +# return_value=None) +# def test_create_batched_time_series(self, monitor_resource_mock): +# client = mock.Mock() +# v_data = view_data_module.ViewData( +# view=VIDEO_SIZE_VIEW, +# start_time=TEST_TIME_STR, +# end_time=TEST_TIME_STR) +# v_data.record(context=tag_map_module.TagMap(), value=2, timestamp=None) +# view_data = [v_data] + +# option = stackdriver.Options(project_id="project-test") +# exporter = stackdriver.StackdriverStatsExporter( +# options=option, client=client) + +# view_data = [metric_utils.view_data_to_metric(view_data[0], TEST_TIME)] + +# time_series_batches = exporter.create_batched_time_series(view_data, 1) + +# self.assertEqual(len(time_series_batches), 1) +# [time_series_batch] = time_series_batches +# self.assertEqual(len(time_series_batch), 1) +# [time_series] = time_series_batch +# self.assertEqual( +# time_series.metric.type, +# 'custom.googleapis.com/opencensus/' + VIDEO_SIZE_VIEW_NAME) +# self.check_labels( +# time_series.metric.labels, {}, include_opencensus=True) + +# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+# 'monitored_resource.get_instance', +# return_value=None) +# def test_create_batched_time_series_with_many(self, monitor_resource_mock): +# client = mock.Mock() + +# # First view with 3 +# view_name1 = "view-name1" +# view1 = view_module.View(view_name1, "test description", ['test'], +# VIDEO_SIZE_MEASURE, +# aggregation_module.LastValueAggregation()) +# v_data1 = view_data_module.ViewData( +# view=view1, start_time=TEST_TIME_STR, end_time=TEST_TIME_STR) +# v_data1.record(context=tag_map_module.TagMap({'test': '1'}), value=7, +# timestamp=None) +# v_data1.record(context=tag_map_module.TagMap({'test': '2'}), value=5, +# timestamp=None) +# v_data1.record(context=tag_map_module.TagMap({'test': '3'}), value=3, +# timestamp=None) + +# # Second view with 2 +# view_name2 = "view-name2" +# view2 = view_module.View(view_name2, "test description", ['test'], +# VIDEO_SIZE_MEASURE, +# aggregation_module.LastValueAggregation()) +# v_data2 = view_data_module.ViewData( +# view=view2, start_time=TEST_TIME_STR, end_time=TEST_TIME_STR) +# v_data2.record(context=tag_map_module.TagMap({'test': '1'}), value=7, +# timestamp=None) +# v_data2.record(context=tag_map_module.TagMap({'test': '2'}), value=5, +# timestamp=None) + +# view_data = [v_data1, v_data2] +# view_data = [metric_utils.view_data_to_metric(vd, TEST_TIME) +# for vd in view_data] + +# option = stackdriver.Options(project_id="project-test") +# exporter = stackdriver.StackdriverStatsExporter( +# options=option, client=client) + +# time_series_batches = exporter.create_batched_time_series(view_data, 2) + +# self.assertEqual(len(time_series_batches), 3) +# [tsb1, tsb2, tsb3] = time_series_batches +# self.assertEqual(len(tsb1), 2) +# self.assertEqual(len(tsb2), 2) +# self.assertEqual(len(tsb3), 1) + +# def setup_create_timeseries_test(self): +# client = mock.Mock() +# execution_context.clear() + +# option = stackdriver.Options( +# project_id="project-test", resource="global") +# exporter = stackdriver.StackdriverStatsExporter( 
+# options=option, client=client) + +# stats = stats_module.stats +# view_manager = stats.view_manager +# stats_recorder = stats.stats_recorder + +# if len(view_manager.measure_to_view_map.exporters) > 0: +# view_manager.unregister_exporter( +# view_manager.measure_to_view_map.exporters[0]) + +# view_manager.register_exporter(exporter) +# return view_manager, stats_recorder, exporter + +# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' +# 'monitored_resource.get_instance', +# return_value=None) +# def test_create_timeseries(self, monitor_resource_mock): +# view_manager, stats_recorder, exporter = \ +# self.setup_create_timeseries_test() + +# view_manager.register_view(VIDEO_SIZE_VIEW) + +# tag_value = tag_value_module.TagValue("1200") +# tag_map = tag_map_module.TagMap() +# tag_map.insert(FRONTEND_KEY, tag_value) + +# measure_map = stats_recorder.new_measurement_map() +# measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) +# measure_map.record(tag_map) + +# v_data = measure_map.measure_to_view_map.get_view( +# VIDEO_SIZE_VIEW_NAME, None) + +# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + +# time_series_list = exporter.create_time_series_list(v_data) + +# self.assertEqual(len(time_series_list), 1) +# time_series = time_series_list[0] +# self.assertEqual(time_series.resource.type, "global") +# self.assertEqual( +# time_series_list[0].metric.type, +# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") +# self.check_labels( +# time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, +# include_opencensus=True) +# self.assertIsNotNone(time_series.resource) + +# self.assertEqual(len(time_series.points), 1) +# value = time_series.points[0].value +# self.assertEqual(value.distribution_value.count, 1) + +# time_series_list = exporter.create_time_series_list(v_data) + +# self.assertEqual(len(time_series_list), 1) +# time_series = time_series_list[0] +# self.check_labels( +# time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, 
+# include_opencensus=True) +# self.assertIsNotNone(time_series.resource) + +# self.assertEqual(len(time_series.points), 1) +# value = time_series.points[0].value +# self.assertEqual(value.distribution_value.count, 1) + +# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' +# 'monitored_resource.get_instance') +# def test_create_timeseries_with_resource(self, monitor_resource_mock): + +# client = mock.Mock() +# execution_context.clear() + +# option = stackdriver.Options(project_id="project-test", resource="") +# exporter = stackdriver.StackdriverStatsExporter( +# options=option, client=client) + +# stats = stats_module.stats +# view_manager = stats.view_manager +# stats_recorder = stats.stats_recorder + +# if len(view_manager.measure_to_view_map.exporters) > 0: +# view_manager.unregister_exporter( +# view_manager.measure_to_view_map.exporters[0]) + +# view_manager.register_exporter(exporter) +# view_manager.register_view(VIDEO_SIZE_VIEW) + +# tag_value = tag_value_module.TagValue("1200") +# tag_map = tag_map_module.TagMap() +# tag_map.insert(FRONTEND_KEY, tag_value) + +# measure_map = stats_recorder.new_measurement_map() +# measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) +# measure_map.record(tag_map) + +# v_data = measure_map.measure_to_view_map.get_view( +# VIDEO_SIZE_VIEW_NAME, None) + +# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + +# # check for gce_instance monitored resource +# mocked_labels = { +# 'instance_id': 'my-instance', +# 'project_id': 'my-project', +# 'zone': 'us-east1', +# 'k8s.io/pod/name': 'localhost', +# 'k8s.io/namespace/name': 'namespace', +# } + +# mock_resource = mock.Mock() +# mock_resource.get_type.return_value = 'gce_instance' +# mock_resource.get_labels.return_value = mocked_labels +# monitor_resource_mock.return_value = mock_resource + +# time_series_list = exporter.create_time_series_list(v_data) +# self.assertEqual(len(time_series_list), 1) +# time_series = time_series_list[0] +# 
self.assertEqual(time_series.resource.type, "gce_instance") +# self.check_labels( +# time_series.resource.labels, { +# 'instance_id': 'my-instance', +# 'project_id': 'my-project', +# 'zone': 'us-east1', +# }) +# self.assertEqual( +# time_series.metric.type, +# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") +# self.assertIsNotNone(time_series) + +# time_series_list = exporter.create_time_series_list(v_data) +# self.assertEqual(len(time_series_list), 1) +# time_series = time_series_list[0] + +# self.assertEqual( +# time_series.metric.type, +# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") + +# # check for k8s_container monitored resource +# mocked_labels = { +# 'instance_id': 'my-instance', +# 'project_id': 'my-project', +# 'zone': 'us-east1', +# 'k8s.io/pod/name': 'localhost', +# 'k8s.io/cluster/name': 'cluster', +# 'k8s.io/namespace/name': 'namespace', +# } + +# mock_resource = mock.Mock() +# mock_resource.get_type.return_value = 'k8s_container' +# mock_resource.get_labels.return_value = mocked_labels +# monitor_resource_mock.return_value = mock_resource + +# time_series_list = exporter.create_time_series_list(v_data) +# self.assertEqual(len(time_series_list), 1) +# time_series = time_series_list[0] +# self.assertEqual(time_series.resource.type, "k8s_container") +# self.check_labels( +# time_series.resource.labels, { +# 'project_id': 'my-project', +# 'location': 'us-east1', +# 'cluster_name': 'cluster', +# 'pod_name': 'localhost', +# 'namespace_name': 'namespace', +# }) +# self.assertEqual( +# time_series.metric.type, +# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") +# self.assertIsNotNone(time_series) + +# # check for aws_ec2_instance monitored resource +# mocked_labels = { +# 'instance_id': 'my-instance', +# 'aws_account': 'my-project', +# 'region': 'us-east1', +# } + +# mock_resource = mock.Mock() +# mock_resource.get_type.return_value = 'aws_ec2_instance' +# mock_resource.get_labels.return_value = 
mocked_labels +# monitor_resource_mock.return_value = mock_resource + +# time_series_list = exporter.create_time_series_list(v_data) +# self.assertEqual(len(time_series_list), 1) +# time_series = time_series_list[0] +# self.assertEqual(time_series.resource.type, "aws_ec2_instance") +# self.check_labels( +# time_series.resource.labels, { +# 'instance_id': 'my-instance', +# 'aws_account': 'my-project', +# 'region': 'aws:us-east1', +# }) +# self.assertEqual( +# time_series.metric.type, +# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") +# self.assertIsNotNone(time_series) + +# # check for out of box monitored resource +# mock_resource = mock.Mock() +# mock_resource.get_type.return_value = '' +# mock_resource.get_labels.return_value = mock.Mock() +# monitor_resource_mock.return_value = mock_resource + +# time_series_list = exporter.create_time_series_list(v_data) +# self.assertEqual(len(time_series_list), 1) +# time_series = time_series_list[0] +# self.assertEqual(time_series.resource.type, 'global') +# self.check_labels(time_series.resource.labels, {}) +# self.assertEqual( +# time_series.metric.type, +# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") +# self.assertIsNotNone(time_series) + +# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+# 'monitored_resource.get_instance', +# return_value=None) +# def test_create_timeseries_str_tagvalue(self, monitor_resource_mock): +# view_manager, stats_recorder, exporter = \ +# self.setup_create_timeseries_test() + +# agg_1 = aggregation_module.LastValueAggregation(value=2) +# view_name1 = "view-name1" +# new_view1 = view_module.View( +# view_name1, "processed video size over time", [FRONTEND_KEY_INT], +# VIDEO_SIZE_MEASURE_2, agg_1) + +# view_manager.register_view(new_view1) + +# tag_value_int = tag_value_module.TagValue("Abc") +# tag_map = tag_map_module.TagMap() +# tag_map.insert(FRONTEND_KEY_INT, tag_value_int) + +# measure_map = stats_recorder.new_measurement_map() +# measure_map.measure_int_put(VIDEO_SIZE_MEASURE_2, 25 * MiB) +# measure_map.record(tag_map) + +# v_data = measure_map.measure_to_view_map.get_view(view_name1, None) + +# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + +# time_series_list = exporter.create_time_series_list(v_data) +# self.assertEqual(len(time_series_list), 1) +# time_series = time_series_list[0] + +# self.check_labels( +# time_series.metric.labels, {FRONTEND_KEY_INT_CLEAN: "Abc"}, +# include_opencensus=True) +# self.assertIsNotNone(time_series.resource) + +# self.assertEqual(len(time_series.points), 1) +# expected_value = monitoring_v3.types.TypedValue() +# # TODO: #565 +# expected_value.double_value = 25.0 * MiB +# self.assertEqual(time_series.points[0].value, expected_value) + +# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+# 'monitored_resource.get_instance', +# return_value=None) +# def test_create_timeseries_str_tagvalue_count_aggregtation( +# self, monitor_resource_mock): +# view_manager, stats_recorder, exporter = \ +# self.setup_create_timeseries_test() + +# agg_1 = aggregation_module.CountAggregation(count=2) +# view_name1 = "view-name1" +# new_view1 = view_module.View( +# view_name1, "processed video size over time", [FRONTEND_KEY_INT], +# VIDEO_SIZE_MEASURE_2, agg_1) + +# view_manager.register_view(new_view1) + +# tag_value_int = tag_value_module.TagValue("Abc") +# tag_map = tag_map_module.TagMap() +# tag_map.insert(FRONTEND_KEY_INT, tag_value_int) + +# measure_map = stats_recorder.new_measurement_map() +# measure_map.measure_int_put(VIDEO_SIZE_MEASURE_2, 25 * MiB) +# measure_map.record(tag_map) + +# v_data = measure_map.measure_to_view_map.get_view(view_name1, None) + +# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + +# time_series_list = exporter.create_time_series_list(v_data) +# self.assertEqual(len(time_series_list), 1) +# time_series = time_series_list[0] +# self.check_labels( +# time_series.metric.labels, {FRONTEND_KEY_INT_CLEAN: "Abc"}, +# include_opencensus=True) +# self.assertIsNotNone(time_series.resource) + +# self.assertEqual(len(time_series.points), 1) +# expected_value = monitoring_v3.types.TypedValue() +# expected_value.int64_value = 3 +# self.assertEqual(time_series.points[0].value, expected_value) + +# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+# 'monitored_resource.get_instance', +# return_value=None) +# def test_create_timeseries_last_value_float_tagvalue( +# self, monitor_resource_mock): +# view_manager, stats_recorder, exporter = \ +# self.setup_create_timeseries_test() + +# agg_2 = aggregation_module.LastValueAggregation(value=2.2 * MiB) +# view_name2 = "view-name2" +# new_view2 = view_module.View( +# view_name2, "processed video size over time", [FRONTEND_KEY_FLOAT], +# VIDEO_SIZE_MEASURE_FLOAT, agg_2) + +# view_manager.register_view(new_view2) + +# tag_value_float = tag_value_module.TagValue("Abc") +# tag_map = tag_map_module.TagMap() +# tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float) + +# measure_map = stats_recorder.new_measurement_map() +# measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25.7 * MiB) +# measure_map.record(tag_map) + +# v_data = measure_map.measure_to_view_map.get_view(view_name2, None) + +# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + +# time_series_list = exporter.create_time_series_list(v_data) +# self.assertEqual(len(time_series_list), 1) +# time_series = time_series_list[0] +# self.check_labels( +# time_series.metric.labels, {FRONTEND_KEY_FLOAT_CLEAN: "Abc"}, +# include_opencensus=True) +# self.assertIsNotNone(time_series.resource) + +# self.assertEqual(len(time_series.points), 1) +# expected_value = monitoring_v3.types.TypedValue() +# expected_value.double_value = 25.7 * MiB +# self.assertEqual(time_series.points[0].value, expected_value) + +# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+# 'monitored_resource.get_instance', +# return_value=None) +# def test_create_timeseries_float_tagvalue(self, monitor_resource_mock): +# client = mock.Mock() + +# option = stackdriver.Options( +# project_id="project-test", resource="global") +# exporter = stackdriver.StackdriverStatsExporter( +# options=option, client=client) + +# stats = stats_module.stats +# view_manager = stats.view_manager +# stats_recorder = stats.stats_recorder + +# if len(view_manager.measure_to_view_map.exporters) > 0: +# view_manager.unregister_exporter( +# view_manager.measure_to_view_map.exporters[0]) + +# view_manager.register_exporter(exporter) + +# agg_3 = aggregation_module.SumAggregation(sum=2.2) +# view_name3 = "view-name3" +# new_view3 = view_module.View( +# view_name3, "processed video size over time", [FRONTEND_KEY_FLOAT], +# VIDEO_SIZE_MEASURE_FLOAT, agg_3) + +# view_manager.register_view(new_view3) + +# tag_value_float = tag_value_module.TagValue("1200") +# tag_map = tag_map_module.TagMap() +# tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float) + +# measure_map = stats_recorder.new_measurement_map() +# measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB) +# measure_map.record(tag_map) + +# v_data = measure_map.measure_to_view_map.get_view(view_name3, None) + +# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + +# time_series_list = exporter.create_time_series_list(v_data) +# self.assertEqual(len(time_series_list), 1) +# [time_series] = time_series_list +# self.assertEqual(time_series.metric.type, +# "custom.googleapis.com/opencensus/view-name3") +# self.check_labels( +# time_series.metric.labels, {FRONTEND_KEY_FLOAT_CLEAN: "1200"}, +# include_opencensus=True) +# self.assertIsNotNone(time_series.resource) + +# self.assertEqual(len(time_series.points), 1) +# expected_value = monitoring_v3.types.TypedValue() +# expected_value.double_value = 2.2 + 25 * MiB +# self.assertEqual(time_series.points[0].value, expected_value) + +# 
@mock.patch('opencensus.ext.stackdriver.stats_exporter.' +# 'monitored_resource.get_instance', +# return_value=None) +# def test_create_timeseries_multiple_tag_values(self, +# monitoring_resoure_mock): +# view_manager, stats_recorder, exporter = \ +# self.setup_create_timeseries_test() + +# view_manager.register_view(VIDEO_SIZE_VIEW) + +# measure_map = stats_recorder.new_measurement_map() + +# # Add first point with one tag value +# tag_map = tag_map_module.TagMap() +# tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200")) +# measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) +# measure_map.record(tag_map) + +# # Add second point with different tag value +# tag_map = tag_map_module.TagMap() +# tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1400")) +# measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 12 * MiB) +# measure_map.record(tag_map) + +# v_data = measure_map.measure_to_view_map.get_view( +# VIDEO_SIZE_VIEW_NAME, None) + +# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + +# time_series_list = exporter.create_time_series_list(v_data) + +# self.assertEqual(len(time_series_list), 2) +# ts_by_frontend = { +# ts.metric.labels.get(FRONTEND_KEY_CLEAN): ts +# for ts in time_series_list +# } +# self.assertEqual(set(ts_by_frontend.keys()), {"1200", "1400"}) +# ts1 = ts_by_frontend["1200"] +# ts2 = ts_by_frontend["1400"] + +# # Verify first time series +# self.assertEqual(ts1.resource.type, "global") +# self.assertEqual( +# ts1.metric.type, +# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") +# self.assertIsNotNone(ts1.resource) + +# self.assertEqual(len(ts1.points), 1) +# value1 = ts1.points[0].value +# self.assertEqual(value1.distribution_value.count, 1) + +# # Verify second time series +# self.assertEqual(ts2.resource.type, "global") +# self.assertEqual( +# ts2.metric.type, +# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") +# self.assertIsNotNone(ts2.resource) + +# 
self.assertEqual(len(ts2.points), 1) +# value2 = ts2.points[0].value +# self.assertEqual(value2.distribution_value.count, 1) + +# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' +# 'monitored_resource.get_instance', +# return_value=None) +# def test_create_timeseries_disjoint_tags(self, monitoring_resoure_mock): +# view_manager, stats_recorder, exporter = \ +# self.setup_create_timeseries_test() + +# # Register view with two tags +# view_name = "view-name" +# view = view_module.View(view_name, "test description", +# [FRONTEND_KEY, FRONTEND_KEY_FLOAT], +# VIDEO_SIZE_MEASURE, +# aggregation_module.SumAggregation()) + +# view_manager.register_view(view) + +# # Add point with one tag in common and one different tag +# measure_map = stats_recorder.new_measurement_map() +# tag_map = tag_map_module.TagMap() +# tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200")) +# tag_map.insert(FRONTEND_KEY_STR, tag_value_module.TagValue("1800")) +# measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) +# measure_map.record(tag_map) + +# v_data = measure_map.measure_to_view_map.get_view(view_name, None) + +# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + +# time_series_list = exporter.create_time_series_list(v_data) + +# self.assertEqual(len(time_series_list), 1) +# [time_series] = time_series_list + +# # Verify first time series +# self.assertEqual(time_series.resource.type, "global") +# self.assertEqual(time_series.metric.type, +# "custom.googleapis.com/opencensus/" + view_name) +# self.check_labels( +# time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, +# include_opencensus=True) +# self.assertIsNotNone(time_series.resource) + +# self.assertEqual(len(time_series.points), 1) +# expected_value = monitoring_v3.types.TypedValue() +# # TODO: #565 +# expected_value.double_value = 25.0 * MiB +# self.assertEqual(time_series.points[0].value, expected_value) + +# def test_create_timeseries_from_distribution(self): +# """Check for explicit 0-bound 
bucket for SD export.""" +# agg = aggregation_module.DistributionAggregation() + +# view = view_module.View( +# name="example.org/test_view", +# description="example.org/test_view", +# columns=['tag_key'], +# measure=mock.Mock(), +# aggregation=agg, +# ) + +# v_data = view_data_module.ViewData( +# view=view, +# start_time=TEST_TIME_STR, +# end_time=TEST_TIME_STR, +# ) + +# # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8] +# dad = aggregation_data_module.DistributionAggregationData( +# mean_data=4.5, +# count_data=100, +# sum_of_sqd_deviations=825, +# counts_per_bucket=[20, 20, 20, 20, 20], +# bounds=[2, 4, 6, 8], +# exemplars={mock.Mock() for ii in range(5)} +# ) +# v_data._tag_value_aggregation_data_map = {('tag_value',): dad} + +# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + +# exporter = stackdriver.StackdriverStatsExporter() +# time_series_list = exporter.create_time_series_list(v_data) +# self.assertEqual(len(time_series_list), 1) +# [time_series] = time_series_list + +# self.check_labels( +# time_series.metric.labels, {'tag_key': 'tag_value'}, +# include_opencensus=True) +# self.assertEqual(len(time_series.points), 1) +# [point] = time_series.points +# dv = point.value.distribution_value +# self.assertEqual(100, dv.count) +# self.assertEqual(825.0, dv.sum_of_squared_deviation) +# self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts) +# self.assertEqual([0, 2, 4, 6, 8], +# dv.bucket_options.explicit_buckets.bounds) + +# def test_create_timeseries_multiple_tags(self): +# """Check that exporter creates timeseries for multiple tag values. + +# create_time_series_list should return a time series for each set of +# values in the tag value aggregation map. 
+# """ +# agg = aggregation_module.CountAggregation() + +# view = view_module.View( +# name="example.org/test_view", +# description="example.org/test_view", +# columns=[tag_key_module.TagKey('color'), +# tag_key_module.TagKey('shape')], +# measure=mock.Mock(), +# aggregation=agg, +# ) + +# v_data = view_data_module.ViewData( +# view=view, +# start_time=TEST_TIME_STR, +# end_time=TEST_TIME_STR, +# ) + +# rs_count = aggregation_data_module.CountAggregationData(10) +# bc_count = aggregation_data_module.CountAggregationData(20) +# v_data._tag_value_aggregation_data_map = { +# ('red', 'square'): rs_count, +# ('blue', 'circle'): bc_count, +# } + +# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + +# exporter = stackdriver.StackdriverStatsExporter() +# time_series_list = exporter.create_time_series_list(v_data) + +# self.assertEqual(len(time_series_list), 2) +# self.assertEqual(len(time_series_list[0].points), 1) +# self.assertEqual(len(time_series_list[1].points), 1) + +# ts_by_color = {ts.metric.labels.get('color'): ts +# for ts in time_series_list} +# rs_ts = ts_by_color['red'] +# bc_ts = ts_by_color['blue'] +# self.assertEqual(rs_ts.metric.labels.get('shape'), 'square') +# self.assertEqual(bc_ts.metric.labels.get('shape'), 'circle') +# self.assertEqual(rs_ts.points[0].value.int64_value, 10) +# self.assertEqual(bc_ts.points[0].value.int64_value, 20) + +# def test_create_timeseries_invalid_aggregation(self): +# v_data = mock.Mock(spec=view_data_module.ViewData) +# v_data.view.name = "example.org/base_view" +# v_data.view.columns = [tag_key_module.TagKey('base_key')] +# v_data.start_time = TEST_TIME_STR +# v_data.end_time = TEST_TIME_STR + +# base_data = None +# v_data.tag_value_aggregation_data_map = { +# (None,): base_data, +# } + +# exporter = stackdriver.StackdriverStatsExporter( +# options=mock.Mock(), +# client=mock.Mock(), +# ) +# self.assertRaises(TypeError, exporter.create_time_series_list, v_data, +# "", "") diff --git a/tox.ini b/tox.ini index 
a3c253fc4..a460651bf 100644 --- a/tox.ini +++ b/tox.ini @@ -52,7 +52,7 @@ commands = ; TODO system tests lint: isort --check-only --diff --recursive . lint: flake8 context/ contrib/ opencensus/ tests/ examples/ - lint: - bash ./scripts/pylint.sh + ; lint: - bash ./scripts/pylint.sh py37-setup: python setup.py check --restructuredtext --strict py37-docs: bash ./scripts/update_docs.sh ; TODO deployment From 08c34fc271421dbed8baad8e62113d30049c5463 Mon Sep 17 00:00:00 2001 From: skinamdar Date: Fri, 12 Jun 2020 11:05:23 -0700 Subject: [PATCH 43/79] Add support to initialize azure exporters with proxies (#902) --- contrib/opencensus-ext-azure/CHANGELOG.md | 2 ++ .../opencensus/ext/azure/common/__init__.py | 5 ++++- .../opencensus/ext/azure/common/transport.py | 1 + .../tests/test_azure_log_exporter.py | 11 +++++++++++ .../tests/test_azure_trace_exporter.py | 11 +++++++++++ .../opencensus-ext-azure/tests/test_options.py | 18 ++++++++++++++++++ 6 files changed, 47 insertions(+), 1 deletion(-) diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index fcee072cb..623ce5662 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -4,6 +4,8 @@ - Change default path of local storage ([#903](https://github.com/census-instrumentation/opencensus-python/pull/903)) +- Add support to initialize azure exporters with proxies + ([#902](https://github.com/census-instrumentation/opencensus-python/pull/902)) ## 1.0.1 Released 2019-11-26 diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py index fcb25acb5..8d76d91ea 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py @@ -56,6 +56,9 @@ def process_options(options): TEMPDIR_PREFIX + TEMPDIR_SUFFIX ) + if options.proxies is None: + 
options.proxies = '{}' + def parse_connection_string(connection_string): if connection_string is None: @@ -105,7 +108,7 @@ def __init__(self, *args, **kwargs): logging_sampling_rate=1.0, max_batch_size=100, minimum_retry_interval=60, # minimum retry interval in seconds - proxy=None, + proxies=None, # string maps url schemes to the url of the proxies storage_maintenance_period=60, storage_max_size=50*1024*1024, # 50MiB storage_path=None, diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py index 58c287b97..3643da02b 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py @@ -52,6 +52,7 @@ def _transmit(self, envelopes): 'Content-Type': 'application/json; charset=utf-8', }, timeout=self.options.timeout, + proxies=json.loads(self.options.proxies), ) except requests.Timeout: logger.warning( diff --git a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py index 8aa6baa8d..88b8c436b 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py @@ -85,6 +85,17 @@ def test_invalid_sampling_rate(self): logging_sampling_rate=4.0, ) + def test_init_handler_with_proxies(self): + handler = log_exporter.AzureLogHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + proxies='{"https":"https://test-proxy.com"}', + ) + + self.assertEqual( + handler.options.proxies, + '{"https":"https://test-proxy.com"}', + ) + @mock.patch('requests.post', return_value=mock.Mock()) def test_exception(self, requests_mock): logger = logging.getLogger(self.id()) diff --git a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py index fc212aed8..5ff70e234 
100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py @@ -45,6 +45,17 @@ def test_ctor(self): self.assertRaises(ValueError, lambda: trace_exporter.AzureExporter()) Options._default.instrumentation_key = instrumentation_key + def test_init_exporter_with_proxies(self): + exporter = trace_exporter.AzureExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + proxies='{"https":"https://test-proxy.com"}', + ) + + self.assertEqual( + exporter.options.proxies, + '{"https":"https://test-proxy.com"}', + ) + @mock.patch('requests.post', return_value=mock.Mock()) def test_emit_empty(self, request_mock): exporter = trace_exporter.AzureExporter( diff --git a/contrib/opencensus-ext-azure/tests/test_options.py b/contrib/opencensus-ext-azure/tests/test_options.py index 5c16d9c6c..969eb5a89 100644 --- a/contrib/opencensus-ext-azure/tests/test_options.py +++ b/contrib/opencensus-ext-azure/tests/test_options.py @@ -90,6 +90,24 @@ def test_process_options_endpoint_default(self): self.assertEqual(options.endpoint, 'https://dc.services.visualstudio.com/v2/track') + def test_process_options_proxies_default(self): + options = common.Options() + options.proxies = "{}" + common.process_options(options) + + self.assertEqual(options.proxies, "{}") + + def test_process_options_proxies_set_proxies(self): + options = common.Options() + options.connection_string = None + options.proxies = '{"https": "https://test-proxy.com"}' + common.process_options(options) + + self.assertEqual( + options.proxies, + '{"https": "https://test-proxy.com"}' + ) + def test_parse_connection_string_none(self): cs = None result = common.parse_connection_string(cs) From 36b326a3e870171016081f2f896d0ffe924f0194 Mon Sep 17 00:00:00 2001 From: Andrew Xue Date: Tue, 16 Jun 2020 12:14:32 -0400 Subject: [PATCH 44/79] Update async_.py (#910) --- opencensus/common/transports/async_.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/opencensus/common/transports/async_.py b/opencensus/common/transports/async_.py index c5fe6a395..4dd90842b 100644 --- a/opencensus/common/transports/async_.py +++ b/opencensus/common/transports/async_.py @@ -92,6 +92,7 @@ def _thread_main(self): Pulls pending data off the queue and writes them in batches to the specified tracing backend using the exporter. """ + execution_context.set_is_exporter(True) quit_ = False while True: @@ -143,7 +144,6 @@ def start(self): self._thread.daemon = True # Indicate that this thread is an exporter thread. Used for # auto-collection. - execution_context.set_is_exporter(True) self._thread.start() atexit.register(self._export_pending_data) From 1328fbfb14d3a26c3b585bbfa275d6036d490cd8 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 16 Jun 2020 12:48:14 -0700 Subject: [PATCH 45/79] Fix async.py is_exporter context (#909) --- .github/CODEOWNERS | 1 - .../opencensus/ext/azure/common/exporter.py | 1 + .../opencensus/ext/azure/log_exporter/__init__.py | 1 + opencensus/common/transports/async_.py | 4 ++-- opencensus/common/transports/sync.py | 5 +++++ opencensus/metrics/transport.py | 1 + 6 files changed, 10 insertions(+), 3 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index a599a7169..b1f3db1df 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,4 +3,3 @@ # For anything not explicitly taken by someone else: * @census-instrumentation/global-owners @c24t @hectorhdzg @lzchen @reyang @songy23 @victoraugustolls - diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py index 969568087..3e67e0eb1 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py @@ -65,6 +65,7 @@ def __init__(self, src, dst): def run(self): # pragma: NO COVER # Indicate that this thread is an 
exporter thread. + # Used to suppress tracking of requests in this thread execution_context.set_is_exporter(True) src = self.src dst = self.dst diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index 6bc4db53f..60c42e566 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -79,6 +79,7 @@ def __init__(self, src, dst): def run(self): # Indicate that this thread is an exporter thread. + # Used to suppress tracking of requests in this thread execution_context.set_is_exporter(True) src = self._src dst = self._dst diff --git a/opencensus/common/transports/async_.py b/opencensus/common/transports/async_.py index 4dd90842b..56e726119 100644 --- a/opencensus/common/transports/async_.py +++ b/opencensus/common/transports/async_.py @@ -92,6 +92,8 @@ def _thread_main(self): Pulls pending data off the queue and writes them in batches to the specified tracing backend using the exporter. """ + # Indicate that this thread is an exporter thread. + # Used to suppress tracking of requests in this thread execution_context.set_is_exporter(True) quit_ = False @@ -142,8 +144,6 @@ def start(self): self._thread = threading.Thread( target=self._thread_main, name=_WORKER_THREAD_NAME) self._thread.daemon = True - # Indicate that this thread is an exporter thread. Used for - # auto-collection. self._thread.start() atexit.register(self._export_pending_data) diff --git a/opencensus/common/transports/sync.py b/opencensus/common/transports/sync.py index aae01218f..8f31d9bfe 100644 --- a/opencensus/common/transports/sync.py +++ b/opencensus/common/transports/sync.py @@ -13,6 +13,7 @@ # limitations under the License. 
from opencensus.common.transports import base +from opencensus.trace import execution_context class SyncTransport(base.Transport): @@ -20,4 +21,8 @@ def __init__(self, exporter): self.exporter = exporter def export(self, datas): + # Used to suppress tracking of requests in export + execution_context.set_is_exporter(True) self.exporter.emit(datas) + # Reset the context + execution_context.set_is_exporter(False) diff --git a/opencensus/metrics/transport.py b/opencensus/metrics/transport.py index 77728246a..74d78fc87 100644 --- a/opencensus/metrics/transport.py +++ b/opencensus/metrics/transport.py @@ -66,6 +66,7 @@ def func(*aa, **kw): def run(self): # Indicate that this thread is an exporter thread. + # Used to suppress tracking of requests in this thread execution_context.set_is_exporter(True) super(PeriodicMetricTask, self).run() From 6e4b610fbb7bde95fc47d29506013ede5600735a Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Wed, 17 Jun 2020 12:41:59 -0700 Subject: [PATCH 46/79] Update CHANGELOG for release v0.7.8 (#913) --- CHANGELOG.md | 7 +++++++ contrib/opencensus-ext-azure/CHANGELOG.md | 14 ++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 73ceed5c6..d9a856cc9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ ## Unreleased +## 0.7.8 +Released 2020-06-17 + +- Updated `azure` module + ([#903](https://github.com/census-instrumentation/opencensus-python/pull/903), + [#902](https://github.com/census-instrumentation/opencensus-python/pull/902)) + ## 0.7.7 Released 2020-02-03 diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 623ce5662..7efa8b779 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -2,11 +2,25 @@ ## Unreleased +## 1.0.3 +Released 2020-06-17 + - Change default path of local storage ([#903](https://github.com/census-instrumentation/opencensus-python/pull/903)) - Add support to 
initialize azure exporters with proxies ([#902](https://github.com/census-instrumentation/opencensus-python/pull/902)) + +## 1.0.2 +Released 2020-02-04 + +- Add local storage and retry logic for Azure Metrics Exporter + ([#845](https://github.com/census-instrumentation/opencensus-python/pull/845)) +- Add Fixed-rate sampling logic for Azure Log Exporter + ([#848](https://github.com/census-instrumentation/opencensus-python/pull/848)) +- Implement TelemetryProcessors for Azure exporters + ([#851](https://github.com/census-instrumentation/opencensus-python/pull/851)) + ## 1.0.1 Released 2019-11-26 From c2bd5b0c9b78d91de1a3108ddc376013b3ae6824 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 18 Jun 2020 11:30:41 -0700 Subject: [PATCH 47/79] update (#917) --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d9a856cc9..34c6e56fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ ## Unreleased +## 0.7.9 +Released 2020-06-17 + +- Hotfix for breaking change + ([#915](https://github.com/census-instrumentation/opencensus-python/pull/915), + ## 0.7.8 Released 2020-06-17 From 05b1b99e9718e556971d58b4086a68c498169956 Mon Sep 17 00:00:00 2001 From: Aaron Abbott Date: Wed, 24 Jun 2020 21:32:02 +0000 Subject: [PATCH 48/79] Fix stackdriver tests (#921) Fixes #905 by pinning rsa for unsupported python versions. 
--- contrib/opencensus-ext-stackdriver/setup.py | 1 + .../tests/test_stackdriver_exporter.py | 1738 +++++----- .../tests/test_stackdriver_stats.py | 2781 ++++++++--------- 3 files changed, 2260 insertions(+), 2260 deletions(-) diff --git a/contrib/opencensus-ext-stackdriver/setup.py b/contrib/opencensus-ext-stackdriver/setup.py index 7c4c8c926..f87f47ab8 100644 --- a/contrib/opencensus-ext-stackdriver/setup.py +++ b/contrib/opencensus-ext-stackdriver/setup.py @@ -41,6 +41,7 @@ install_requires=[ 'google-cloud-monitoring >= 0.30.0, < 1.0.0', 'google-cloud-trace >= 0.20.0, < 1.0.0', + 'rsa <= 4.0; python_version<="3.4"', 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py index 16b50447e..c48e21681 100644 --- a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py +++ b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_exporter.py @@ -1,869 +1,869 @@ -# # Copyright 2017, OpenCensus Authors -# # -# # Licensed under the Apache License, Version 2.0 (the "License"); -# # you may not use this file except in compliance with the License. -# # You may obtain a copy of the License at -# # -# # http://www.apache.org/licenses/LICENSE-2.0 -# # -# # Unless required by applicable law or agreed to in writing, software -# # distributed under the License is distributed on an "AS IS" BASIS, -# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# # See the License for the specific language governing permissions and -# # limitations under the License. 
- -# import unittest - -# import mock - -# from opencensus.common.version import __version__ -# from opencensus.ext.stackdriver import trace_exporter -# from opencensus.trace import span_context -# from opencensus.trace import span_data as span_data_module - - -# class _Client(object): -# def __init__(self, project=None): -# if project is None: -# project = 'PROJECT' - -# self.project = project - - -# class TestStackdriverExporter(unittest.TestCase): -# def test_constructor_default(self): -# patch = mock.patch( -# 'opencensus.ext.stackdriver.trace_exporter.Client', -# new=_Client) - -# with patch: -# exporter = trace_exporter.StackdriverExporter() - -# project_id = 'PROJECT' -# self.assertEqual(exporter.project_id, project_id) - -# def test_constructor_explicit(self): -# client = mock.Mock() -# project_id = 'PROJECT' -# client.project = project_id -# transport = mock.Mock() - -# exporter = trace_exporter.StackdriverExporter( -# client=client, project_id=project_id, transport=transport) - -# self.assertIs(exporter.client, client) -# self.assertEqual(exporter.project_id, project_id) - -# def test_export(self): -# client = mock.Mock() -# project_id = 'PROJECT' -# client.project = project_id -# exporter = trace_exporter.StackdriverExporter( -# client=client, project_id=project_id, transport=MockTransport) -# exporter.export({}) - -# self.assertTrue(exporter.transport.export_called) - -# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
-# 'monitored_resource.get_instance', -# return_value=None) -# def test_emit(self, mr_mock): -# trace_id = '6e0c63257de34c92bf9efcd03927272e' -# span_datas = [ -# span_data_module.SpanData( -# name='span', -# context=span_context.SpanContext(trace_id=trace_id), -# span_id='1111', -# parent_span_id=None, -# attributes=None, -# start_time=None, -# end_time=None, -# child_span_count=None, -# stack_trace=None, -# annotations=None, -# message_events=None, -# links=None, -# status=None, -# same_process_as_parent_span=None, -# span_kind=0, -# ) -# ] - -# stackdriver_spans = { -# 'spans': [{ -# 'status': -# None, -# 'childSpanCount': -# None, -# 'links': -# None, -# 'startTime': -# None, -# 'spanId': -# '1111', -# 'attributes': { -# 'attributeMap': { -# 'g.co/agent': { -# 'string_value': { -# 'truncated_byte_count': -# 0, -# 'value': -# 'opencensus-python [{}]'.format(__version__) -# } -# } -# } -# }, -# 'stackTrace': -# None, -# 'displayName': { -# 'truncated_byte_count': 0, -# 'value': 'span' -# }, -# 'name': -# 'projects/PROJECT/traces/{}/spans/1111'.format(trace_id), -# 'timeEvents': -# None, -# 'endTime': -# None, -# 'sameProcessAsParentSpan': -# None -# }] -# } - -# client = mock.Mock() -# project_id = 'PROJECT' -# client.project = project_id - -# exporter = trace_exporter.StackdriverExporter( -# client=client, project_id=project_id) - -# exporter.emit(span_datas) - -# name = 'projects/{}'.format(project_id) - -# client.batch_write_spans.assert_called_with(name, stackdriver_spans) -# self.assertTrue(client.batch_write_spans.called) - -# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
-# 'monitored_resource.get_instance', -# return_value=None) -# def test_translate_to_stackdriver(self, mr_mock): -# project_id = 'PROJECT' -# trace_id = '6e0c63257de34c92bf9efcd03927272e' -# span_name = 'test span' -# span_id = '6e0c63257de34c92' -# attributes = { -# 'attributeMap': { -# 'key': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'value' -# } -# }, -# 'key_double': { -# 'double_value': { -# 'value': 123.45 -# } -# }, -# 'http.host': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'host' -# } -# } -# } -# } -# parent_span_id = '6e0c63257de34c93' -# start_time = 'test start time' -# end_time = 'test end time' -# trace = { -# 'spans': [{ -# 'displayName': { -# 'value': span_name, -# 'truncated_byte_count': 0 -# }, -# 'spanId': -# span_id, -# 'startTime': -# start_time, -# 'endTime': -# end_time, -# 'parentSpanId': -# parent_span_id, -# 'attributes': -# attributes, -# 'someRandomKey': -# 'this should not be included in result', -# 'childSpanCount': -# 0 -# }], -# 'traceId': -# trace_id -# } - -# client = mock.Mock() -# client.project = project_id -# exporter = trace_exporter.StackdriverExporter( -# client=client, project_id=project_id) - -# spans = list(exporter.translate_to_stackdriver(trace)) - -# expected_traces = [{ -# 'name': 'projects/{}/traces/{}/spans/{}'.format( -# project_id, trace_id, span_id), -# 'displayName': { -# 'value': span_name, -# 'truncated_byte_count': 0 -# }, -# 'attributes': { -# 'attributeMap': { -# 'g.co/agent': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': -# 'opencensus-python [{}]'.format(__version__) -# } -# }, -# 'key': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'value' -# } -# }, -# 'key_double': { -# 'double_value': { -# 'value': 123.45 -# } -# }, -# '/http/host': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'host' -# } -# } -# } -# }, -# 'spanId': str(span_id), -# 'startTime': start_time, -# 'endTime': end_time, -# 'parentSpanId': 
str(parent_span_id), -# 'status': None, -# 'links': None, -# 'stackTrace': None, -# 'timeEvents': None, -# 'childSpanCount': 0, -# 'sameProcessAsParentSpan': None -# }] - -# self.assertEqual(spans, expected_traces) - -# def test_translate_common_attributes_to_stackdriver_no_map(self): -# project_id = 'PROJECT' -# client = mock.Mock() -# client.project = project_id -# exporter = trace_exporter.StackdriverExporter( -# client=client, project_id=project_id) - -# attributes = {'outer key': 'some value'} -# expected_attributes = {'outer key': 'some value'} - -# exporter.map_attributes(attributes) -# self.assertEqual(attributes, expected_attributes) - -# def test_translate_common_attributes_to_stackdriver_none(self): -# project_id = 'PROJECT' -# client = mock.Mock() -# client.project = project_id -# exporter = trace_exporter.StackdriverExporter( -# client=client, project_id=project_id) - -# # does not throw -# self.assertIsNone(exporter.map_attributes(None)) - -# def test_translate_common_attributes_to_stackdriver(self): -# project_id = 'PROJECT' -# client = mock.Mock() -# client.project = project_id -# exporter = trace_exporter.StackdriverExporter( -# client=client, project_id=project_id) - -# attributes = { -# 'outer key': 'some value', -# 'attributeMap': { -# 'key': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'value' -# } -# }, -# 'component': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'http' -# } -# }, -# 'error.message': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'error message' -# } -# }, -# 'error.name': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'error name' -# } -# }, -# 'http.host': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'host' -# } -# }, -# 'http.method': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'GET' -# } -# }, -# 'http.status_code': { -# 'int_value': { -# 'value': 200 -# } -# }, -# 'http.url': { -# 'string_value': { -# 
'truncated_byte_count': 0, -# 'value': 'http://host:port/path?query' -# } -# }, -# 'http.user_agent': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'some user agent' -# } -# }, -# 'http.client_city': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'Redmond' -# } -# }, -# 'http.client_country': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'USA' -# } -# }, -# 'http.client_protocol': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'HTTP 1.1' -# } -# }, -# 'http.client_region': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'WA' -# } -# }, -# 'http.request_size': { -# 'int_value': { -# 'value': 100 -# } -# }, -# 'http.response_size': { -# 'int_value': { -# 'value': 10 -# } -# }, -# 'pid': { -# 'int_value': { -# 'value': 123456789 -# } -# }, -# 'tid': { -# 'int_value': { -# 'value': 987654321 -# } -# }, -# 'stacktrace': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'at unknown' -# } -# }, -# 'grpc.host_port': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'localhost:50051' -# } -# }, -# 'grpc.method': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'post' -# } -# } -# } -# } - -# expected_attributes = { -# 'outer key': 'some value', -# 'attributeMap': { -# 'key': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'value' -# } -# }, -# '/component': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'http' -# } -# }, -# '/error/message': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'error message' -# } -# }, -# '/error/name': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'error name' -# } -# }, -# '/http/host': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'host' -# } -# }, -# '/http/method': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'GET' -# } -# }, -# '/http/status_code': { -# 'string_value': { -# 
'truncated_byte_count': 0, -# 'value': '200' -# } -# }, -# '/http/url': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'http://host:port/path?query' -# } -# }, -# '/http/user_agent': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'some user agent' -# } -# }, -# '/http/client_city': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'Redmond' -# } -# }, -# '/http/client_country': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'USA' -# } -# }, -# '/http/client_protocol': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'HTTP 1.1' -# } -# }, -# '/http/client_region': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'WA' -# } -# }, -# '/http/request/size': { -# 'int_value': { -# 'value': 100 -# } -# }, -# '/http/response/size': { -# 'int_value': { -# 'value': 10 -# } -# }, -# '/pid': { -# 'int_value': { -# 'value': 123456789 -# } -# }, -# '/tid': { -# 'int_value': { -# 'value': 987654321 -# } -# }, -# '/stacktrace': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'at unknown' -# } -# }, -# '/grpc/host_port': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'localhost:50051' -# } -# }, -# '/grpc/method': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'post' -# } -# } -# } -# } - -# exporter.map_attributes(attributes) -# self.assertEqual(attributes, expected_attributes) - -# def test_translate_common_attributes_status_code(self): -# project_id = 'PROJECT' -# client = mock.Mock() -# client.project = project_id -# exporter = trace_exporter.StackdriverExporter( -# client=client, project_id=project_id) - -# attributes = { -# 'outer key': 'some value', -# 'attributeMap': { -# 'http.status_code': { -# 'int_value': 200 -# } -# } -# } - -# expected_attributes = { -# 'outer key': 'some value', -# 'attributeMap': { -# '/http/status_code': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': '200' -# } -# } -# } -# } - -# 
exporter.map_attributes(attributes) -# self.assertEqual(attributes, expected_attributes) - - -# class Test_set_attributes_gae(unittest.TestCase): -# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' -# 'monitored_resource.get_instance', -# return_value=None) -# def test_set_attributes_gae(self, mr_mock): -# import os - -# trace = {'spans': [{'attributes': {}}]} - -# expected = { -# 'attributes': { -# 'attributeMap': { -# 'g.co/gae/app/module': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'service' -# } -# }, -# 'g.co/gae/app/instance': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'flex' -# } -# }, -# 'g.co/gae/app/version': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'version' -# } -# }, -# 'g.co/gae/app/project': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'project' -# } -# }, -# 'g.co/agent': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': -# 'opencensus-python [{}]'.format(__version__) -# } -# }, -# } -# } -# } - -# with mock.patch.dict( -# os.environ, { -# trace_exporter._APPENGINE_FLEXIBLE_ENV_VM: 'vm', -# trace_exporter._APPENGINE_FLEXIBLE_ENV_FLEX: 'flex', -# 'GOOGLE_CLOUD_PROJECT': 'project', -# 'GAE_SERVICE': 'service', -# 'GAE_VERSION': 'version' -# }): -# self.assertTrue(trace_exporter.is_gae_environment()) -# trace_exporter.set_attributes(trace) - -# span = trace.get('spans')[0] -# self.assertEqual(span, expected) - - -# class TestMonitoredResourceAttributes(unittest.TestCase): -# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
-# 'monitored_resource.get_instance') -# def test_monitored_resource_attributes_gke(self, gmr_mock): -# import os - -# trace = {'spans': [{'attributes': {}}]} - -# expected = { -# 'attributes': { -# 'attributeMap': { -# 'g.co/gae/app/module': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'service' -# } -# }, -# 'g.co/gae/app/instance': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'flex' -# } -# }, -# 'g.co/gae/app/version': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'version' -# } -# }, -# 'g.co/gae/app/project': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'project' -# } -# }, -# 'g.co/agent': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': -# 'opencensus-python [{}]'.format(__version__) -# } -# }, -# 'g.co/r/k8s_container/project_id': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'my_project' -# } -# }, -# 'g.co/r/k8s_container/location': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'zone1' -# } -# }, -# 'g.co/r/k8s_container/namespace_name': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'namespace' -# } -# }, -# 'g.co/r/k8s_container/pod_name': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'pod' -# } -# }, -# 'g.co/r/k8s_container/cluster_name': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'cluster' -# } -# }, -# 'g.co/r/k8s_container/container_name': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'c1' -# } -# }, -# } -# } -# } - -# mock_resource = mock.Mock() -# mock_resource.get_type.return_value = 'k8s_container' -# mock_resource.get_labels.return_value = { -# 'k8s.io/pod/name': 'pod', -# 'k8s.io/cluster/name': 'cluster', -# 'k8s.io/namespace/name': 'namespace', -# 'k8s.io/container/name': 'c1', -# 'project_id': 'my_project', -# 'zone': 'zone1' -# } -# gmr_mock.return_value = mock_resource -# with mock.patch.dict( -# os.environ, { -# 
trace_exporter._APPENGINE_FLEXIBLE_ENV_VM: 'vm', -# trace_exporter._APPENGINE_FLEXIBLE_ENV_FLEX: 'flex', -# 'GOOGLE_CLOUD_PROJECT': 'project', -# 'GAE_SERVICE': 'service', -# 'GAE_VERSION': 'version' -# }): -# self.assertTrue(trace_exporter.is_gae_environment()) -# trace_exporter.set_attributes(trace) - -# span = trace.get('spans')[0] -# self.assertEqual(span, expected) - -# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' -# 'monitored_resource.get_instance') -# def test_monitored_resource_attributes_gce(self, gmr_mock): -# trace = {'spans': [{'attributes': {}}]} - -# expected = { -# 'attributes': { -# 'attributeMap': { -# 'g.co/agent': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': -# 'opencensus-python [{}]'.format(__version__) -# } -# }, -# 'g.co/r/gce_instance/project_id': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'my_project' -# } -# }, -# 'g.co/r/gce_instance/instance_id': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': '12345' -# } -# }, -# 'g.co/r/gce_instance/zone': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'zone1' -# } -# }, -# } -# } -# } - -# mock_resource = mock.Mock() -# mock_resource.get_type.return_value = 'gce_instance' -# mock_resource.get_labels.return_value = { -# 'project_id': 'my_project', -# 'instance_id': '12345', -# 'zone': 'zone1' -# } -# gmr_mock.return_value = mock_resource -# trace_exporter.set_attributes(trace) -# span = trace.get('spans')[0] -# self.assertEqual(span, expected) - -# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
-# 'monitored_resource.get_instance') -# def test_monitored_resource_attributes_aws(self, amr_mock): -# trace = {'spans': [{'attributes': {}}]} - -# expected = { -# 'attributes': { -# 'attributeMap': { -# 'g.co/agent': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': -# 'opencensus-python [{}]'.format(__version__) -# } -# }, -# 'g.co/r/aws_ec2_instance/aws_account': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': '123456789012' -# } -# }, -# 'g.co/r/aws_ec2_instance/region': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': 'aws:us-west-2' -# } -# }, -# } -# } -# } - -# mock_resource = mock.Mock() -# mock_resource.get_type.return_value = 'aws_ec2_instance' -# mock_resource.get_labels.return_value = { -# 'aws_account': '123456789012', -# 'region': 'us-west-2' -# } -# amr_mock.return_value = mock_resource - -# trace_exporter.set_attributes(trace) -# span = trace.get('spans')[0] -# self.assertEqual(span, expected) - -# @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
-# 'monitored_resource.get_instance') -# def test_monitored_resource_attributes_None(self, mr_mock): -# trace = {'spans': [{'attributes': {}}]} - -# expected = { -# 'attributes': { -# 'attributeMap': { -# 'g.co/agent': { -# 'string_value': { -# 'truncated_byte_count': 0, -# 'value': -# 'opencensus-python [{}]'.format(__version__) -# } -# } -# } -# } -# } - -# mr_mock.return_value = None -# trace_exporter.set_attributes(trace) -# span = trace.get('spans')[0] -# self.assertEqual(span, expected) - -# mock_resource = mock.Mock() -# mock_resource.get_type.return_value = mock.Mock() -# mock_resource.get_labels.return_value = mock.Mock() -# mr_mock.return_value = mock_resource - -# trace_exporter.set_attributes(trace) -# span = trace.get('spans')[0] -# self.assertEqual(span, expected) - - -# class MockTransport(object): -# def __init__(self, exporter=None): -# self.export_called = False -# self.exporter = exporter - -# def export(self, trace): -# self.export_called = True +# Copyright 2017, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import mock + +from opencensus.common.version import __version__ +from opencensus.ext.stackdriver import trace_exporter +from opencensus.trace import span_context +from opencensus.trace import span_data as span_data_module + + +class _Client(object): + def __init__(self, project=None): + if project is None: + project = 'PROJECT' + + self.project = project + + +class TestStackdriverExporter(unittest.TestCase): + def test_constructor_default(self): + patch = mock.patch( + 'opencensus.ext.stackdriver.trace_exporter.Client', + new=_Client) + + with patch: + exporter = trace_exporter.StackdriverExporter() + + project_id = 'PROJECT' + self.assertEqual(exporter.project_id, project_id) + + def test_constructor_explicit(self): + client = mock.Mock() + project_id = 'PROJECT' + client.project = project_id + transport = mock.Mock() + + exporter = trace_exporter.StackdriverExporter( + client=client, project_id=project_id, transport=transport) + + self.assertIs(exporter.client, client) + self.assertEqual(exporter.project_id, project_id) + + def test_export(self): + client = mock.Mock() + project_id = 'PROJECT' + client.project = project_id + exporter = trace_exporter.StackdriverExporter( + client=client, project_id=project_id, transport=MockTransport) + exporter.export({}) + + self.assertTrue(exporter.transport.export_called) + + @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
+ 'monitored_resource.get_instance', + return_value=None) + def test_emit(self, mr_mock): + trace_id = '6e0c63257de34c92bf9efcd03927272e' + span_datas = [ + span_data_module.SpanData( + name='span', + context=span_context.SpanContext(trace_id=trace_id), + span_id='1111', + parent_span_id=None, + attributes=None, + start_time=None, + end_time=None, + child_span_count=None, + stack_trace=None, + annotations=None, + message_events=None, + links=None, + status=None, + same_process_as_parent_span=None, + span_kind=0, + ) + ] + + stackdriver_spans = { + 'spans': [{ + 'status': + None, + 'childSpanCount': + None, + 'links': + None, + 'startTime': + None, + 'spanId': + '1111', + 'attributes': { + 'attributeMap': { + 'g.co/agent': { + 'string_value': { + 'truncated_byte_count': + 0, + 'value': + 'opencensus-python [{}]'.format(__version__) + } + } + } + }, + 'stackTrace': + None, + 'displayName': { + 'truncated_byte_count': 0, + 'value': 'span' + }, + 'name': + 'projects/PROJECT/traces/{}/spans/1111'.format(trace_id), + 'timeEvents': + None, + 'endTime': + None, + 'sameProcessAsParentSpan': + None + }] + } + + client = mock.Mock() + project_id = 'PROJECT' + client.project = project_id + + exporter = trace_exporter.StackdriverExporter( + client=client, project_id=project_id) + + exporter.emit(span_datas) + + name = 'projects/{}'.format(project_id) + + client.batch_write_spans.assert_called_with(name, stackdriver_spans) + self.assertTrue(client.batch_write_spans.called) + + @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
+ 'monitored_resource.get_instance', + return_value=None) + def test_translate_to_stackdriver(self, mr_mock): + project_id = 'PROJECT' + trace_id = '6e0c63257de34c92bf9efcd03927272e' + span_name = 'test span' + span_id = '6e0c63257de34c92' + attributes = { + 'attributeMap': { + 'key': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'value' + } + }, + 'key_double': { + 'double_value': { + 'value': 123.45 + } + }, + 'http.host': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'host' + } + } + } + } + parent_span_id = '6e0c63257de34c93' + start_time = 'test start time' + end_time = 'test end time' + trace = { + 'spans': [{ + 'displayName': { + 'value': span_name, + 'truncated_byte_count': 0 + }, + 'spanId': + span_id, + 'startTime': + start_time, + 'endTime': + end_time, + 'parentSpanId': + parent_span_id, + 'attributes': + attributes, + 'someRandomKey': + 'this should not be included in result', + 'childSpanCount': + 0 + }], + 'traceId': + trace_id + } + + client = mock.Mock() + client.project = project_id + exporter = trace_exporter.StackdriverExporter( + client=client, project_id=project_id) + + spans = list(exporter.translate_to_stackdriver(trace)) + + expected_traces = [{ + 'name': 'projects/{}/traces/{}/spans/{}'.format( + project_id, trace_id, span_id), + 'displayName': { + 'value': span_name, + 'truncated_byte_count': 0 + }, + 'attributes': { + 'attributeMap': { + 'g.co/agent': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': + 'opencensus-python [{}]'.format(__version__) + } + }, + 'key': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'value' + } + }, + 'key_double': { + 'double_value': { + 'value': 123.45 + } + }, + '/http/host': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'host' + } + } + } + }, + 'spanId': str(span_id), + 'startTime': start_time, + 'endTime': end_time, + 'parentSpanId': str(parent_span_id), + 'status': None, + 'links': None, + 'stackTrace': None, + 'timeEvents': 
None, + 'childSpanCount': 0, + 'sameProcessAsParentSpan': None + }] + + self.assertEqual(spans, expected_traces) + + def test_translate_common_attributes_to_stackdriver_no_attribute_map(self): + project_id = 'PROJECT' + client = mock.Mock() + client.project = project_id + exporter = trace_exporter.StackdriverExporter( + client=client, project_id=project_id) + + attributes = {'outer key': 'some value'} + expected_attributes = {'outer key': 'some value'} + + exporter.map_attributes(attributes) + self.assertEqual(attributes, expected_attributes) + + def test_translate_common_attributes_to_stackdriver_none(self): + project_id = 'PROJECT' + client = mock.Mock() + client.project = project_id + exporter = trace_exporter.StackdriverExporter( + client=client, project_id=project_id) + + # does not throw + self.assertIsNone(exporter.map_attributes(None)) + + def test_translate_common_attributes_to_stackdriver(self): + project_id = 'PROJECT' + client = mock.Mock() + client.project = project_id + exporter = trace_exporter.StackdriverExporter( + client=client, project_id=project_id) + + attributes = { + 'outer key': 'some value', + 'attributeMap': { + 'key': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'value' + } + }, + 'component': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'http' + } + }, + 'error.message': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'error message' + } + }, + 'error.name': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'error name' + } + }, + 'http.host': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'host' + } + }, + 'http.method': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'GET' + } + }, + 'http.status_code': { + 'int_value': { + 'value': 200 + } + }, + 'http.url': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'http://host:port/path?query' + } + }, + 'http.user_agent': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'some 
user agent' + } + }, + 'http.client_city': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'Redmond' + } + }, + 'http.client_country': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'USA' + } + }, + 'http.client_protocol': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'HTTP 1.1' + } + }, + 'http.client_region': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'WA' + } + }, + 'http.request_size': { + 'int_value': { + 'value': 100 + } + }, + 'http.response_size': { + 'int_value': { + 'value': 10 + } + }, + 'pid': { + 'int_value': { + 'value': 123456789 + } + }, + 'tid': { + 'int_value': { + 'value': 987654321 + } + }, + 'stacktrace': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'at unknown' + } + }, + 'grpc.host_port': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'localhost:50051' + } + }, + 'grpc.method': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'post' + } + } + } + } + + expected_attributes = { + 'outer key': 'some value', + 'attributeMap': { + 'key': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'value' + } + }, + '/component': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'http' + } + }, + '/error/message': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'error message' + } + }, + '/error/name': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'error name' + } + }, + '/http/host': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'host' + } + }, + '/http/method': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'GET' + } + }, + '/http/status_code': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': '200' + } + }, + '/http/url': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'http://host:port/path?query' + } + }, + '/http/user_agent': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'some user agent' + } + }, + '/http/client_city': 
{ + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'Redmond' + } + }, + '/http/client_country': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'USA' + } + }, + '/http/client_protocol': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'HTTP 1.1' + } + }, + '/http/client_region': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'WA' + } + }, + '/http/request/size': { + 'int_value': { + 'value': 100 + } + }, + '/http/response/size': { + 'int_value': { + 'value': 10 + } + }, + '/pid': { + 'int_value': { + 'value': 123456789 + } + }, + '/tid': { + 'int_value': { + 'value': 987654321 + } + }, + '/stacktrace': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'at unknown' + } + }, + '/grpc/host_port': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'localhost:50051' + } + }, + '/grpc/method': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'post' + } + } + } + } + + exporter.map_attributes(attributes) + self.assertEqual(attributes, expected_attributes) + + def test_translate_common_attributes_status_code(self): + project_id = 'PROJECT' + client = mock.Mock() + client.project = project_id + exporter = trace_exporter.StackdriverExporter( + client=client, project_id=project_id) + + attributes = { + 'outer key': 'some value', + 'attributeMap': { + 'http.status_code': { + 'int_value': 200 + } + } + } + + expected_attributes = { + 'outer key': 'some value', + 'attributeMap': { + '/http/status_code': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': '200' + } + } + } + } + + exporter.map_attributes(attributes) + self.assertEqual(attributes, expected_attributes) + + +class Test_set_attributes_gae(unittest.TestCase): + @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
+ 'monitored_resource.get_instance', + return_value=None) + def test_set_attributes_gae(self, mr_mock): + import os + + trace = {'spans': [{'attributes': {}}]} + + expected = { + 'attributes': { + 'attributeMap': { + 'g.co/gae/app/module': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'service' + } + }, + 'g.co/gae/app/instance': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'flex' + } + }, + 'g.co/gae/app/version': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'version' + } + }, + 'g.co/gae/app/project': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'project' + } + }, + 'g.co/agent': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': + 'opencensus-python [{}]'.format(__version__) + } + }, + } + } + } + + with mock.patch.dict( + os.environ, { + trace_exporter._APPENGINE_FLEXIBLE_ENV_VM: 'vm', + trace_exporter._APPENGINE_FLEXIBLE_ENV_FLEX: 'flex', + 'GOOGLE_CLOUD_PROJECT': 'project', + 'GAE_SERVICE': 'service', + 'GAE_VERSION': 'version' + }): + self.assertTrue(trace_exporter.is_gae_environment()) + trace_exporter.set_attributes(trace) + + span = trace.get('spans')[0] + self.assertEqual(span, expected) + + +class TestMonitoredResourceAttributes(unittest.TestCase): + @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
+ 'monitored_resource.get_instance') + def test_monitored_resource_attributes_gke(self, gmr_mock): + import os + + trace = {'spans': [{'attributes': {}}]} + + expected = { + 'attributes': { + 'attributeMap': { + 'g.co/gae/app/module': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'service' + } + }, + 'g.co/gae/app/instance': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'flex' + } + }, + 'g.co/gae/app/version': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'version' + } + }, + 'g.co/gae/app/project': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'project' + } + }, + 'g.co/agent': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': + 'opencensus-python [{}]'.format(__version__) + } + }, + 'g.co/r/k8s_container/project_id': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'my_project' + } + }, + 'g.co/r/k8s_container/location': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'zone1' + } + }, + 'g.co/r/k8s_container/namespace_name': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'namespace' + } + }, + 'g.co/r/k8s_container/pod_name': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'pod' + } + }, + 'g.co/r/k8s_container/cluster_name': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'cluster' + } + }, + 'g.co/r/k8s_container/container_name': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'c1' + } + }, + } + } + } + + mock_resource = mock.Mock() + mock_resource.get_type.return_value = 'k8s_container' + mock_resource.get_labels.return_value = { + 'k8s.io/pod/name': 'pod', + 'k8s.io/cluster/name': 'cluster', + 'k8s.io/namespace/name': 'namespace', + 'k8s.io/container/name': 'c1', + 'project_id': 'my_project', + 'zone': 'zone1' + } + gmr_mock.return_value = mock_resource + with mock.patch.dict( + os.environ, { + trace_exporter._APPENGINE_FLEXIBLE_ENV_VM: 'vm', + trace_exporter._APPENGINE_FLEXIBLE_ENV_FLEX: 'flex', + 
'GOOGLE_CLOUD_PROJECT': 'project', + 'GAE_SERVICE': 'service', + 'GAE_VERSION': 'version' + }): + self.assertTrue(trace_exporter.is_gae_environment()) + trace_exporter.set_attributes(trace) + + span = trace.get('spans')[0] + self.assertEqual(span, expected) + + @mock.patch('opencensus.ext.stackdriver.trace_exporter.' + 'monitored_resource.get_instance') + def test_monitored_resource_attributes_gce(self, gmr_mock): + trace = {'spans': [{'attributes': {}}]} + + expected = { + 'attributes': { + 'attributeMap': { + 'g.co/agent': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': + 'opencensus-python [{}]'.format(__version__) + } + }, + 'g.co/r/gce_instance/project_id': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'my_project' + } + }, + 'g.co/r/gce_instance/instance_id': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': '12345' + } + }, + 'g.co/r/gce_instance/zone': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'zone1' + } + }, + } + } + } + + mock_resource = mock.Mock() + mock_resource.get_type.return_value = 'gce_instance' + mock_resource.get_labels.return_value = { + 'project_id': 'my_project', + 'instance_id': '12345', + 'zone': 'zone1' + } + gmr_mock.return_value = mock_resource + trace_exporter.set_attributes(trace) + span = trace.get('spans')[0] + self.assertEqual(span, expected) + + @mock.patch('opencensus.ext.stackdriver.trace_exporter.' 
+ 'monitored_resource.get_instance') + def test_monitored_resource_attributes_aws(self, amr_mock): + trace = {'spans': [{'attributes': {}}]} + + expected = { + 'attributes': { + 'attributeMap': { + 'g.co/agent': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': + 'opencensus-python [{}]'.format(__version__) + } + }, + 'g.co/r/aws_ec2_instance/aws_account': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': '123456789012' + } + }, + 'g.co/r/aws_ec2_instance/region': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': 'aws:us-west-2' + } + }, + } + } + } + + mock_resource = mock.Mock() + mock_resource.get_type.return_value = 'aws_ec2_instance' + mock_resource.get_labels.return_value = { + 'aws_account': '123456789012', + 'region': 'us-west-2' + } + amr_mock.return_value = mock_resource + + trace_exporter.set_attributes(trace) + span = trace.get('spans')[0] + self.assertEqual(span, expected) + + @mock.patch('opencensus.ext.stackdriver.trace_exporter.' + 'monitored_resource.get_instance') + def test_monitored_resource_attributes_None(self, mr_mock): + trace = {'spans': [{'attributes': {}}]} + + expected = { + 'attributes': { + 'attributeMap': { + 'g.co/agent': { + 'string_value': { + 'truncated_byte_count': 0, + 'value': + 'opencensus-python [{}]'.format(__version__) + } + } + } + } + } + + mr_mock.return_value = None + trace_exporter.set_attributes(trace) + span = trace.get('spans')[0] + self.assertEqual(span, expected) + + mock_resource = mock.Mock() + mock_resource.get_type.return_value = mock.Mock() + mock_resource.get_labels.return_value = mock.Mock() + mr_mock.return_value = mock_resource + + trace_exporter.set_attributes(trace) + span = trace.get('spans')[0] + self.assertEqual(span, expected) + + +class MockTransport(object): + def __init__(self, exporter=None): + self.export_called = False + self.exporter = exporter + + def export(self, trace): + self.export_called = True diff --git 
a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py index db9eb66e1..fd3b517c6 100644 --- a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py +++ b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py @@ -1,1391 +1,1390 @@ -# flake8: noqa -# # Copyright 2018, OpenCensus Authors -# # -# # Licensed under the Apache License, Version 2.0 (the "License"); -# # you may not use this file except in compliance with the License. -# # You may obtain a copy of the License at -# # -# # http://www.apache.org/licenses/LICENSE-2.0 -# # -# # Unless required by applicable law or agreed to in writing, software -# # distributed under the License is distributed on an "AS IS" BASIS, -# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# # See the License for the specific language governing permissions and -# # limitations under the License. - -# import unittest -# from datetime import datetime - -# import google.auth -# import mock -# from google.cloud import monitoring_v3 - -# from opencensus.common import utils -# from opencensus.common.version import __version__ -# from opencensus.ext.stackdriver import stats_exporter as stackdriver -# from opencensus.metrics import label_key, label_value -# from opencensus.metrics import transport as transport_module -# from opencensus.metrics.export import ( -# metric, -# metric_descriptor, -# point, -# time_series, -# value, -# ) -# from opencensus.stats import aggregation as aggregation_module -# from opencensus.stats import aggregation_data as aggregation_data_module -# from opencensus.stats import execution_context -# from opencensus.stats import measure as measure_module -# from opencensus.stats import metric_utils -# from opencensus.stats import stats as stats_module -# from opencensus.stats import view as view_module -# from opencensus.stats import view_data as view_data_module -# from opencensus.tags 
import tag_key as tag_key_module -# from opencensus.tags import tag_map as tag_map_module -# from opencensus.tags import tag_value as tag_value_module - -# MiB = 1 << 20 -# FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend") -# FRONTEND_KEY_FLOAT = tag_key_module.TagKey("my.org/keys/frontend-FLOAT") -# FRONTEND_KEY_INT = tag_key_module.TagKey("my.org/keys/frontend-INT") -# FRONTEND_KEY_STR = tag_key_module.TagKey("my.org/keys/frontend-STR") - -# FRONTEND_KEY_CLEAN = "my_org_keys_frontend" -# FRONTEND_KEY_FLOAT_CLEAN = "my_org_keys_frontend_FLOAT" -# FRONTEND_KEY_INT_CLEAN = "my_org_keys_frontend_INT" -# FRONTEND_KEY_STR_CLEAN = "my_org_keys_frontend_STR" - -# VIDEO_SIZE_MEASURE = measure_module.MeasureFloat( -# "my.org/measure/video_size_test2", "size of processed videos", "By") -# VIDEO_SIZE_MEASURE_2 = measure_module.MeasureFloat( -# "my.org/measure/video_size_test_2", "size of processed videos", "By") - -# VIDEO_SIZE_MEASURE_FLOAT = measure_module.MeasureFloat( -# "my.org/measure/video_size_test-float", "size of processed videos-float", -# "By") - -# VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2" -# VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation( -# [16.0 * MiB, 256.0 * MiB]) -# VIDEO_SIZE_VIEW = view_module.View( -# VIDEO_SIZE_VIEW_NAME, "processed video size over time", [FRONTEND_KEY], -# VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION) - -# TEST_TIME = datetime(2018, 12, 25, 1, 2, 3, 4) -# TEST_TIME_STR = utils.to_iso_str(TEST_TIME) - - -# class _Client(object): -# def __init__(self, client_info=None): -# self.client_info = client_info - - -# class TestOptions(unittest.TestCase): -# def test_options_blank(self): -# options = stackdriver.Options() - -# self.assertEqual(options.project_id, "") -# self.assertEqual(options.resource, "") - -# def test_options_parameters(self): -# options = stackdriver.Options( -# project_id="project-id", metric_prefix="sample") -# self.assertEqual(options.project_id, "project-id") -# 
self.assertEqual(options.metric_prefix, "sample") - -# def test_default_monitoring_labels(self): -# options = stackdriver.Options(default_monitoring_labels={ -# label_key.LabelKey('lk_key', 'lk_desc'): -# label_value.LabelValue('lk_value') -# }) - -# self.assertEqual(len(options.default_monitoring_labels), 1) -# [[lk, lv]] = options.default_monitoring_labels.items() -# self.assertEqual(lk.key, 'lk_key') -# self.assertEqual(lk.description, 'lk_desc') -# self.assertEqual(lv.value, 'lk_value') - -# def test_default_monitoring_labels_blank(self): -# with mock.patch('opencensus.ext.stackdriver.stats_exporter' -# '.get_task_value') as mock_gtv: -# options = stackdriver.Options() - -# mock_gtv.assert_called() - -# self.assertEqual(len(options.default_monitoring_labels), 1) -# [[lk, lv]] = options.default_monitoring_labels.items() -# self.assertEqual(lk.key, stackdriver.OPENCENSUS_TASK) -# self.assertEqual(lk.description, -# stackdriver.OPENCENSUS_TASK_DESCRIPTION) -# self.assertEqual(lv.value, mock_gtv()) - -# def test_bad_default_monitoring_labels(self): -# with self.assertRaises(AttributeError): -# stackdriver.Options( -# default_monitoring_labels=[ -# 'not a dict' -# ]) - -# with self.assertRaises(TypeError): -# stackdriver.Options( -# default_monitoring_labels={ -# 'bad key': -# label_value.LabelValue('clk_value') -# }) - -# with self.assertRaises(TypeError): -# stackdriver.Options( -# default_monitoring_labels={ -# label_key.LabelKey('clk_key', 'clk_desc'): -# 'bad value' -# }) - - -# class TestStackdriverStatsExporter(unittest.TestCase): -# def test_constructor(self): -# exporter = stackdriver.StackdriverStatsExporter() - -# self.assertIsNone(exporter.client) - -# def test_constructor_param(self): -# exporter = stackdriver.StackdriverStatsExporter( -# options=stackdriver.Options(project_id=1)) -# self.assertEqual(exporter.options.project_id, 1) - -# def test_null_options(self): -# # Check that we don't suppress auth errors -# auth_error = 
google.auth.exceptions.DefaultCredentialsError -# mock_auth_error = mock.Mock() -# mock_auth_error.side_effect = auth_error -# with mock.patch('opencensus.ext.stackdriver.stats_exporter' -# '.google.auth.default', mock_auth_error): -# with self.assertRaises(auth_error): -# stackdriver.new_stats_exporter() - -# # Check that we get the default credentials' project ID -# mock_auth_ok = mock.Mock() -# mock_auth_ok.return_value = (None, 123) -# with mock.patch('opencensus.ext.stackdriver.stats_exporter' -# '.google.auth.default', mock_auth_ok): -# sdse = stackdriver.new_stats_exporter() -# self.assertEqual(sdse.options.project_id, 123) - -# # Check that we raise if auth works but the project is empty -# mock_auth_no_project = mock.Mock() -# mock_auth_no_project.return_value = (None, '') -# with mock.patch('opencensus.ext.stackdriver.stats_exporter' -# '.google.auth.default', mock_auth_no_project): -# with self.assertRaises(ValueError): -# stackdriver.new_stats_exporter() - -# def test_blank_project(self): -# self.assertRaises(ValueError, stackdriver.new_stats_exporter, -# stackdriver.Options(project_id="")) - -# def test_not_blank_project(self): -# patch_client = mock.patch( -# ('opencensus.ext.stackdriver.stats_exporter' -# '.monitoring_v3.MetricServiceClient'), _Client) - -# with patch_client: -# exporter_created = stackdriver.new_stats_exporter( -# stackdriver.Options(project_id=1)) - -# self.assertIsInstance(exporter_created, -# stackdriver.StackdriverStatsExporter) - -# def test_get_user_agent_slug(self): -# self.assertIn(__version__, stackdriver.get_user_agent_slug()) - -# def test_client_info_user_agent(self): -# """Check that the monitoring client sets a user agent. - -# The user agent should include the library version. Note that this -# assumes MetricServiceClient calls ClientInfo.to_user_agent to attach -# the user agent as metadata to metric service API calls. 
-# """ -# patch_client = mock.patch( -# 'opencensus.ext.stackdriver.stats_exporter.monitoring_v3' -# '.MetricServiceClient', _Client) - -# with patch_client: -# exporter = stackdriver.new_stats_exporter( -# stackdriver.Options(project_id=1)) - -# self.assertIn(stackdriver.get_user_agent_slug(), -# exporter.client.client_info.to_user_agent()) - -# def test_sanitize(self): -# # empty -# result = stackdriver.sanitize_label("") -# self.assertEqual(result, "") - -# # all invalid -# result = stackdriver.sanitize_label("/*^#$") -# self.assertEqual(result, "key_") - -# # all valid -# result = stackdriver.sanitize_label("abc") -# self.assertEqual(result, "abc") - -# # mixed -# result = stackdriver.sanitize_label("a.b/c") -# self.assertEqual(result, "a_b_c") - -# # starts with '_' -# result = stackdriver.sanitize_label("_abc") -# self.assertEqual(result, "key_abc") - -# # starts with digit -# result = stackdriver.sanitize_label("0abc") -# self.assertEqual(result, "key_0abc") - -# # too long -# result = stackdriver.sanitize_label("0123456789" * 10) -# self.assertEqual(len(result), 100) -# self.assertEqual(result, "key_" + "0123456789" * 9 + "012345") - -# def test_get_task_value(self): -# task_value = stackdriver.get_task_value() -# self.assertNotEqual(task_value, "") - -# def test_namespaced_views(self): -# view_name = "view-1" -# expected_view_name_namespaced = ( -# "custom.googleapis.com/opencensus/{}".format(view_name)) -# view_name_namespaced = stackdriver.namespaced_view_name(view_name, "") -# self.assertEqual(expected_view_name_namespaced, view_name_namespaced) - -# expected_view_name_namespaced = "kubernetes.io/myorg/%s" % view_name -# view_name_namespaced = stackdriver.namespaced_view_name( -# view_name, "kubernetes.io/myorg") -# self.assertEqual(expected_view_name_namespaced, view_name_namespaced) - -# def test_stackdriver_register_exporter(self): -# stats = stats_module.stats -# view_manager = stats.view_manager - -# exporter = mock.Mock() -# if 
len(view_manager.measure_to_view_map.exporters) > 0: -# view_manager.unregister_exporter( -# view_manager.measure_to_view_map.exporters[0]) -# view_manager.register_exporter(exporter) - -# registered_exporters = len(view_manager.measure_to_view_map.exporters) - -# self.assertEqual(registered_exporters, 1) - -# @mock.patch('os.getpid', return_value=12345) -# @mock.patch( -# 'platform.uname', -# return_value=('system', 'node', 'release', 'version', 'machine', -# 'processor')) -# def test_get_task_value_with_hostname(self, mock_uname, mock_pid): -# self.assertEqual(stackdriver.get_task_value(), "py-12345@node") - -# @mock.patch('os.getpid', return_value=12345) -# @mock.patch( -# 'platform.uname', -# return_value=('system', '', 'release', 'version', 'machine', -# 'processor')) -# def test_get_task_value_without_hostname(self, mock_uname, mock_pid): -# self.assertEqual(stackdriver.get_task_value(), "py-12345@localhost") - -# def test_default_default_monitoring_labels(self): -# """Check that metrics include OC task label by default.""" -# exporter = stackdriver.StackdriverStatsExporter( -# options=stackdriver.Options(project_id='project_id'), -# client=mock.Mock()) - -# lv = label_value.LabelValue('val') -# val = value.ValueLong(value=123) -# dt = datetime(2019, 3, 20, 21, 34, 0, 537954) -# pp = point.Point(value=val, timestamp=dt) -# ts = [ -# time_series.TimeSeries(label_values=[lv], points=[pp], -# start_timestamp=utils.to_iso_str(dt)) -# ] - -# desc = metric_descriptor.MetricDescriptor( -# name='name', -# description='description', -# unit='unit', -# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, -# label_keys=[label_key.LabelKey('key', 'description')] -# ) -# mm = metric.Metric(descriptor=desc, time_series=ts) - -# sd_md = exporter.get_metric_descriptor(desc) -# self.assertEqual(len(sd_md.labels), 2) -# sd_descriptors = {ld.key: ld.description for ld in sd_md.labels} -# self.assertIn('key', sd_descriptors) -# self.assertEqual(sd_descriptors['key'], 
'description') -# self.assertIn(stackdriver.OPENCENSUS_TASK, sd_descriptors) -# self.assertEqual( -# sd_descriptors[stackdriver.OPENCENSUS_TASK], -# stackdriver.OPENCENSUS_TASK_DESCRIPTION -# ) - -# sd_ts_list = exporter.create_time_series_list(mm) -# self.assertEqual(len(sd_ts_list), 1) -# [sd_ts] = sd_ts_list -# self.assertIn('key', sd_ts.metric.labels) -# self.assertEqual(sd_ts.metric.labels['key'], 'val') -# self.assertIn(stackdriver.OPENCENSUS_TASK, sd_ts.metric.labels) - -# def test_empty_default_monitoring_labels(self): -# """Check that it's possible to remove the default OC task label.""" -# exporter = stackdriver.StackdriverStatsExporter( -# options=stackdriver.Options( -# project_id='project_id', -# default_monitoring_labels={}), -# client=mock.Mock()) - -# lv = label_value.LabelValue('val') -# val = value.ValueLong(value=123) -# dt = datetime(2019, 3, 20, 21, 34, 0, 537954) -# pp = point.Point(value=val, timestamp=dt) -# ts = [ -# time_series.TimeSeries(label_values=[lv], points=[pp], -# start_timestamp=utils.to_iso_str(dt)) -# ] - -# desc = metric_descriptor.MetricDescriptor( -# name='name', -# description='description', -# unit='unit', -# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, -# label_keys=[label_key.LabelKey('key', 'description')] -# ) -# mm = metric.Metric(descriptor=desc, time_series=ts) - -# sd_md = exporter.get_metric_descriptor(desc) -# self.assertEqual(len(sd_md.labels), 1) -# [sd_label] = sd_md.labels -# self.assertEqual(sd_label.key, 'key') -# self.assertEqual(sd_label.description, 'description') - -# sd_ts_list = exporter.create_time_series_list(mm) -# self.assertEqual(len(sd_ts_list), 1) -# [sd_ts] = sd_ts_list -# self.assertIn('key', sd_ts.metric.labels) -# self.assertEqual(sd_ts.metric.labels['key'], 'val') -# self.assertNotIn(stackdriver.OPENCENSUS_TASK, sd_ts.metric.labels) - -# def test_custom_default_monitoring_labels(self): -# """Check that custom labels are exported and included in descriptor.""" -# exporter = 
stackdriver.StackdriverStatsExporter( -# options=stackdriver.Options( -# project_id='project_id', -# default_monitoring_labels={ -# label_key.LabelKey('clk_key', 'clk_desc'): -# label_value.LabelValue('clk_value') -# }), -# client=mock.Mock()) - -# lv = label_value.LabelValue('val') -# val = value.ValueLong(value=123) -# dt = datetime(2019, 3, 20, 21, 34, 0, 537954) -# pp = point.Point(value=val, timestamp=dt) -# ts = [ -# time_series.TimeSeries(label_values=[lv], points=[pp], -# start_timestamp=utils.to_iso_str(dt)) -# ] - -# desc = metric_descriptor.MetricDescriptor( -# name='name', -# description='description', -# unit='unit', -# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, -# label_keys=[label_key.LabelKey('key', 'description')] -# ) -# mm = metric.Metric(descriptor=desc, time_series=ts) - -# sd_md = exporter.get_metric_descriptor(desc) -# self.assertEqual(len(sd_md.labels), 2) -# sd_descriptors = {ld.key: ld.description for ld in sd_md.labels} -# self.assertIn('key', sd_descriptors) -# self.assertEqual(sd_descriptors['key'], 'description') -# self.assertIn('clk_key', sd_descriptors) -# self.assertEqual(sd_descriptors['clk_key'], 'clk_desc') - -# sd_ts_list = exporter.create_time_series_list(mm) -# self.assertEqual(len(sd_ts_list), 1) -# [sd_ts] = sd_ts_list -# self.assertIn('key', sd_ts.metric.labels) -# self.assertEqual(sd_ts.metric.labels['key'], 'val') -# self.assertIn('clk_key', sd_ts.metric.labels) -# self.assertEqual(sd_ts.metric.labels['clk_key'], 'clk_value') - -# def test_get_metric_descriptor(self): -# exporter = stackdriver.StackdriverStatsExporter( -# options=stackdriver.Options( -# project_id='project_id'), -# client=mock.Mock()) - -# oc_md = metric_descriptor.MetricDescriptor( -# name='name', -# description='description', -# unit='unit', -# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, -# label_keys=[label_key.LabelKey('ck', 'cd')] -# ) - -# sd_md = exporter.get_metric_descriptor(oc_md) -# self.assertEqual( -# 
sd_md.metric_kind, -# monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE) -# self.assertEqual( -# sd_md.value_type, -# monitoring_v3.enums.MetricDescriptor.ValueType.INT64) - -# self.assertIsInstance(sd_md, monitoring_v3.types.MetricDescriptor) -# exporter.client.create_metric_descriptor.assert_not_called() - -# def test_get_metric_descriptor_bad_type(self): -# exporter = stackdriver.StackdriverStatsExporter( -# options=stackdriver.Options(project_id='project_id'), -# client=mock.Mock()) - -# bad_type_oc_md = metric_descriptor.MetricDescriptor( -# name='name', -# description='description', -# unit='unit', -# # Need a valid type to create the descriptor -# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, -# label_keys=[label_key.LabelKey('key', 'description')] -# ) -# bad_type_oc_md._type = 100 - -# with self.assertRaises(TypeError): -# exporter.get_metric_descriptor(bad_type_oc_md) - -# def test_get_metric_descriptor_custom_prefix(self): - -# exporter = stackdriver.StackdriverStatsExporter( -# options=stackdriver.Options( -# metric_prefix='metric_prefix', -# project_id='project_id'), -# client=mock.Mock()) - -# oc_md = metric_descriptor.MetricDescriptor( -# name='name', -# description='description', -# unit='unit', -# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, -# label_keys=[label_key.LabelKey('ck', 'cd')] -# ) - -# sd_md = exporter.get_metric_descriptor(oc_md) -# self.assertIn('metric_prefix', sd_md.type) -# self.assertIn('metric_prefix', sd_md.name) - -# def test_register_metric_descriptor(self): -# exporter = stackdriver.StackdriverStatsExporter( -# options=stackdriver.Options( -# metric_prefix='metric_prefix', -# project_id='project_id'), -# client=mock.Mock()) - -# oc_md = metric_descriptor.MetricDescriptor( -# name='name', -# description='description', -# unit='unit', -# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, -# label_keys=[label_key.LabelKey('key', 'description')] -# ) - -# 
exporter.register_metric_descriptor(oc_md) -# self.assertEqual( -# exporter.client.create_metric_descriptor.call_count, -# 1 -# ) -# exporter.register_metric_descriptor(oc_md) -# self.assertEqual( -# exporter.client.create_metric_descriptor.call_count, -# 1 -# ) - -# def test_export_metrics(self): -# lv = label_value.LabelValue('val') -# val = value.ValueLong(value=123) -# dt = datetime(2019, 3, 20, 21, 34, 0, 537954) -# pp = point.Point(value=val, timestamp=dt) - -# ts = [ -# time_series.TimeSeries(label_values=[lv], points=[pp], -# start_timestamp=utils.to_iso_str(dt)) -# ] - -# desc = metric_descriptor.MetricDescriptor( -# name='name', -# description='description', -# unit='unit', -# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, -# label_keys=[label_key.LabelKey('key', 'description')] -# ) - -# mm = metric.Metric(descriptor=desc, time_series=ts) - -# exporter = stackdriver.StackdriverStatsExporter(client=mock.Mock()) -# exporter.export_metrics([mm]) - -# self.assertEqual(exporter.client.create_time_series.call_count, 1) -# sd_args = exporter.client.create_time_series.call_args[0][1] -# self.assertEqual(len(sd_args), 1) -# [sd_arg] = exporter.client.create_time_series.call_args[0][1] -# self.assertEqual(sd_arg.points[0].value.int64_value, 123) - - -# class MockPeriodicMetricTask(object): -# """Testing mock of metrics.transport.PeriodicMetricTask. - -# Simulate calling export asynchronously from another thread synchronously -# from this one. -# """ -# def __init__(self, interval=None, function=None, args=None, kwargs=None): -# self.function = function -# self.logger = mock.Mock() -# self.start = mock.Mock() -# self.run = mock.Mock() - -# def step(self): -# try: -# self.function() -# except transport_module.TransportError as ex: -# self.logger.exception(ex) -# self.cancel() -# except Exception: -# self.logger.exception("Error handling metric export") - - -# class MockGetExporterThread(object): -# """Intercept calls to get_exporter_thread. 
- -# To get a reference to the running PeriodicMetricTask created by -# get_exporter_thread. -# """ -# def __init__(self): -# self.transport = None - -# def __enter__(self): -# original_func = transport_module.get_exporter_thread - -# def get_exporter_thread(*aa, **kw): -# self.transport = original_func(*aa, **kw) - -# mock_get = mock.Mock() -# mock_get.side_effect = get_exporter_thread -# self.patcher = mock.patch( -# ('opencensus.ext.stackdriver.stats_exporter' -# '.transport.get_exporter_thread'), -# mock_get) -# self.patcher.start() -# return self - -# def __exit__(self, type, value, traceback): -# self.patcher.stop() - - -# @mock.patch('opencensus.ext.stackdriver.stats_exporter' -# '.monitoring_v3.MetricServiceClient') -# @mock.patch('opencensus.ext.stackdriver.stats_exporter' -# '.stats.stats') -# class TestAsyncStatsExport(unittest.TestCase): -# """Check that metrics are exported using the exporter thread.""" - -# def setUp(self): -# patcher = mock.patch( -# 'opencensus.metrics.transport.PeriodicMetricTask', -# MockPeriodicMetricTask) -# patcher.start() -# self.addCleanup(patcher.stop) - -# def test_export_empty(self, mock_stats, mock_client): -# """Check that we don't attempt to export empty metric sets.""" - -# mock_stats.get_metrics.return_value = [] - -# with MockGetExporterThread() as mget: -# exporter = stackdriver.new_stats_exporter( -# stackdriver.Options(project_id=1)) -# mget.transport.step() - -# exporter.client.create_metric_descriptor.assert_not_called() -# exporter.client.create_time_series.assert_not_called() - -# def test_export_single_metric(self, mock_stats, mock_client): -# """Check that we can export a set of a single metric.""" - -# lv = label_value.LabelValue('val') -# val = value.ValueLong(value=123) -# dt = datetime(2019, 3, 20, 21, 34, 0, 537954) -# pp = point.Point(value=val, timestamp=dt) - -# ts = [ -# time_series.TimeSeries(label_values=[lv], points=[pp], -# start_timestamp=utils.to_iso_str(dt)) -# ] - -# desc = 
metric_descriptor.MetricDescriptor( -# name='name2', -# description='description2', -# unit='unit2', -# type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, -# label_keys=[label_key.LabelKey('key', 'description')] -# ) - -# mm = metric.Metric(descriptor=desc, time_series=ts) -# mock_stats.get_metrics.return_value = [mm] - -# with MockGetExporterThread() as mget: -# exporter = stackdriver.new_stats_exporter( -# stackdriver.Options(project_id=1)) -# mget.transport.step() - -# exporter.client.create_metric_descriptor.assert_called() -# self.assertEqual( -# exporter.client.create_metric_descriptor.call_count, -# 1) -# md_call_arg =\ -# exporter.client.create_metric_descriptor.call_args[0][1] -# self.assertEqual( -# md_call_arg.metric_kind, -# monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE -# ) -# self.assertEqual( -# md_call_arg.value_type, -# monitoring_v3.enums.MetricDescriptor.ValueType.INT64 -# ) - -# exporter.client.create_time_series.assert_called() -# self.assertEqual( -# exporter.client.create_time_series.call_count, -# 1) -# ts_call_arg = exporter.client.create_time_series.call_args[0][1] -# self.assertEqual(len(ts_call_arg), 1) -# self.assertEqual(len(ts_call_arg[0].points), 1) -# self.assertEqual(ts_call_arg[0].points[0].value.int64_value, 123) - - -# class TestCreateTimeseries(unittest.TestCase): - -# def setUp(self): -# patcher = mock.patch( -# 'opencensus.ext.stackdriver.stats_exporter.stats.stats', -# stats_module._Stats()) -# patcher.start() -# self.addCleanup(patcher.stop) - -# def check_labels(self, -# actual_labels, -# expected_labels, -# include_opencensus=False): -# actual_labels = dict(actual_labels) -# if include_opencensus: -# opencensus_tag = actual_labels.pop(stackdriver.OPENCENSUS_TASK) -# self.assertIsNotNone(opencensus_tag) -# self.assertIn("py-", opencensus_tag) -# self.assertDictEqual(actual_labels, expected_labels) - -# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
-# 'monitored_resource.get_instance', -# return_value=None) -# def test_create_batched_time_series(self, monitor_resource_mock): -# client = mock.Mock() -# v_data = view_data_module.ViewData( -# view=VIDEO_SIZE_VIEW, -# start_time=TEST_TIME_STR, -# end_time=TEST_TIME_STR) -# v_data.record(context=tag_map_module.TagMap(), value=2, timestamp=None) -# view_data = [v_data] - -# option = stackdriver.Options(project_id="project-test") -# exporter = stackdriver.StackdriverStatsExporter( -# options=option, client=client) - -# view_data = [metric_utils.view_data_to_metric(view_data[0], TEST_TIME)] - -# time_series_batches = exporter.create_batched_time_series(view_data, 1) - -# self.assertEqual(len(time_series_batches), 1) -# [time_series_batch] = time_series_batches -# self.assertEqual(len(time_series_batch), 1) -# [time_series] = time_series_batch -# self.assertEqual( -# time_series.metric.type, -# 'custom.googleapis.com/opencensus/' + VIDEO_SIZE_VIEW_NAME) -# self.check_labels( -# time_series.metric.labels, {}, include_opencensus=True) - -# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
-# 'monitored_resource.get_instance', -# return_value=None) -# def test_create_batched_time_series_with_many(self, monitor_resource_mock): -# client = mock.Mock() - -# # First view with 3 -# view_name1 = "view-name1" -# view1 = view_module.View(view_name1, "test description", ['test'], -# VIDEO_SIZE_MEASURE, -# aggregation_module.LastValueAggregation()) -# v_data1 = view_data_module.ViewData( -# view=view1, start_time=TEST_TIME_STR, end_time=TEST_TIME_STR) -# v_data1.record(context=tag_map_module.TagMap({'test': '1'}), value=7, -# timestamp=None) -# v_data1.record(context=tag_map_module.TagMap({'test': '2'}), value=5, -# timestamp=None) -# v_data1.record(context=tag_map_module.TagMap({'test': '3'}), value=3, -# timestamp=None) - -# # Second view with 2 -# view_name2 = "view-name2" -# view2 = view_module.View(view_name2, "test description", ['test'], -# VIDEO_SIZE_MEASURE, -# aggregation_module.LastValueAggregation()) -# v_data2 = view_data_module.ViewData( -# view=view2, start_time=TEST_TIME_STR, end_time=TEST_TIME_STR) -# v_data2.record(context=tag_map_module.TagMap({'test': '1'}), value=7, -# timestamp=None) -# v_data2.record(context=tag_map_module.TagMap({'test': '2'}), value=5, -# timestamp=None) - -# view_data = [v_data1, v_data2] -# view_data = [metric_utils.view_data_to_metric(vd, TEST_TIME) -# for vd in view_data] - -# option = stackdriver.Options(project_id="project-test") -# exporter = stackdriver.StackdriverStatsExporter( -# options=option, client=client) - -# time_series_batches = exporter.create_batched_time_series(view_data, 2) - -# self.assertEqual(len(time_series_batches), 3) -# [tsb1, tsb2, tsb3] = time_series_batches -# self.assertEqual(len(tsb1), 2) -# self.assertEqual(len(tsb2), 2) -# self.assertEqual(len(tsb3), 1) - -# def setup_create_timeseries_test(self): -# client = mock.Mock() -# execution_context.clear() - -# option = stackdriver.Options( -# project_id="project-test", resource="global") -# exporter = stackdriver.StackdriverStatsExporter( 
-# options=option, client=client) - -# stats = stats_module.stats -# view_manager = stats.view_manager -# stats_recorder = stats.stats_recorder - -# if len(view_manager.measure_to_view_map.exporters) > 0: -# view_manager.unregister_exporter( -# view_manager.measure_to_view_map.exporters[0]) - -# view_manager.register_exporter(exporter) -# return view_manager, stats_recorder, exporter - -# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' -# 'monitored_resource.get_instance', -# return_value=None) -# def test_create_timeseries(self, monitor_resource_mock): -# view_manager, stats_recorder, exporter = \ -# self.setup_create_timeseries_test() - -# view_manager.register_view(VIDEO_SIZE_VIEW) - -# tag_value = tag_value_module.TagValue("1200") -# tag_map = tag_map_module.TagMap() -# tag_map.insert(FRONTEND_KEY, tag_value) - -# measure_map = stats_recorder.new_measurement_map() -# measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) -# measure_map.record(tag_map) - -# v_data = measure_map.measure_to_view_map.get_view( -# VIDEO_SIZE_VIEW_NAME, None) - -# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - -# time_series_list = exporter.create_time_series_list(v_data) - -# self.assertEqual(len(time_series_list), 1) -# time_series = time_series_list[0] -# self.assertEqual(time_series.resource.type, "global") -# self.assertEqual( -# time_series_list[0].metric.type, -# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") -# self.check_labels( -# time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, -# include_opencensus=True) -# self.assertIsNotNone(time_series.resource) - -# self.assertEqual(len(time_series.points), 1) -# value = time_series.points[0].value -# self.assertEqual(value.distribution_value.count, 1) - -# time_series_list = exporter.create_time_series_list(v_data) - -# self.assertEqual(len(time_series_list), 1) -# time_series = time_series_list[0] -# self.check_labels( -# time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, 
-# include_opencensus=True) -# self.assertIsNotNone(time_series.resource) - -# self.assertEqual(len(time_series.points), 1) -# value = time_series.points[0].value -# self.assertEqual(value.distribution_value.count, 1) - -# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' -# 'monitored_resource.get_instance') -# def test_create_timeseries_with_resource(self, monitor_resource_mock): - -# client = mock.Mock() -# execution_context.clear() - -# option = stackdriver.Options(project_id="project-test", resource="") -# exporter = stackdriver.StackdriverStatsExporter( -# options=option, client=client) - -# stats = stats_module.stats -# view_manager = stats.view_manager -# stats_recorder = stats.stats_recorder - -# if len(view_manager.measure_to_view_map.exporters) > 0: -# view_manager.unregister_exporter( -# view_manager.measure_to_view_map.exporters[0]) - -# view_manager.register_exporter(exporter) -# view_manager.register_view(VIDEO_SIZE_VIEW) - -# tag_value = tag_value_module.TagValue("1200") -# tag_map = tag_map_module.TagMap() -# tag_map.insert(FRONTEND_KEY, tag_value) - -# measure_map = stats_recorder.new_measurement_map() -# measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) -# measure_map.record(tag_map) - -# v_data = measure_map.measure_to_view_map.get_view( -# VIDEO_SIZE_VIEW_NAME, None) - -# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - -# # check for gce_instance monitored resource -# mocked_labels = { -# 'instance_id': 'my-instance', -# 'project_id': 'my-project', -# 'zone': 'us-east1', -# 'k8s.io/pod/name': 'localhost', -# 'k8s.io/namespace/name': 'namespace', -# } - -# mock_resource = mock.Mock() -# mock_resource.get_type.return_value = 'gce_instance' -# mock_resource.get_labels.return_value = mocked_labels -# monitor_resource_mock.return_value = mock_resource - -# time_series_list = exporter.create_time_series_list(v_data) -# self.assertEqual(len(time_series_list), 1) -# time_series = time_series_list[0] -# 
self.assertEqual(time_series.resource.type, "gce_instance") -# self.check_labels( -# time_series.resource.labels, { -# 'instance_id': 'my-instance', -# 'project_id': 'my-project', -# 'zone': 'us-east1', -# }) -# self.assertEqual( -# time_series.metric.type, -# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") -# self.assertIsNotNone(time_series) - -# time_series_list = exporter.create_time_series_list(v_data) -# self.assertEqual(len(time_series_list), 1) -# time_series = time_series_list[0] - -# self.assertEqual( -# time_series.metric.type, -# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") - -# # check for k8s_container monitored resource -# mocked_labels = { -# 'instance_id': 'my-instance', -# 'project_id': 'my-project', -# 'zone': 'us-east1', -# 'k8s.io/pod/name': 'localhost', -# 'k8s.io/cluster/name': 'cluster', -# 'k8s.io/namespace/name': 'namespace', -# } - -# mock_resource = mock.Mock() -# mock_resource.get_type.return_value = 'k8s_container' -# mock_resource.get_labels.return_value = mocked_labels -# monitor_resource_mock.return_value = mock_resource - -# time_series_list = exporter.create_time_series_list(v_data) -# self.assertEqual(len(time_series_list), 1) -# time_series = time_series_list[0] -# self.assertEqual(time_series.resource.type, "k8s_container") -# self.check_labels( -# time_series.resource.labels, { -# 'project_id': 'my-project', -# 'location': 'us-east1', -# 'cluster_name': 'cluster', -# 'pod_name': 'localhost', -# 'namespace_name': 'namespace', -# }) -# self.assertEqual( -# time_series.metric.type, -# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") -# self.assertIsNotNone(time_series) - -# # check for aws_ec2_instance monitored resource -# mocked_labels = { -# 'instance_id': 'my-instance', -# 'aws_account': 'my-project', -# 'region': 'us-east1', -# } - -# mock_resource = mock.Mock() -# mock_resource.get_type.return_value = 'aws_ec2_instance' -# mock_resource.get_labels.return_value = 
mocked_labels -# monitor_resource_mock.return_value = mock_resource - -# time_series_list = exporter.create_time_series_list(v_data) -# self.assertEqual(len(time_series_list), 1) -# time_series = time_series_list[0] -# self.assertEqual(time_series.resource.type, "aws_ec2_instance") -# self.check_labels( -# time_series.resource.labels, { -# 'instance_id': 'my-instance', -# 'aws_account': 'my-project', -# 'region': 'aws:us-east1', -# }) -# self.assertEqual( -# time_series.metric.type, -# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") -# self.assertIsNotNone(time_series) - -# # check for out of box monitored resource -# mock_resource = mock.Mock() -# mock_resource.get_type.return_value = '' -# mock_resource.get_labels.return_value = mock.Mock() -# monitor_resource_mock.return_value = mock_resource - -# time_series_list = exporter.create_time_series_list(v_data) -# self.assertEqual(len(time_series_list), 1) -# time_series = time_series_list[0] -# self.assertEqual(time_series.resource.type, 'global') -# self.check_labels(time_series.resource.labels, {}) -# self.assertEqual( -# time_series.metric.type, -# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") -# self.assertIsNotNone(time_series) - -# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
-# 'monitored_resource.get_instance', -# return_value=None) -# def test_create_timeseries_str_tagvalue(self, monitor_resource_mock): -# view_manager, stats_recorder, exporter = \ -# self.setup_create_timeseries_test() - -# agg_1 = aggregation_module.LastValueAggregation(value=2) -# view_name1 = "view-name1" -# new_view1 = view_module.View( -# view_name1, "processed video size over time", [FRONTEND_KEY_INT], -# VIDEO_SIZE_MEASURE_2, agg_1) - -# view_manager.register_view(new_view1) - -# tag_value_int = tag_value_module.TagValue("Abc") -# tag_map = tag_map_module.TagMap() -# tag_map.insert(FRONTEND_KEY_INT, tag_value_int) - -# measure_map = stats_recorder.new_measurement_map() -# measure_map.measure_int_put(VIDEO_SIZE_MEASURE_2, 25 * MiB) -# measure_map.record(tag_map) - -# v_data = measure_map.measure_to_view_map.get_view(view_name1, None) - -# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - -# time_series_list = exporter.create_time_series_list(v_data) -# self.assertEqual(len(time_series_list), 1) -# time_series = time_series_list[0] - -# self.check_labels( -# time_series.metric.labels, {FRONTEND_KEY_INT_CLEAN: "Abc"}, -# include_opencensus=True) -# self.assertIsNotNone(time_series.resource) - -# self.assertEqual(len(time_series.points), 1) -# expected_value = monitoring_v3.types.TypedValue() -# # TODO: #565 -# expected_value.double_value = 25.0 * MiB -# self.assertEqual(time_series.points[0].value, expected_value) - -# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
-# 'monitored_resource.get_instance', -# return_value=None) -# def test_create_timeseries_str_tagvalue_count_aggregtation( -# self, monitor_resource_mock): -# view_manager, stats_recorder, exporter = \ -# self.setup_create_timeseries_test() - -# agg_1 = aggregation_module.CountAggregation(count=2) -# view_name1 = "view-name1" -# new_view1 = view_module.View( -# view_name1, "processed video size over time", [FRONTEND_KEY_INT], -# VIDEO_SIZE_MEASURE_2, agg_1) - -# view_manager.register_view(new_view1) - -# tag_value_int = tag_value_module.TagValue("Abc") -# tag_map = tag_map_module.TagMap() -# tag_map.insert(FRONTEND_KEY_INT, tag_value_int) - -# measure_map = stats_recorder.new_measurement_map() -# measure_map.measure_int_put(VIDEO_SIZE_MEASURE_2, 25 * MiB) -# measure_map.record(tag_map) - -# v_data = measure_map.measure_to_view_map.get_view(view_name1, None) - -# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - -# time_series_list = exporter.create_time_series_list(v_data) -# self.assertEqual(len(time_series_list), 1) -# time_series = time_series_list[0] -# self.check_labels( -# time_series.metric.labels, {FRONTEND_KEY_INT_CLEAN: "Abc"}, -# include_opencensus=True) -# self.assertIsNotNone(time_series.resource) - -# self.assertEqual(len(time_series.points), 1) -# expected_value = monitoring_v3.types.TypedValue() -# expected_value.int64_value = 3 -# self.assertEqual(time_series.points[0].value, expected_value) - -# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
-# 'monitored_resource.get_instance', -# return_value=None) -# def test_create_timeseries_last_value_float_tagvalue( -# self, monitor_resource_mock): -# view_manager, stats_recorder, exporter = \ -# self.setup_create_timeseries_test() - -# agg_2 = aggregation_module.LastValueAggregation(value=2.2 * MiB) -# view_name2 = "view-name2" -# new_view2 = view_module.View( -# view_name2, "processed video size over time", [FRONTEND_KEY_FLOAT], -# VIDEO_SIZE_MEASURE_FLOAT, agg_2) - -# view_manager.register_view(new_view2) - -# tag_value_float = tag_value_module.TagValue("Abc") -# tag_map = tag_map_module.TagMap() -# tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float) - -# measure_map = stats_recorder.new_measurement_map() -# measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25.7 * MiB) -# measure_map.record(tag_map) - -# v_data = measure_map.measure_to_view_map.get_view(view_name2, None) - -# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - -# time_series_list = exporter.create_time_series_list(v_data) -# self.assertEqual(len(time_series_list), 1) -# time_series = time_series_list[0] -# self.check_labels( -# time_series.metric.labels, {FRONTEND_KEY_FLOAT_CLEAN: "Abc"}, -# include_opencensus=True) -# self.assertIsNotNone(time_series.resource) - -# self.assertEqual(len(time_series.points), 1) -# expected_value = monitoring_v3.types.TypedValue() -# expected_value.double_value = 25.7 * MiB -# self.assertEqual(time_series.points[0].value, expected_value) - -# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
-# 'monitored_resource.get_instance', -# return_value=None) -# def test_create_timeseries_float_tagvalue(self, monitor_resource_mock): -# client = mock.Mock() - -# option = stackdriver.Options( -# project_id="project-test", resource="global") -# exporter = stackdriver.StackdriverStatsExporter( -# options=option, client=client) - -# stats = stats_module.stats -# view_manager = stats.view_manager -# stats_recorder = stats.stats_recorder - -# if len(view_manager.measure_to_view_map.exporters) > 0: -# view_manager.unregister_exporter( -# view_manager.measure_to_view_map.exporters[0]) - -# view_manager.register_exporter(exporter) - -# agg_3 = aggregation_module.SumAggregation(sum=2.2) -# view_name3 = "view-name3" -# new_view3 = view_module.View( -# view_name3, "processed video size over time", [FRONTEND_KEY_FLOAT], -# VIDEO_SIZE_MEASURE_FLOAT, agg_3) - -# view_manager.register_view(new_view3) - -# tag_value_float = tag_value_module.TagValue("1200") -# tag_map = tag_map_module.TagMap() -# tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float) - -# measure_map = stats_recorder.new_measurement_map() -# measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB) -# measure_map.record(tag_map) - -# v_data = measure_map.measure_to_view_map.get_view(view_name3, None) - -# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - -# time_series_list = exporter.create_time_series_list(v_data) -# self.assertEqual(len(time_series_list), 1) -# [time_series] = time_series_list -# self.assertEqual(time_series.metric.type, -# "custom.googleapis.com/opencensus/view-name3") -# self.check_labels( -# time_series.metric.labels, {FRONTEND_KEY_FLOAT_CLEAN: "1200"}, -# include_opencensus=True) -# self.assertIsNotNone(time_series.resource) - -# self.assertEqual(len(time_series.points), 1) -# expected_value = monitoring_v3.types.TypedValue() -# expected_value.double_value = 2.2 + 25 * MiB -# self.assertEqual(time_series.points[0].value, expected_value) - -# 
@mock.patch('opencensus.ext.stackdriver.stats_exporter.' -# 'monitored_resource.get_instance', -# return_value=None) -# def test_create_timeseries_multiple_tag_values(self, -# monitoring_resoure_mock): -# view_manager, stats_recorder, exporter = \ -# self.setup_create_timeseries_test() - -# view_manager.register_view(VIDEO_SIZE_VIEW) - -# measure_map = stats_recorder.new_measurement_map() - -# # Add first point with one tag value -# tag_map = tag_map_module.TagMap() -# tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200")) -# measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) -# measure_map.record(tag_map) - -# # Add second point with different tag value -# tag_map = tag_map_module.TagMap() -# tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1400")) -# measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 12 * MiB) -# measure_map.record(tag_map) - -# v_data = measure_map.measure_to_view_map.get_view( -# VIDEO_SIZE_VIEW_NAME, None) - -# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - -# time_series_list = exporter.create_time_series_list(v_data) - -# self.assertEqual(len(time_series_list), 2) -# ts_by_frontend = { -# ts.metric.labels.get(FRONTEND_KEY_CLEAN): ts -# for ts in time_series_list -# } -# self.assertEqual(set(ts_by_frontend.keys()), {"1200", "1400"}) -# ts1 = ts_by_frontend["1200"] -# ts2 = ts_by_frontend["1400"] - -# # Verify first time series -# self.assertEqual(ts1.resource.type, "global") -# self.assertEqual( -# ts1.metric.type, -# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") -# self.assertIsNotNone(ts1.resource) - -# self.assertEqual(len(ts1.points), 1) -# value1 = ts1.points[0].value -# self.assertEqual(value1.distribution_value.count, 1) - -# # Verify second time series -# self.assertEqual(ts2.resource.type, "global") -# self.assertEqual( -# ts2.metric.type, -# "custom.googleapis.com/opencensus/my.org/views/video_size_test2") -# self.assertIsNotNone(ts2.resource) - -# 
self.assertEqual(len(ts2.points), 1) -# value2 = ts2.points[0].value -# self.assertEqual(value2.distribution_value.count, 1) - -# @mock.patch('opencensus.ext.stackdriver.stats_exporter.' -# 'monitored_resource.get_instance', -# return_value=None) -# def test_create_timeseries_disjoint_tags(self, monitoring_resoure_mock): -# view_manager, stats_recorder, exporter = \ -# self.setup_create_timeseries_test() - -# # Register view with two tags -# view_name = "view-name" -# view = view_module.View(view_name, "test description", -# [FRONTEND_KEY, FRONTEND_KEY_FLOAT], -# VIDEO_SIZE_MEASURE, -# aggregation_module.SumAggregation()) - -# view_manager.register_view(view) - -# # Add point with one tag in common and one different tag -# measure_map = stats_recorder.new_measurement_map() -# tag_map = tag_map_module.TagMap() -# tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200")) -# tag_map.insert(FRONTEND_KEY_STR, tag_value_module.TagValue("1800")) -# measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) -# measure_map.record(tag_map) - -# v_data = measure_map.measure_to_view_map.get_view(view_name, None) - -# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - -# time_series_list = exporter.create_time_series_list(v_data) - -# self.assertEqual(len(time_series_list), 1) -# [time_series] = time_series_list - -# # Verify first time series -# self.assertEqual(time_series.resource.type, "global") -# self.assertEqual(time_series.metric.type, -# "custom.googleapis.com/opencensus/" + view_name) -# self.check_labels( -# time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, -# include_opencensus=True) -# self.assertIsNotNone(time_series.resource) - -# self.assertEqual(len(time_series.points), 1) -# expected_value = monitoring_v3.types.TypedValue() -# # TODO: #565 -# expected_value.double_value = 25.0 * MiB -# self.assertEqual(time_series.points[0].value, expected_value) - -# def test_create_timeseries_from_distribution(self): -# """Check for explicit 0-bound 
bucket for SD export.""" -# agg = aggregation_module.DistributionAggregation() - -# view = view_module.View( -# name="example.org/test_view", -# description="example.org/test_view", -# columns=['tag_key'], -# measure=mock.Mock(), -# aggregation=agg, -# ) - -# v_data = view_data_module.ViewData( -# view=view, -# start_time=TEST_TIME_STR, -# end_time=TEST_TIME_STR, -# ) - -# # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8] -# dad = aggregation_data_module.DistributionAggregationData( -# mean_data=4.5, -# count_data=100, -# sum_of_sqd_deviations=825, -# counts_per_bucket=[20, 20, 20, 20, 20], -# bounds=[2, 4, 6, 8], -# exemplars={mock.Mock() for ii in range(5)} -# ) -# v_data._tag_value_aggregation_data_map = {('tag_value',): dad} - -# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - -# exporter = stackdriver.StackdriverStatsExporter() -# time_series_list = exporter.create_time_series_list(v_data) -# self.assertEqual(len(time_series_list), 1) -# [time_series] = time_series_list - -# self.check_labels( -# time_series.metric.labels, {'tag_key': 'tag_value'}, -# include_opencensus=True) -# self.assertEqual(len(time_series.points), 1) -# [point] = time_series.points -# dv = point.value.distribution_value -# self.assertEqual(100, dv.count) -# self.assertEqual(825.0, dv.sum_of_squared_deviation) -# self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts) -# self.assertEqual([0, 2, 4, 6, 8], -# dv.bucket_options.explicit_buckets.bounds) - -# def test_create_timeseries_multiple_tags(self): -# """Check that exporter creates timeseries for multiple tag values. - -# create_time_series_list should return a time series for each set of -# values in the tag value aggregation map. 
-# """ -# agg = aggregation_module.CountAggregation() - -# view = view_module.View( -# name="example.org/test_view", -# description="example.org/test_view", -# columns=[tag_key_module.TagKey('color'), -# tag_key_module.TagKey('shape')], -# measure=mock.Mock(), -# aggregation=agg, -# ) - -# v_data = view_data_module.ViewData( -# view=view, -# start_time=TEST_TIME_STR, -# end_time=TEST_TIME_STR, -# ) - -# rs_count = aggregation_data_module.CountAggregationData(10) -# bc_count = aggregation_data_module.CountAggregationData(20) -# v_data._tag_value_aggregation_data_map = { -# ('red', 'square'): rs_count, -# ('blue', 'circle'): bc_count, -# } - -# v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) - -# exporter = stackdriver.StackdriverStatsExporter() -# time_series_list = exporter.create_time_series_list(v_data) - -# self.assertEqual(len(time_series_list), 2) -# self.assertEqual(len(time_series_list[0].points), 1) -# self.assertEqual(len(time_series_list[1].points), 1) - -# ts_by_color = {ts.metric.labels.get('color'): ts -# for ts in time_series_list} -# rs_ts = ts_by_color['red'] -# bc_ts = ts_by_color['blue'] -# self.assertEqual(rs_ts.metric.labels.get('shape'), 'square') -# self.assertEqual(bc_ts.metric.labels.get('shape'), 'circle') -# self.assertEqual(rs_ts.points[0].value.int64_value, 10) -# self.assertEqual(bc_ts.points[0].value.int64_value, 20) - -# def test_create_timeseries_invalid_aggregation(self): -# v_data = mock.Mock(spec=view_data_module.ViewData) -# v_data.view.name = "example.org/base_view" -# v_data.view.columns = [tag_key_module.TagKey('base_key')] -# v_data.start_time = TEST_TIME_STR -# v_data.end_time = TEST_TIME_STR - -# base_data = None -# v_data.tag_value_aggregation_data_map = { -# (None,): base_data, -# } - -# exporter = stackdriver.StackdriverStatsExporter( -# options=mock.Mock(), -# client=mock.Mock(), -# ) -# self.assertRaises(TypeError, exporter.create_time_series_list, v_data, -# "", "") +# Copyright 2018, OpenCensus Authors 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +from datetime import datetime + +import google.auth +import mock +from google.cloud import monitoring_v3 + +from opencensus.common import utils +from opencensus.common.version import __version__ +from opencensus.ext.stackdriver import stats_exporter as stackdriver +from opencensus.metrics import label_key, label_value +from opencensus.metrics import transport as transport_module +from opencensus.metrics.export import ( + metric, + metric_descriptor, + point, + time_series, + value, +) +from opencensus.stats import aggregation as aggregation_module +from opencensus.stats import aggregation_data as aggregation_data_module +from opencensus.stats import execution_context +from opencensus.stats import measure as measure_module +from opencensus.stats import metric_utils +from opencensus.stats import stats as stats_module +from opencensus.stats import view as view_module +from opencensus.stats import view_data as view_data_module +from opencensus.tags import tag_key as tag_key_module +from opencensus.tags import tag_map as tag_map_module +from opencensus.tags import tag_value as tag_value_module + +MiB = 1 << 20 +FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend") +FRONTEND_KEY_FLOAT = tag_key_module.TagKey("my.org/keys/frontend-FLOAT") +FRONTEND_KEY_INT = tag_key_module.TagKey("my.org/keys/frontend-INT") +FRONTEND_KEY_STR = tag_key_module.TagKey("my.org/keys/frontend-STR") + +FRONTEND_KEY_CLEAN = 
"my_org_keys_frontend" +FRONTEND_KEY_FLOAT_CLEAN = "my_org_keys_frontend_FLOAT" +FRONTEND_KEY_INT_CLEAN = "my_org_keys_frontend_INT" +FRONTEND_KEY_STR_CLEAN = "my_org_keys_frontend_STR" + +VIDEO_SIZE_MEASURE = measure_module.MeasureFloat( + "my.org/measure/video_size_test2", "size of processed videos", "By") +VIDEO_SIZE_MEASURE_2 = measure_module.MeasureFloat( + "my.org/measure/video_size_test_2", "size of processed videos", "By") + +VIDEO_SIZE_MEASURE_FLOAT = measure_module.MeasureFloat( + "my.org/measure/video_size_test-float", "size of processed videos-float", + "By") + +VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2" +VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation( + [16.0 * MiB, 256.0 * MiB]) +VIDEO_SIZE_VIEW = view_module.View( + VIDEO_SIZE_VIEW_NAME, "processed video size over time", [FRONTEND_KEY], + VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION) + +TEST_TIME = datetime(2018, 12, 25, 1, 2, 3, 4) +TEST_TIME_STR = utils.to_iso_str(TEST_TIME) + + +class _Client(object): + def __init__(self, client_info=None): + self.client_info = client_info + + +class TestOptions(unittest.TestCase): + def test_options_blank(self): + options = stackdriver.Options() + + self.assertEqual(options.project_id, "") + self.assertEqual(options.resource, "") + + def test_options_parameters(self): + options = stackdriver.Options( + project_id="project-id", metric_prefix="sample") + self.assertEqual(options.project_id, "project-id") + self.assertEqual(options.metric_prefix, "sample") + + def test_default_monitoring_labels(self): + options = stackdriver.Options(default_monitoring_labels={ + label_key.LabelKey('lk_key', 'lk_desc'): + label_value.LabelValue('lk_value') + }) + + self.assertEqual(len(options.default_monitoring_labels), 1) + [[lk, lv]] = options.default_monitoring_labels.items() + self.assertEqual(lk.key, 'lk_key') + self.assertEqual(lk.description, 'lk_desc') + self.assertEqual(lv.value, 'lk_value') + + def 
test_default_monitoring_labels_blank(self): + with mock.patch('opencensus.ext.stackdriver.stats_exporter' + '.get_task_value') as mock_gtv: + options = stackdriver.Options() + + mock_gtv.assert_called() + + self.assertEqual(len(options.default_monitoring_labels), 1) + [[lk, lv]] = options.default_monitoring_labels.items() + self.assertEqual(lk.key, stackdriver.OPENCENSUS_TASK) + self.assertEqual(lk.description, + stackdriver.OPENCENSUS_TASK_DESCRIPTION) + self.assertEqual(lv.value, mock_gtv()) + + def test_bad_default_monitoring_labels(self): + with self.assertRaises(AttributeError): + stackdriver.Options( + default_monitoring_labels=[ + 'not a dict' + ]) + + with self.assertRaises(TypeError): + stackdriver.Options( + default_monitoring_labels={ + 'bad key': + label_value.LabelValue('clk_value') + }) + + with self.assertRaises(TypeError): + stackdriver.Options( + default_monitoring_labels={ + label_key.LabelKey('clk_key', 'clk_desc'): + 'bad value' + }) + + +class TestStackdriverStatsExporter(unittest.TestCase): + def test_constructor(self): + exporter = stackdriver.StackdriverStatsExporter() + + self.assertIsNone(exporter.client) + + def test_constructor_param(self): + exporter = stackdriver.StackdriverStatsExporter( + options=stackdriver.Options(project_id=1)) + self.assertEqual(exporter.options.project_id, 1) + + def test_null_options(self): + # Check that we don't suppress auth errors + auth_error = google.auth.exceptions.DefaultCredentialsError + mock_auth_error = mock.Mock() + mock_auth_error.side_effect = auth_error + with mock.patch('opencensus.ext.stackdriver.stats_exporter' + '.google.auth.default', mock_auth_error): + with self.assertRaises(auth_error): + stackdriver.new_stats_exporter() + + # Check that we get the default credentials' project ID + mock_auth_ok = mock.Mock() + mock_auth_ok.return_value = (None, 123) + with mock.patch('opencensus.ext.stackdriver.stats_exporter' + '.google.auth.default', mock_auth_ok): + sdse = 
stackdriver.new_stats_exporter() + self.assertEqual(sdse.options.project_id, 123) + + # Check that we raise if auth works but the project is empty + mock_auth_no_project = mock.Mock() + mock_auth_no_project.return_value = (None, '') + with mock.patch('opencensus.ext.stackdriver.stats_exporter' + '.google.auth.default', mock_auth_no_project): + with self.assertRaises(ValueError): + stackdriver.new_stats_exporter() + + def test_blank_project(self): + self.assertRaises(ValueError, stackdriver.new_stats_exporter, + stackdriver.Options(project_id="")) + + def test_not_blank_project(self): + patch_client = mock.patch( + ('opencensus.ext.stackdriver.stats_exporter' + '.monitoring_v3.MetricServiceClient'), _Client) + + with patch_client: + exporter_created = stackdriver.new_stats_exporter( + stackdriver.Options(project_id=1)) + + self.assertIsInstance(exporter_created, + stackdriver.StackdriverStatsExporter) + + def test_get_user_agent_slug(self): + self.assertIn(__version__, stackdriver.get_user_agent_slug()) + + def test_client_info_user_agent(self): + """Check that the monitoring client sets a user agent. + + The user agent should include the library version. Note that this + assumes MetricServiceClient calls ClientInfo.to_user_agent to attach + the user agent as metadata to metric service API calls. 
+ """ + patch_client = mock.patch( + 'opencensus.ext.stackdriver.stats_exporter.monitoring_v3' + '.MetricServiceClient', _Client) + + with patch_client: + exporter = stackdriver.new_stats_exporter( + stackdriver.Options(project_id=1)) + + self.assertIn(stackdriver.get_user_agent_slug(), + exporter.client.client_info.to_user_agent()) + + def test_sanitize(self): + # empty + result = stackdriver.sanitize_label("") + self.assertEqual(result, "") + + # all invalid + result = stackdriver.sanitize_label("/*^#$") + self.assertEqual(result, "key_") + + # all valid + result = stackdriver.sanitize_label("abc") + self.assertEqual(result, "abc") + + # mixed + result = stackdriver.sanitize_label("a.b/c") + self.assertEqual(result, "a_b_c") + + # starts with '_' + result = stackdriver.sanitize_label("_abc") + self.assertEqual(result, "key_abc") + + # starts with digit + result = stackdriver.sanitize_label("0abc") + self.assertEqual(result, "key_0abc") + + # too long + result = stackdriver.sanitize_label("0123456789" * 10) + self.assertEqual(len(result), 100) + self.assertEqual(result, "key_" + "0123456789" * 9 + "012345") + + def test_get_task_value(self): + task_value = stackdriver.get_task_value() + self.assertNotEqual(task_value, "") + + def test_namespaced_views(self): + view_name = "view-1" + expected_view_name_namespaced = ( + "custom.googleapis.com/opencensus/{}".format(view_name)) + view_name_namespaced = stackdriver.namespaced_view_name(view_name, "") + self.assertEqual(expected_view_name_namespaced, view_name_namespaced) + + expected_view_name_namespaced = "kubernetes.io/myorg/%s" % view_name + view_name_namespaced = stackdriver.namespaced_view_name( + view_name, "kubernetes.io/myorg") + self.assertEqual(expected_view_name_namespaced, view_name_namespaced) + + def test_stackdriver_register_exporter(self): + stats = stats_module.stats + view_manager = stats.view_manager + + exporter = mock.Mock() + if len(view_manager.measure_to_view_map.exporters) > 0: + 
view_manager.unregister_exporter( + view_manager.measure_to_view_map.exporters[0]) + view_manager.register_exporter(exporter) + + registered_exporters = len(view_manager.measure_to_view_map.exporters) + + self.assertEqual(registered_exporters, 1) + + @mock.patch('os.getpid', return_value=12345) + @mock.patch( + 'platform.uname', + return_value=('system', 'node', 'release', 'version', 'machine', + 'processor')) + def test_get_task_value_with_hostname(self, mock_uname, mock_pid): + self.assertEqual(stackdriver.get_task_value(), "py-12345@node") + + @mock.patch('os.getpid', return_value=12345) + @mock.patch( + 'platform.uname', + return_value=('system', '', 'release', 'version', 'machine', + 'processor')) + def test_get_task_value_without_hostname(self, mock_uname, mock_pid): + self.assertEqual(stackdriver.get_task_value(), "py-12345@localhost") + + def test_default_default_monitoring_labels(self): + """Check that metrics include OC task label by default.""" + exporter = stackdriver.StackdriverStatsExporter( + options=stackdriver.Options(project_id='project_id'), + client=mock.Mock()) + + lv = label_value.LabelValue('val') + val = value.ValueLong(value=123) + dt = datetime(2019, 3, 20, 21, 34, 0, 537954) + pp = point.Point(value=val, timestamp=dt) + ts = [ + time_series.TimeSeries(label_values=[lv], points=[pp], + start_timestamp=utils.to_iso_str(dt)) + ] + + desc = metric_descriptor.MetricDescriptor( + name='name', + description='description', + unit='unit', + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + label_keys=[label_key.LabelKey('key', 'description')] + ) + mm = metric.Metric(descriptor=desc, time_series=ts) + + sd_md = exporter.get_metric_descriptor(desc) + self.assertEqual(len(sd_md.labels), 2) + sd_descriptors = {ld.key: ld.description for ld in sd_md.labels} + self.assertIn('key', sd_descriptors) + self.assertEqual(sd_descriptors['key'], 'description') + self.assertIn(stackdriver.OPENCENSUS_TASK, sd_descriptors) + self.assertEqual( + 
sd_descriptors[stackdriver.OPENCENSUS_TASK], + stackdriver.OPENCENSUS_TASK_DESCRIPTION + ) + + sd_ts_list = exporter.create_time_series_list(mm) + self.assertEqual(len(sd_ts_list), 1) + [sd_ts] = sd_ts_list + self.assertIn('key', sd_ts.metric.labels) + self.assertEqual(sd_ts.metric.labels['key'], 'val') + self.assertIn(stackdriver.OPENCENSUS_TASK, sd_ts.metric.labels) + + def test_empty_default_monitoring_labels(self): + """Check that it's possible to remove the default OC task label.""" + exporter = stackdriver.StackdriverStatsExporter( + options=stackdriver.Options( + project_id='project_id', + default_monitoring_labels={}), + client=mock.Mock()) + + lv = label_value.LabelValue('val') + val = value.ValueLong(value=123) + dt = datetime(2019, 3, 20, 21, 34, 0, 537954) + pp = point.Point(value=val, timestamp=dt) + ts = [ + time_series.TimeSeries(label_values=[lv], points=[pp], + start_timestamp=utils.to_iso_str(dt)) + ] + + desc = metric_descriptor.MetricDescriptor( + name='name', + description='description', + unit='unit', + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + label_keys=[label_key.LabelKey('key', 'description')] + ) + mm = metric.Metric(descriptor=desc, time_series=ts) + + sd_md = exporter.get_metric_descriptor(desc) + self.assertEqual(len(sd_md.labels), 1) + [sd_label] = sd_md.labels + self.assertEqual(sd_label.key, 'key') + self.assertEqual(sd_label.description, 'description') + + sd_ts_list = exporter.create_time_series_list(mm) + self.assertEqual(len(sd_ts_list), 1) + [sd_ts] = sd_ts_list + self.assertIn('key', sd_ts.metric.labels) + self.assertEqual(sd_ts.metric.labels['key'], 'val') + self.assertNotIn(stackdriver.OPENCENSUS_TASK, sd_ts.metric.labels) + + def test_custom_default_monitoring_labels(self): + """Check that custom labels are exported and included in descriptor.""" + exporter = stackdriver.StackdriverStatsExporter( + options=stackdriver.Options( + project_id='project_id', + default_monitoring_labels={ + 
label_key.LabelKey('clk_key', 'clk_desc'): + label_value.LabelValue('clk_value') + }), + client=mock.Mock()) + + lv = label_value.LabelValue('val') + val = value.ValueLong(value=123) + dt = datetime(2019, 3, 20, 21, 34, 0, 537954) + pp = point.Point(value=val, timestamp=dt) + ts = [ + time_series.TimeSeries(label_values=[lv], points=[pp], + start_timestamp=utils.to_iso_str(dt)) + ] + + desc = metric_descriptor.MetricDescriptor( + name='name', + description='description', + unit='unit', + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + label_keys=[label_key.LabelKey('key', 'description')] + ) + mm = metric.Metric(descriptor=desc, time_series=ts) + + sd_md = exporter.get_metric_descriptor(desc) + self.assertEqual(len(sd_md.labels), 2) + sd_descriptors = {ld.key: ld.description for ld in sd_md.labels} + self.assertIn('key', sd_descriptors) + self.assertEqual(sd_descriptors['key'], 'description') + self.assertIn('clk_key', sd_descriptors) + self.assertEqual(sd_descriptors['clk_key'], 'clk_desc') + + sd_ts_list = exporter.create_time_series_list(mm) + self.assertEqual(len(sd_ts_list), 1) + [sd_ts] = sd_ts_list + self.assertIn('key', sd_ts.metric.labels) + self.assertEqual(sd_ts.metric.labels['key'], 'val') + self.assertIn('clk_key', sd_ts.metric.labels) + self.assertEqual(sd_ts.metric.labels['clk_key'], 'clk_value') + + def test_get_metric_descriptor(self): + exporter = stackdriver.StackdriverStatsExporter( + options=stackdriver.Options( + project_id='project_id'), + client=mock.Mock()) + + oc_md = metric_descriptor.MetricDescriptor( + name='name', + description='description', + unit='unit', + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + label_keys=[label_key.LabelKey('ck', 'cd')] + ) + + sd_md = exporter.get_metric_descriptor(oc_md) + self.assertEqual( + sd_md.metric_kind, + monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE) + self.assertEqual( + sd_md.value_type, + monitoring_v3.enums.MetricDescriptor.ValueType.INT64) + + 
self.assertIsInstance(sd_md, monitoring_v3.types.MetricDescriptor) + exporter.client.create_metric_descriptor.assert_not_called() + + def test_get_metric_descriptor_bad_type(self): + exporter = stackdriver.StackdriverStatsExporter( + options=stackdriver.Options(project_id='project_id'), + client=mock.Mock()) + + bad_type_oc_md = metric_descriptor.MetricDescriptor( + name='name', + description='description', + unit='unit', + # Need a valid type to create the descriptor + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + label_keys=[label_key.LabelKey('key', 'description')] + ) + bad_type_oc_md._type = 100 + + with self.assertRaises(TypeError): + exporter.get_metric_descriptor(bad_type_oc_md) + + def test_get_metric_descriptor_custom_prefix(self): + + exporter = stackdriver.StackdriverStatsExporter( + options=stackdriver.Options( + metric_prefix='metric_prefix', + project_id='project_id'), + client=mock.Mock()) + + oc_md = metric_descriptor.MetricDescriptor( + name='name', + description='description', + unit='unit', + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + label_keys=[label_key.LabelKey('ck', 'cd')] + ) + + sd_md = exporter.get_metric_descriptor(oc_md) + self.assertIn('metric_prefix', sd_md.type) + self.assertIn('metric_prefix', sd_md.name) + + def test_register_metric_descriptor(self): + exporter = stackdriver.StackdriverStatsExporter( + options=stackdriver.Options( + metric_prefix='metric_prefix', + project_id='project_id'), + client=mock.Mock()) + + oc_md = metric_descriptor.MetricDescriptor( + name='name', + description='description', + unit='unit', + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + label_keys=[label_key.LabelKey('key', 'description')] + ) + + exporter.register_metric_descriptor(oc_md) + self.assertEqual( + exporter.client.create_metric_descriptor.call_count, + 1 + ) + exporter.register_metric_descriptor(oc_md) + self.assertEqual( + exporter.client.create_metric_descriptor.call_count, + 1 + ) + + def 
test_export_metrics(self): + lv = label_value.LabelValue('val') + val = value.ValueLong(value=123) + dt = datetime(2019, 3, 20, 21, 34, 0, 537954) + pp = point.Point(value=val, timestamp=dt) + + ts = [ + time_series.TimeSeries(label_values=[lv], points=[pp], + start_timestamp=utils.to_iso_str(dt)) + ] + + desc = metric_descriptor.MetricDescriptor( + name='name', + description='description', + unit='unit', + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + label_keys=[label_key.LabelKey('key', 'description')] + ) + + mm = metric.Metric(descriptor=desc, time_series=ts) + + exporter = stackdriver.StackdriverStatsExporter(client=mock.Mock()) + exporter.export_metrics([mm]) + + self.assertEqual(exporter.client.create_time_series.call_count, 1) + sd_args = exporter.client.create_time_series.call_args[0][1] + self.assertEqual(len(sd_args), 1) + [sd_arg] = exporter.client.create_time_series.call_args[0][1] + self.assertEqual(sd_arg.points[0].value.int64_value, 123) + + +class MockPeriodicMetricTask(object): + """Testing mock of metrics.transport.PeriodicMetricTask. + + Simulate calling export asynchronously from another thread synchronously + from this one. + """ + def __init__(self, interval=None, function=None, args=None, kwargs=None): + self.function = function + self.logger = mock.Mock() + self.start = mock.Mock() + self.run = mock.Mock() + + def step(self): + try: + self.function() + except transport_module.TransportError as ex: + self.logger.exception(ex) + self.cancel() + except Exception: + self.logger.exception("Error handling metric export") + + +class MockGetExporterThread(object): + """Intercept calls to get_exporter_thread. + + To get a reference to the running PeriodicMetricTask created by + get_exporter_thread. 
+ """ + def __init__(self): + self.transport = None + + def __enter__(self): + original_func = transport_module.get_exporter_thread + + def get_exporter_thread(*aa, **kw): + self.transport = original_func(*aa, **kw) + + mock_get = mock.Mock() + mock_get.side_effect = get_exporter_thread + self.patcher = mock.patch( + ('opencensus.ext.stackdriver.stats_exporter' + '.transport.get_exporter_thread'), + mock_get) + self.patcher.start() + return self + + def __exit__(self, type, value, traceback): + self.patcher.stop() + + +@mock.patch('opencensus.ext.stackdriver.stats_exporter' + '.monitoring_v3.MetricServiceClient') +@mock.patch('opencensus.ext.stackdriver.stats_exporter' + '.stats.stats') +class TestAsyncStatsExport(unittest.TestCase): + """Check that metrics are exported using the exporter thread.""" + + def setUp(self): + patcher = mock.patch( + 'opencensus.metrics.transport.PeriodicMetricTask', + MockPeriodicMetricTask) + patcher.start() + self.addCleanup(patcher.stop) + + def test_export_empty(self, mock_stats, mock_client): + """Check that we don't attempt to export empty metric sets.""" + + mock_stats.get_metrics.return_value = [] + + with MockGetExporterThread() as mget: + exporter = stackdriver.new_stats_exporter( + stackdriver.Options(project_id=1)) + mget.transport.step() + + exporter.client.create_metric_descriptor.assert_not_called() + exporter.client.create_time_series.assert_not_called() + + def test_export_single_metric(self, mock_stats, mock_client): + """Check that we can export a set of a single metric.""" + + lv = label_value.LabelValue('val') + val = value.ValueLong(value=123) + dt = datetime(2019, 3, 20, 21, 34, 0, 537954) + pp = point.Point(value=val, timestamp=dt) + + ts = [ + time_series.TimeSeries(label_values=[lv], points=[pp], + start_timestamp=utils.to_iso_str(dt)) + ] + + desc = metric_descriptor.MetricDescriptor( + name='name2', + description='description2', + unit='unit2', + type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64, + 
label_keys=[label_key.LabelKey('key', 'description')] + ) + + mm = metric.Metric(descriptor=desc, time_series=ts) + mock_stats.get_metrics.return_value = [mm] + + with MockGetExporterThread() as mget: + exporter = stackdriver.new_stats_exporter( + stackdriver.Options(project_id=1)) + mget.transport.step() + + exporter.client.create_metric_descriptor.assert_called() + self.assertEqual( + exporter.client.create_metric_descriptor.call_count, + 1) + md_call_arg =\ + exporter.client.create_metric_descriptor.call_args[0][1] + self.assertEqual( + md_call_arg.metric_kind, + monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE + ) + self.assertEqual( + md_call_arg.value_type, + monitoring_v3.enums.MetricDescriptor.ValueType.INT64 + ) + + exporter.client.create_time_series.assert_called() + self.assertEqual( + exporter.client.create_time_series.call_count, + 1) + ts_call_arg = exporter.client.create_time_series.call_args[0][1] + self.assertEqual(len(ts_call_arg), 1) + self.assertEqual(len(ts_call_arg[0].points), 1) + self.assertEqual(ts_call_arg[0].points[0].value.int64_value, 123) + + +class TestCreateTimeseries(unittest.TestCase): + + def setUp(self): + patcher = mock.patch( + 'opencensus.ext.stackdriver.stats_exporter.stats.stats', + stats_module._Stats()) + patcher.start() + self.addCleanup(patcher.stop) + + def check_labels(self, + actual_labels, + expected_labels, + include_opencensus=False): + actual_labels = dict(actual_labels) + if include_opencensus: + opencensus_tag = actual_labels.pop(stackdriver.OPENCENSUS_TASK) + self.assertIsNotNone(opencensus_tag) + self.assertIn("py-", opencensus_tag) + self.assertDictEqual(actual_labels, expected_labels) + + @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+ 'monitored_resource.get_instance', + return_value=None) + def test_create_batched_time_series(self, monitor_resource_mock): + client = mock.Mock() + v_data = view_data_module.ViewData( + view=VIDEO_SIZE_VIEW, + start_time=TEST_TIME_STR, + end_time=TEST_TIME_STR) + v_data.record(context=tag_map_module.TagMap(), value=2, timestamp=None) + view_data = [v_data] + + option = stackdriver.Options(project_id="project-test") + exporter = stackdriver.StackdriverStatsExporter( + options=option, client=client) + + view_data = [metric_utils.view_data_to_metric(view_data[0], TEST_TIME)] + + time_series_batches = exporter.create_batched_time_series(view_data, 1) + + self.assertEqual(len(time_series_batches), 1) + [time_series_batch] = time_series_batches + self.assertEqual(len(time_series_batch), 1) + [time_series] = time_series_batch + self.assertEqual( + time_series.metric.type, + 'custom.googleapis.com/opencensus/' + VIDEO_SIZE_VIEW_NAME) + self.check_labels( + time_series.metric.labels, {}, include_opencensus=True) + + @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+ 'monitored_resource.get_instance', + return_value=None) + def test_create_batched_time_series_with_many(self, monitor_resource_mock): + client = mock.Mock() + + # First view with 3 + view_name1 = "view-name1" + view1 = view_module.View(view_name1, "test description", ['test'], + VIDEO_SIZE_MEASURE, + aggregation_module.LastValueAggregation()) + v_data1 = view_data_module.ViewData( + view=view1, start_time=TEST_TIME_STR, end_time=TEST_TIME_STR) + v_data1.record(context=tag_map_module.TagMap({'test': '1'}), value=7, + timestamp=None) + v_data1.record(context=tag_map_module.TagMap({'test': '2'}), value=5, + timestamp=None) + v_data1.record(context=tag_map_module.TagMap({'test': '3'}), value=3, + timestamp=None) + + # Second view with 2 + view_name2 = "view-name2" + view2 = view_module.View(view_name2, "test description", ['test'], + VIDEO_SIZE_MEASURE, + aggregation_module.LastValueAggregation()) + v_data2 = view_data_module.ViewData( + view=view2, start_time=TEST_TIME_STR, end_time=TEST_TIME_STR) + v_data2.record(context=tag_map_module.TagMap({'test': '1'}), value=7, + timestamp=None) + v_data2.record(context=tag_map_module.TagMap({'test': '2'}), value=5, + timestamp=None) + + view_data = [v_data1, v_data2] + view_data = [metric_utils.view_data_to_metric(vd, TEST_TIME) + for vd in view_data] + + option = stackdriver.Options(project_id="project-test") + exporter = stackdriver.StackdriverStatsExporter( + options=option, client=client) + + time_series_batches = exporter.create_batched_time_series(view_data, 2) + + self.assertEqual(len(time_series_batches), 3) + [tsb1, tsb2, tsb3] = time_series_batches + self.assertEqual(len(tsb1), 2) + self.assertEqual(len(tsb2), 2) + self.assertEqual(len(tsb3), 1) + + def setup_create_timeseries_test(self): + client = mock.Mock() + execution_context.clear() + + option = stackdriver.Options( + project_id="project-test", resource="global") + exporter = stackdriver.StackdriverStatsExporter( + options=option, client=client) + + stats = 
stats_module.stats + view_manager = stats.view_manager + stats_recorder = stats.stats_recorder + + if len(view_manager.measure_to_view_map.exporters) > 0: + view_manager.unregister_exporter( + view_manager.measure_to_view_map.exporters[0]) + + view_manager.register_exporter(exporter) + return view_manager, stats_recorder, exporter + + @mock.patch('opencensus.ext.stackdriver.stats_exporter.' + 'monitored_resource.get_instance', + return_value=None) + def test_create_timeseries(self, monitor_resource_mock): + view_manager, stats_recorder, exporter = \ + self.setup_create_timeseries_test() + + view_manager.register_view(VIDEO_SIZE_VIEW) + + tag_value = tag_value_module.TagValue("1200") + tag_map = tag_map_module.TagMap() + tag_map.insert(FRONTEND_KEY, tag_value) + + measure_map = stats_recorder.new_measurement_map() + measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) + measure_map.record(tag_map) + + v_data = measure_map.measure_to_view_map.get_view( + VIDEO_SIZE_VIEW_NAME, None) + + v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + + time_series_list = exporter.create_time_series_list(v_data) + + self.assertEqual(len(time_series_list), 1) + time_series = time_series_list[0] + self.assertEqual(time_series.resource.type, "global") + self.assertEqual( + time_series_list[0].metric.type, + "custom.googleapis.com/opencensus/my.org/views/video_size_test2") + self.check_labels( + time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, + include_opencensus=True) + self.assertIsNotNone(time_series.resource) + + self.assertEqual(len(time_series.points), 1) + value = time_series.points[0].value + self.assertEqual(value.distribution_value.count, 1) + + time_series_list = exporter.create_time_series_list(v_data) + + self.assertEqual(len(time_series_list), 1) + time_series = time_series_list[0] + self.check_labels( + time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, + include_opencensus=True) + self.assertIsNotNone(time_series.resource) + + 
self.assertEqual(len(time_series.points), 1) + value = time_series.points[0].value + self.assertEqual(value.distribution_value.count, 1) + + @mock.patch('opencensus.ext.stackdriver.stats_exporter.' + 'monitored_resource.get_instance') + def test_create_timeseries_with_resource(self, monitor_resource_mock): + + client = mock.Mock() + execution_context.clear() + + option = stackdriver.Options(project_id="project-test", resource="") + exporter = stackdriver.StackdriverStatsExporter( + options=option, client=client) + + stats = stats_module.stats + view_manager = stats.view_manager + stats_recorder = stats.stats_recorder + + if len(view_manager.measure_to_view_map.exporters) > 0: + view_manager.unregister_exporter( + view_manager.measure_to_view_map.exporters[0]) + + view_manager.register_exporter(exporter) + view_manager.register_view(VIDEO_SIZE_VIEW) + + tag_value = tag_value_module.TagValue("1200") + tag_map = tag_map_module.TagMap() + tag_map.insert(FRONTEND_KEY, tag_value) + + measure_map = stats_recorder.new_measurement_map() + measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) + measure_map.record(tag_map) + + v_data = measure_map.measure_to_view_map.get_view( + VIDEO_SIZE_VIEW_NAME, None) + + v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + + # check for gce_instance monitored resource + mocked_labels = { + 'instance_id': 'my-instance', + 'project_id': 'my-project', + 'zone': 'us-east1', + 'k8s.io/pod/name': 'localhost', + 'k8s.io/namespace/name': 'namespace', + } + + mock_resource = mock.Mock() + mock_resource.get_type.return_value = 'gce_instance' + mock_resource.get_labels.return_value = mocked_labels + monitor_resource_mock.return_value = mock_resource + + time_series_list = exporter.create_time_series_list(v_data) + self.assertEqual(len(time_series_list), 1) + time_series = time_series_list[0] + self.assertEqual(time_series.resource.type, "gce_instance") + self.check_labels( + time_series.resource.labels, { + 'instance_id': 
'my-instance', + 'project_id': 'my-project', + 'zone': 'us-east1', + }) + self.assertEqual( + time_series.metric.type, + "custom.googleapis.com/opencensus/my.org/views/video_size_test2") + self.assertIsNotNone(time_series) + + time_series_list = exporter.create_time_series_list(v_data) + self.assertEqual(len(time_series_list), 1) + time_series = time_series_list[0] + + self.assertEqual( + time_series.metric.type, + "custom.googleapis.com/opencensus/my.org/views/video_size_test2") + + # check for k8s_container monitored resource + mocked_labels = { + 'instance_id': 'my-instance', + 'project_id': 'my-project', + 'zone': 'us-east1', + 'k8s.io/pod/name': 'localhost', + 'k8s.io/cluster/name': 'cluster', + 'k8s.io/namespace/name': 'namespace', + } + + mock_resource = mock.Mock() + mock_resource.get_type.return_value = 'k8s_container' + mock_resource.get_labels.return_value = mocked_labels + monitor_resource_mock.return_value = mock_resource + + time_series_list = exporter.create_time_series_list(v_data) + self.assertEqual(len(time_series_list), 1) + time_series = time_series_list[0] + self.assertEqual(time_series.resource.type, "k8s_container") + self.check_labels( + time_series.resource.labels, { + 'project_id': 'my-project', + 'location': 'us-east1', + 'cluster_name': 'cluster', + 'pod_name': 'localhost', + 'namespace_name': 'namespace', + }) + self.assertEqual( + time_series.metric.type, + "custom.googleapis.com/opencensus/my.org/views/video_size_test2") + self.assertIsNotNone(time_series) + + # check for aws_ec2_instance monitored resource + mocked_labels = { + 'instance_id': 'my-instance', + 'aws_account': 'my-project', + 'region': 'us-east1', + } + + mock_resource = mock.Mock() + mock_resource.get_type.return_value = 'aws_ec2_instance' + mock_resource.get_labels.return_value = mocked_labels + monitor_resource_mock.return_value = mock_resource + + time_series_list = exporter.create_time_series_list(v_data) + self.assertEqual(len(time_series_list), 1) + time_series = 
time_series_list[0] + self.assertEqual(time_series.resource.type, "aws_ec2_instance") + self.check_labels( + time_series.resource.labels, { + 'instance_id': 'my-instance', + 'aws_account': 'my-project', + 'region': 'aws:us-east1', + }) + self.assertEqual( + time_series.metric.type, + "custom.googleapis.com/opencensus/my.org/views/video_size_test2") + self.assertIsNotNone(time_series) + + # check for out of box monitored resource + mock_resource = mock.Mock() + mock_resource.get_type.return_value = '' + mock_resource.get_labels.return_value = mock.Mock() + monitor_resource_mock.return_value = mock_resource + + time_series_list = exporter.create_time_series_list(v_data) + self.assertEqual(len(time_series_list), 1) + time_series = time_series_list[0] + self.assertEqual(time_series.resource.type, 'global') + self.check_labels(time_series.resource.labels, {}) + self.assertEqual( + time_series.metric.type, + "custom.googleapis.com/opencensus/my.org/views/video_size_test2") + self.assertIsNotNone(time_series) + + @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+ 'monitored_resource.get_instance', + return_value=None) + def test_create_timeseries_str_tagvalue(self, monitor_resource_mock): + view_manager, stats_recorder, exporter = \ + self.setup_create_timeseries_test() + + agg_1 = aggregation_module.LastValueAggregation(value=2) + view_name1 = "view-name1" + new_view1 = view_module.View( + view_name1, "processed video size over time", [FRONTEND_KEY_INT], + VIDEO_SIZE_MEASURE_2, agg_1) + + view_manager.register_view(new_view1) + + tag_value_int = tag_value_module.TagValue("Abc") + tag_map = tag_map_module.TagMap() + tag_map.insert(FRONTEND_KEY_INT, tag_value_int) + + measure_map = stats_recorder.new_measurement_map() + measure_map.measure_int_put(VIDEO_SIZE_MEASURE_2, 25 * MiB) + measure_map.record(tag_map) + + v_data = measure_map.measure_to_view_map.get_view(view_name1, None) + + v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + + time_series_list = exporter.create_time_series_list(v_data) + self.assertEqual(len(time_series_list), 1) + time_series = time_series_list[0] + + self.check_labels( + time_series.metric.labels, {FRONTEND_KEY_INT_CLEAN: "Abc"}, + include_opencensus=True) + self.assertIsNotNone(time_series.resource) + + self.assertEqual(len(time_series.points), 1) + expected_value = monitoring_v3.types.TypedValue() + # TODO: #565 + expected_value.double_value = 25.0 * MiB + self.assertEqual(time_series.points[0].value, expected_value) + + @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+ 'monitored_resource.get_instance', + return_value=None) + def test_create_timeseries_str_tagvalue_count_aggregtation( + self, monitor_resource_mock): + view_manager, stats_recorder, exporter = \ + self.setup_create_timeseries_test() + + agg_1 = aggregation_module.CountAggregation(count=2) + view_name1 = "view-name1" + new_view1 = view_module.View( + view_name1, "processed video size over time", [FRONTEND_KEY_INT], + VIDEO_SIZE_MEASURE_2, agg_1) + + view_manager.register_view(new_view1) + + tag_value_int = tag_value_module.TagValue("Abc") + tag_map = tag_map_module.TagMap() + tag_map.insert(FRONTEND_KEY_INT, tag_value_int) + + measure_map = stats_recorder.new_measurement_map() + measure_map.measure_int_put(VIDEO_SIZE_MEASURE_2, 25 * MiB) + measure_map.record(tag_map) + + v_data = measure_map.measure_to_view_map.get_view(view_name1, None) + + v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + + time_series_list = exporter.create_time_series_list(v_data) + self.assertEqual(len(time_series_list), 1) + time_series = time_series_list[0] + self.check_labels( + time_series.metric.labels, {FRONTEND_KEY_INT_CLEAN: "Abc"}, + include_opencensus=True) + self.assertIsNotNone(time_series.resource) + + self.assertEqual(len(time_series.points), 1) + expected_value = monitoring_v3.types.TypedValue() + expected_value.int64_value = 3 + self.assertEqual(time_series.points[0].value, expected_value) + + @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+ 'monitored_resource.get_instance', + return_value=None) + def test_create_timeseries_last_value_float_tagvalue( + self, monitor_resource_mock): + view_manager, stats_recorder, exporter = \ + self.setup_create_timeseries_test() + + agg_2 = aggregation_module.LastValueAggregation(value=2.2 * MiB) + view_name2 = "view-name2" + new_view2 = view_module.View( + view_name2, "processed video size over time", [FRONTEND_KEY_FLOAT], + VIDEO_SIZE_MEASURE_FLOAT, agg_2) + + view_manager.register_view(new_view2) + + tag_value_float = tag_value_module.TagValue("Abc") + tag_map = tag_map_module.TagMap() + tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float) + + measure_map = stats_recorder.new_measurement_map() + measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25.7 * MiB) + measure_map.record(tag_map) + + v_data = measure_map.measure_to_view_map.get_view(view_name2, None) + + v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + + time_series_list = exporter.create_time_series_list(v_data) + self.assertEqual(len(time_series_list), 1) + time_series = time_series_list[0] + self.check_labels( + time_series.metric.labels, {FRONTEND_KEY_FLOAT_CLEAN: "Abc"}, + include_opencensus=True) + self.assertIsNotNone(time_series.resource) + + self.assertEqual(len(time_series.points), 1) + expected_value = monitoring_v3.types.TypedValue() + expected_value.double_value = 25.7 * MiB + self.assertEqual(time_series.points[0].value, expected_value) + + @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+ 'monitored_resource.get_instance', + return_value=None) + def test_create_timeseries_float_tagvalue(self, monitor_resource_mock): + client = mock.Mock() + + option = stackdriver.Options( + project_id="project-test", resource="global") + exporter = stackdriver.StackdriverStatsExporter( + options=option, client=client) + + stats = stats_module.stats + view_manager = stats.view_manager + stats_recorder = stats.stats_recorder + + if len(view_manager.measure_to_view_map.exporters) > 0: + view_manager.unregister_exporter( + view_manager.measure_to_view_map.exporters[0]) + + view_manager.register_exporter(exporter) + + agg_3 = aggregation_module.SumAggregation(sum=2.2) + view_name3 = "view-name3" + new_view3 = view_module.View( + view_name3, "processed video size over time", [FRONTEND_KEY_FLOAT], + VIDEO_SIZE_MEASURE_FLOAT, agg_3) + + view_manager.register_view(new_view3) + + tag_value_float = tag_value_module.TagValue("1200") + tag_map = tag_map_module.TagMap() + tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float) + + measure_map = stats_recorder.new_measurement_map() + measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB) + measure_map.record(tag_map) + + v_data = measure_map.measure_to_view_map.get_view(view_name3, None) + + v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + + time_series_list = exporter.create_time_series_list(v_data) + self.assertEqual(len(time_series_list), 1) + [time_series] = time_series_list + self.assertEqual(time_series.metric.type, + "custom.googleapis.com/opencensus/view-name3") + self.check_labels( + time_series.metric.labels, {FRONTEND_KEY_FLOAT_CLEAN: "1200"}, + include_opencensus=True) + self.assertIsNotNone(time_series.resource) + + self.assertEqual(len(time_series.points), 1) + expected_value = monitoring_v3.types.TypedValue() + expected_value.double_value = 2.2 + 25 * MiB + self.assertEqual(time_series.points[0].value, expected_value) + + @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
+ 'monitored_resource.get_instance', + return_value=None) + def test_create_timeseries_multiple_tag_values(self, + monitoring_resoure_mock): + view_manager, stats_recorder, exporter = \ + self.setup_create_timeseries_test() + + view_manager.register_view(VIDEO_SIZE_VIEW) + + measure_map = stats_recorder.new_measurement_map() + + # Add first point with one tag value + tag_map = tag_map_module.TagMap() + tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200")) + measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) + measure_map.record(tag_map) + + # Add second point with different tag value + tag_map = tag_map_module.TagMap() + tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1400")) + measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 12 * MiB) + measure_map.record(tag_map) + + v_data = measure_map.measure_to_view_map.get_view( + VIDEO_SIZE_VIEW_NAME, None) + + v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + + time_series_list = exporter.create_time_series_list(v_data) + + self.assertEqual(len(time_series_list), 2) + ts_by_frontend = { + ts.metric.labels.get(FRONTEND_KEY_CLEAN): ts + for ts in time_series_list + } + self.assertEqual(set(ts_by_frontend.keys()), {"1200", "1400"}) + ts1 = ts_by_frontend["1200"] + ts2 = ts_by_frontend["1400"] + + # Verify first time series + self.assertEqual(ts1.resource.type, "global") + self.assertEqual( + ts1.metric.type, + "custom.googleapis.com/opencensus/my.org/views/video_size_test2") + self.assertIsNotNone(ts1.resource) + + self.assertEqual(len(ts1.points), 1) + value1 = ts1.points[0].value + self.assertEqual(value1.distribution_value.count, 1) + + # Verify second time series + self.assertEqual(ts2.resource.type, "global") + self.assertEqual( + ts2.metric.type, + "custom.googleapis.com/opencensus/my.org/views/video_size_test2") + self.assertIsNotNone(ts2.resource) + + self.assertEqual(len(ts2.points), 1) + value2 = ts2.points[0].value + self.assertEqual(value2.distribution_value.count, 1) + + 
@mock.patch('opencensus.ext.stackdriver.stats_exporter.' + 'monitored_resource.get_instance', + return_value=None) + def test_create_timeseries_disjoint_tags(self, monitoring_resoure_mock): + view_manager, stats_recorder, exporter = \ + self.setup_create_timeseries_test() + + # Register view with two tags + view_name = "view-name" + view = view_module.View(view_name, "test description", + [FRONTEND_KEY, FRONTEND_KEY_FLOAT], + VIDEO_SIZE_MEASURE, + aggregation_module.SumAggregation()) + + view_manager.register_view(view) + + # Add point with one tag in common and one different tag + measure_map = stats_recorder.new_measurement_map() + tag_map = tag_map_module.TagMap() + tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200")) + tag_map.insert(FRONTEND_KEY_STR, tag_value_module.TagValue("1800")) + measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB) + measure_map.record(tag_map) + + v_data = measure_map.measure_to_view_map.get_view(view_name, None) + + v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + + time_series_list = exporter.create_time_series_list(v_data) + + self.assertEqual(len(time_series_list), 1) + [time_series] = time_series_list + + # Verify first time series + self.assertEqual(time_series.resource.type, "global") + self.assertEqual(time_series.metric.type, + "custom.googleapis.com/opencensus/" + view_name) + self.check_labels( + time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, + include_opencensus=True) + self.assertIsNotNone(time_series.resource) + + self.assertEqual(len(time_series.points), 1) + expected_value = monitoring_v3.types.TypedValue() + # TODO: #565 + expected_value.double_value = 25.0 * MiB + self.assertEqual(time_series.points[0].value, expected_value) + + def test_create_timeseries_from_distribution(self): + """Check for explicit 0-bound bucket for SD export.""" + agg = aggregation_module.DistributionAggregation() + + view = view_module.View( + name="example.org/test_view", + 
description="example.org/test_view", + columns=['tag_key'], + measure=mock.Mock(), + aggregation=agg, + ) + + v_data = view_data_module.ViewData( + view=view, + start_time=TEST_TIME_STR, + end_time=TEST_TIME_STR, + ) + + # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8] + dad = aggregation_data_module.DistributionAggregationData( + mean_data=4.5, + count_data=100, + sum_of_sqd_deviations=825, + counts_per_bucket=[20, 20, 20, 20, 20], + bounds=[2, 4, 6, 8], + exemplars={mock.Mock() for ii in range(5)} + ) + v_data._tag_value_aggregation_data_map = {('tag_value',): dad} + + v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + + exporter = stackdriver.StackdriverStatsExporter() + time_series_list = exporter.create_time_series_list(v_data) + self.assertEqual(len(time_series_list), 1) + [time_series] = time_series_list + + self.check_labels( + time_series.metric.labels, {'tag_key': 'tag_value'}, + include_opencensus=True) + self.assertEqual(len(time_series.points), 1) + [point] = time_series.points + dv = point.value.distribution_value + self.assertEqual(100, dv.count) + self.assertEqual(825.0, dv.sum_of_squared_deviation) + self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts) + self.assertEqual([0, 2, 4, 6, 8], + dv.bucket_options.explicit_buckets.bounds) + + def test_create_timeseries_multiple_tags(self): + """Check that exporter creates timeseries for multiple tag values. + + create_time_series_list should return a time series for each set of + values in the tag value aggregation map. 
+ """ + agg = aggregation_module.CountAggregation() + + view = view_module.View( + name="example.org/test_view", + description="example.org/test_view", + columns=[tag_key_module.TagKey('color'), + tag_key_module.TagKey('shape')], + measure=mock.Mock(), + aggregation=agg, + ) + + v_data = view_data_module.ViewData( + view=view, + start_time=TEST_TIME_STR, + end_time=TEST_TIME_STR, + ) + + rs_count = aggregation_data_module.CountAggregationData(10) + bc_count = aggregation_data_module.CountAggregationData(20) + v_data._tag_value_aggregation_data_map = { + ('red', 'square'): rs_count, + ('blue', 'circle'): bc_count, + } + + v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME) + + exporter = stackdriver.StackdriverStatsExporter() + time_series_list = exporter.create_time_series_list(v_data) + + self.assertEqual(len(time_series_list), 2) + self.assertEqual(len(time_series_list[0].points), 1) + self.assertEqual(len(time_series_list[1].points), 1) + + ts_by_color = {ts.metric.labels.get('color'): ts + for ts in time_series_list} + rs_ts = ts_by_color['red'] + bc_ts = ts_by_color['blue'] + self.assertEqual(rs_ts.metric.labels.get('shape'), 'square') + self.assertEqual(bc_ts.metric.labels.get('shape'), 'circle') + self.assertEqual(rs_ts.points[0].value.int64_value, 10) + self.assertEqual(bc_ts.points[0].value.int64_value, 20) + + def test_create_timeseries_invalid_aggregation(self): + v_data = mock.Mock(spec=view_data_module.ViewData) + v_data.view.name = "example.org/base_view" + v_data.view.columns = [tag_key_module.TagKey('base_key')] + v_data.start_time = TEST_TIME_STR + v_data.end_time = TEST_TIME_STR + + base_data = None + v_data.tag_value_aggregation_data_map = { + (None,): base_data, + } + + exporter = stackdriver.StackdriverStatsExporter( + options=mock.Mock(), + client=mock.Mock(), + ) + self.assertRaises(TypeError, exporter.create_time_series_list, v_data, + "", "") From 18466760f2247eb5445a7a162d4529f8795bb106 Mon Sep 17 00:00:00 2001 From: Marian 
Hromiak <62988885+marianhromiak@users.noreply.github.com> Date: Thu, 25 Jun 2020 20:01:09 +0200 Subject: [PATCH 49/79] Add mean property for distribution value (#919) --- .../opencensus-ext-stackdriver/CHANGELOG.md | 3 + .../stackdriver/stats_exporter/__init__.py | 1 + .../tests/test_stackdriver_stats.py | 60 ++++++++++++------- 3 files changed, 44 insertions(+), 20 deletions(-) diff --git a/contrib/opencensus-ext-stackdriver/CHANGELOG.md b/contrib/opencensus-ext-stackdriver/CHANGELOG.md index a3b8b4c7b..98e4c6e6b 100644 --- a/contrib/opencensus-ext-stackdriver/CHANGELOG.md +++ b/contrib/opencensus-ext-stackdriver/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased + - Add mean property for distribution values + ([#919](https://github.com/census-instrumentation/opencensus-python/pull/919)) + ## 0.7.2 Released 2019-08-26 diff --git a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/stats_exporter/__init__.py b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/stats_exporter/__init__.py index 9249294ff..b47d44b81 100644 --- a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/stats_exporter/__init__.py +++ b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/stats_exporter/__init__.py @@ -201,6 +201,7 @@ def _convert_point(self, metric, ts, point, sd_point): sd_dist_val.count = point.value.count sd_dist_val.sum_of_squared_deviation =\ point.value.sum_of_squared_deviation + sd_dist_val.mean = point.value.sum / sd_dist_val.count assert sd_dist_val.bucket_options.explicit_buckets.bounds == [] sd_dist_val.bucket_options.explicit_buckets.bounds.extend( diff --git a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py index fd3b517c6..d301a0d15 100644 --- a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py +++ b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py @@ -822,20 +822,16 @@ def test_create_timeseries(self, 
monitor_resource_mock): self.assertEqual(len(time_series.points), 1) value = time_series.points[0].value - self.assertEqual(value.distribution_value.count, 1) - time_series_list = exporter.create_time_series_list(v_data) - - self.assertEqual(len(time_series_list), 1) - time_series = time_series_list[0] - self.check_labels( - time_series.metric.labels, {FRONTEND_KEY_CLEAN: "1200"}, - include_opencensus=True) - self.assertIsNotNone(time_series.resource) - - self.assertEqual(len(time_series.points), 1) - value = time_series.points[0].value - self.assertEqual(value.distribution_value.count, 1) + expected_distb = google.api.distribution_pb2.Distribution( + count=1, + mean=26214400.0, + bucket_options=google.api.distribution_pb2.Distribution.BucketOptions( # noqa + explicit_buckets=google.api.distribution_pb2.Distribution.BucketOptions.Explicit( # noqa + bounds=[0.0, 16777216.0, 268435456.0])), + bucket_counts=[0, 0, 1, 0] + ) + self.assertEqual(value.distribution_value, expected_distb) @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
'monitored_resource.get_instance') @@ -1213,7 +1209,16 @@ def test_create_timeseries_multiple_tag_values(self, self.assertEqual(len(ts1.points), 1) value1 = ts1.points[0].value - self.assertEqual(value1.distribution_value.count, 1) + + expected_distb = google.api.distribution_pb2.Distribution( + count=1, + mean=26214400.0, + bucket_options=google.api.distribution_pb2.Distribution.BucketOptions( # noqa + explicit_buckets=google.api.distribution_pb2.Distribution.BucketOptions.Explicit( # noqa + bounds=[0.0, 16777216.0, 268435456.0])), + bucket_counts=[0, 0, 1, 0] + ) + self.assertEqual(value1.distribution_value, expected_distb) # Verify second time series self.assertEqual(ts2.resource.type, "global") @@ -1224,7 +1229,16 @@ def test_create_timeseries_multiple_tag_values(self, self.assertEqual(len(ts2.points), 1) value2 = ts2.points[0].value - self.assertEqual(value2.distribution_value.count, 1) + + expected_distb = google.api.distribution_pb2.Distribution( + count=1, + mean=12582912.0, + bucket_options=google.api.distribution_pb2.Distribution.BucketOptions( # noqa + explicit_buckets=google.api.distribution_pb2.Distribution.BucketOptions.Explicit( # noqa + bounds=[0.0, 16777216.0, 268435456.0])), + bucket_counts=[0, 1, 0, 0] + ) + self.assertEqual(value2.distribution_value, expected_distb) @mock.patch('opencensus.ext.stackdriver.stats_exporter.' 
'monitored_resource.get_instance', @@ -1315,12 +1329,18 @@ def test_create_timeseries_from_distribution(self): include_opencensus=True) self.assertEqual(len(time_series.points), 1) [point] = time_series.points + dv = point.value.distribution_value - self.assertEqual(100, dv.count) - self.assertEqual(825.0, dv.sum_of_squared_deviation) - self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts) - self.assertEqual([0, 2, 4, 6, 8], - dv.bucket_options.explicit_buckets.bounds) + expected_distb = google.api.distribution_pb2.Distribution( + count=100, + mean=4.5, + sum_of_squared_deviation=825.0, + bucket_options=google.api.distribution_pb2.Distribution.BucketOptions( # noqa + explicit_buckets=google.api.distribution_pb2.Distribution.BucketOptions.Explicit( # noqa + bounds=[0, 2, 4, 6, 8])), + bucket_counts=[0, 20, 20, 20, 20, 20] + ) + self.assertEqual(dv, expected_distb) def test_create_timeseries_multiple_tags(self): """Check that exporter creates timeseries for multiple tag values. From 07cb5f01e1054d9ff0da54f7df8c3619806ceb02 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Fri, 26 Jun 2020 09:36:34 -0700 Subject: [PATCH 50/79] Add @aabmass to CODEOWNERs (#923) --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index b1f3db1df..a94f95b81 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,4 +2,4 @@ # This file controls who is tagged for review for any given pull request. 
# For anything not explicitly taken by someone else: -* @census-instrumentation/global-owners @c24t @hectorhdzg @lzchen @reyang @songy23 @victoraugustolls +* @census-instrumentation/global-owners @aabmass @c24t @hectorhdzg @lzchen @reyang @songy23 @victoraugustolls From 86d5bd7e836e93d4762a0f46e0326912d2136fd2 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Fri, 26 Jun 2020 10:07:42 -0700 Subject: [PATCH 51/79] Remove dependencies/sec from performance counters for Azure (#922) --- contrib/opencensus-ext-azure/CHANGELOG.md | 3 + contrib/opencensus-ext-azure/README.rst | 11 +-- .../standard_metrics/__init__.py | 4 - .../standard_metrics/http_dependency.py | 92 ------------------- .../tests/test_azure_standard_metrics.py | 59 +----------- 5 files changed, 10 insertions(+), 159 deletions(-) delete mode 100644 contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 7efa8b779..ce9f4a741 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased +- Remove dependency rate from standard metrics + ([#903](https://github.com/census-instrumentation/opencensus-python/pull/903)) + ## 1.0.3 Released 2020-06-17 diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index 49ba625f3..9e726c08a 100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -178,10 +178,10 @@ The **Azure Monitor Metrics Exporter** allows you to export metrics to `Azure Mo if __name__ == "__main__": main() -Standard Metrics -################ +Performance counters +#################### -The exporter also includes a set of standard metrics that are exported to Azure Monitor by default. +The exporter also includes a set of performance counters that are exported to Azure Monitor by default. .. 
code:: python @@ -191,7 +191,7 @@ The exporter also includes a set of standard metrics that are exported to Azure from opencensus.ext.azure import metrics_exporter def main(): - # All you need is the next line. You can disable standard metrics by + # All you need is the next line. You can disable performance counters by # passing in enable_standard_metrics=False into the constructor of # new_metrics_exporter() _exporter = metrics_exporter.new_metrics_exporter(connection_string='InstrumentationKey=') @@ -205,13 +205,12 @@ The exporter also includes a set of standard metrics that are exported to Azure if __name__ == "__main__": main() -Below is a list of standard metrics that are currently available: +Below is a list of performance counters that are currently available: - Available Memory (bytes) - CPU Processor Time (percentage) - Incoming Request Rate (per second) - Incoming Request Average Execution Time (milliseconds) -- Outgoing Request Rate (per second) - Process CPU Usage (percentage) - Process Private Bytes (bytes) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py index b9ce6380e..3ad5109ca 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/__init__.py @@ -15,9 +15,6 @@ from opencensus.ext.azure.metrics_exporter.standard_metrics.cpu import ( ProcessorTimeMetric, ) -from opencensus.ext.azure.metrics_exporter.standard_metrics.http_dependency import ( # noqa E501 - DependencyRateMetric, -) from opencensus.ext.azure.metrics_exporter.standard_metrics.http_requests import ( # noqa E501 RequestsAvgExecutionMetric, RequestsRateMetric, @@ -34,7 +31,6 @@ # List of standard metrics to track STANDARD_METRICS = [AvailableMemoryMetric, - DependencyRateMetric, 
ProcessCPUMetric, ProcessMemoryMetric, ProcessorTimeMetric, diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py deleted file mode 100644 index 0632ba2d2..000000000 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/standard_metrics/http_dependency.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2019, OpenCensus Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import threading -import time - -import requests - -from opencensus.metrics.export.gauge import DerivedDoubleGauge -from opencensus.trace import execution_context - -dependency_map = dict() -_dependency_lock = threading.Lock() -ORIGINAL_REQUEST = requests.Session.request - - -def dependency_patch(*args, **kwargs): - result = ORIGINAL_REQUEST(*args, **kwargs) - # Only collect request metric if sent from non-exporter thread - if not execution_context.is_exporter(): - # We don't want multiple threads updating this at once - with _dependency_lock: - count = dependency_map.get('count', 0) - dependency_map['count'] = count + 1 - return result - - -def setup(): - # Patch the requests library functions to track dependency information - requests.Session.request = dependency_patch - - -class DependencyRateMetric(object): - # Dependency call metrics can be found under custom metrics - NAME = "\\ApplicationInsights\\Dependency Calls/Sec" - - def __init__(self): - setup() - - @staticmethod - def get_value(): - current_count = dependency_map.get('count', 0) - current_time = time.time() - last_count = dependency_map.get('last_count', 0) - last_time = dependency_map.get('last_time') - last_result = dependency_map.get('last_result', 0) - - try: - # last_time is None the very first time this function is called - if last_time is not None: - elapsed_seconds = current_time - last_time - interval_count = current_count - last_count - result = interval_count / elapsed_seconds - else: - result = 0 - dependency_map['last_time'] = current_time - dependency_map['last_count'] = current_count - dependency_map['last_result'] = result - return result - except ZeroDivisionError: - # If elapsed_seconds is 0, exporter call made too close to previous - # Return the previous result if this is the case - return last_result - - def __call__(self): - """ Returns a derived gauge for outgoing requests per second - - Calculated by obtaining by getting the number of outgoing requests made - using the 
requests library within an elapsed time and dividing that - value over the elapsed time. - - :rtype: :class:`opencensus.metrics.export.gauge.DerivedLongGauge` - :return: The gauge representing the outgoing requests metric - """ - gauge = DerivedDoubleGauge( - DependencyRateMetric.NAME, - 'Outgoing Requests per second', - 'rps', - []) - gauge.create_default_time_series(DependencyRateMetric.get_value) - return gauge diff --git a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py index 5cbb6e601..317c0e2c5 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py @@ -20,7 +20,6 @@ import requests from opencensus.ext.azure.metrics_exporter import standard_metrics -from opencensus.trace import execution_context if sys.version_info < (3,): from BaseHTTPServer import HTTPServer @@ -33,10 +32,8 @@ class TestStandardMetrics(unittest.TestCase): def setUp(self): - standard_metrics.http_dependency.dependency_map.clear() standard_metrics.http_requests.requests_map.clear() requests.Session.request = ORIGINAL_FUNCTION - standard_metrics.http_dependency.ORIGINAL_REQUEST = ORIGINAL_FUNCTION standard_metrics.http_requests.ORIGINAL_CONSTRUCTOR = ORIGINAL_CONS @mock.patch('opencensus.ext.azure.metrics_exporter' @@ -50,12 +47,12 @@ def test_producer_get_metrics(self): producer = standard_metrics.AzureStandardMetricsProducer() metrics = producer.get_metrics() - self.assertEqual(len(metrics), 7) + self.assertEqual(len(metrics), 6) def test_register_metrics(self): registry = standard_metrics.register_metrics() - self.assertEqual(len(registry.get_metrics()), 7) + self.assertEqual(len(registry.get_metrics()), 6) def test_get_available_memory_metric(self): metric = standard_metrics.AvailableMemoryMetric() @@ -144,58 +141,6 @@ def test_get_process_cpu_usage_exception(self, logger_mock): 
logger_mock.exception.assert_called() - def test_dependency_patch(self): - map = standard_metrics.http_dependency.dependency_map - standard_metrics.http_dependency.ORIGINAL_REQUEST = lambda x: None - session = requests.Session() - execution_context.set_is_exporter(False) - result = standard_metrics.http_dependency.dependency_patch(session) - - self.assertEqual(map['count'], 1) - self.assertIsNone(result) - - def test_dependency_patch_exporter_thread(self): - map = standard_metrics.http_dependency.dependency_map - standard_metrics.http_dependency.ORIGINAL_REQUEST = lambda x: None - session = mock.Mock() - execution_context.set_is_exporter(True) - result = standard_metrics.http_dependency.dependency_patch(session) - - self.assertIsNone(map.get('count')) - self.assertIsNone(result) - - def test_get_dependency_rate_metric(self): - metric = standard_metrics.DependencyRateMetric() - gauge = metric() - - self.assertEqual(gauge.descriptor.name, - '\\ApplicationInsights\\Dependency Calls/Sec') - - def test_get_dependency_rate_first_time(self): - rate = standard_metrics.DependencyRateMetric.get_value() - - self.assertEqual(rate, 0) - - @mock.patch('opencensus.ext.azure.metrics_exporter' - '.standard_metrics.http_dependency.time') - def test_get_dependency_rate(self, time_mock): - time_mock.time.return_value = 100 - standard_metrics.http_dependency.dependency_map['last_time'] = 98 - standard_metrics.http_dependency.dependency_map['count'] = 4 - rate = standard_metrics.DependencyRateMetric.get_value() - - self.assertEqual(rate, 2) - - @mock.patch('opencensus.ext.azure.metrics_exporter' - '.standard_metrics.http_dependency.time') - def test_get_dependency_rate_error(self, time_mock): - time_mock.time.return_value = 100 - standard_metrics.http_dependency.dependency_map['last_result'] = 5 - standard_metrics.http_dependency.dependency_map['last_time'] = 100 - result = standard_metrics.DependencyRateMetric.get_value() - - self.assertEqual(result, 5) - def test_request_patch(self): 
map = standard_metrics.http_requests.requests_map func = mock.Mock() From dea0794640497e2338798651457ecd228e258a6c Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Mon, 29 Jun 2020 11:43:50 -0700 Subject: [PATCH 52/79] Implement custom events in Azure (#925) --- contrib/opencensus-ext-azure/README.rst | 15 ++ .../examples/logs/correlated.py | 2 + .../examples/logs/error.py | 1 + .../examples/logs/event.py | 27 +++ .../examples/logs/properties.py | 2 + .../examples/logs/simple.py | 2 + .../ext/azure/log_exporter/__init__.py | 148 +++++++------ .../tests/test_azure_log_exporter.py | 195 +++++++++++++++++- 8 files changed, 323 insertions(+), 69 deletions(-) create mode 100644 contrib/opencensus-ext-azure/examples/logs/event.py diff --git a/contrib/opencensus-ext-azure/README.rst b/contrib/opencensus-ext-azure/README.rst index 9e726c08a..8cb7b64e7 100644 --- a/contrib/opencensus-ext-azure/README.rst +++ b/contrib/opencensus-ext-azure/README.rst @@ -121,6 +121,21 @@ Modifying Logs logger.addHandler(handler) logger.warning('Hello, World!') +Events +###### + +You can send `customEvent` telemetry in exactly the same way you would send `trace` telemetry except using the `AzureEventHandler` instead. + +.. 
code:: python + + import logging + + from opencensus.ext.azure.log_exporter import AzureEventHandler + + logger = logging.getLogger(__name__) + logger.addHandler(AzureEventHandler(connection_string='InstrumentationKey=')) + logger.setLevel(logging.INFO) + logger.info('Hello, World!') Metrics ~~~~~~~ diff --git a/contrib/opencensus-ext-azure/examples/logs/correlated.py b/contrib/opencensus-ext-azure/examples/logs/correlated.py index 69445e997..9854b9a91 100644 --- a/contrib/opencensus-ext-azure/examples/logs/correlated.py +++ b/contrib/opencensus-ext-azure/examples/logs/correlated.py @@ -36,3 +36,5 @@ with tracer.span(name='test'): logger.warning('In the span') logger.warning('After the span') + +input("...") diff --git a/contrib/opencensus-ext-azure/examples/logs/error.py b/contrib/opencensus-ext-azure/examples/logs/error.py index 772861cf1..f1349226d 100644 --- a/contrib/opencensus-ext-azure/examples/logs/error.py +++ b/contrib/opencensus-ext-azure/examples/logs/error.py @@ -32,3 +32,4 @@ def main(): if __name__ == '__main__': main() + input("...") diff --git a/contrib/opencensus-ext-azure/examples/logs/event.py b/contrib/opencensus-ext-azure/examples/logs/event.py new file mode 100644 index 000000000..d62f82468 --- /dev/null +++ b/contrib/opencensus-ext-azure/examples/logs/event.py @@ -0,0 +1,27 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging + +from opencensus.ext.azure.log_exporter import AzureEventHandler + +logger = logging.getLogger(__name__) +# TODO: you need to specify the instrumentation key in a connection string +# and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING +# environment variable. +logger.addHandler(AzureEventHandler()) +logger.setLevel(logging.INFO) +logger.info('Hello, World!') + +input("...") diff --git a/contrib/opencensus-ext-azure/examples/logs/properties.py b/contrib/opencensus-ext-azure/examples/logs/properties.py index 5cfdd3568..f00b63b77 100644 --- a/contrib/opencensus-ext-azure/examples/logs/properties.py +++ b/contrib/opencensus-ext-azure/examples/logs/properties.py @@ -32,3 +32,5 @@ result = 1 / 0 # generate a ZeroDivisionError except Exception: logger.exception('Captured an exception.', extra=properties) + +input("...") diff --git a/contrib/opencensus-ext-azure/examples/logs/simple.py b/contrib/opencensus-ext-azure/examples/logs/simple.py index 1fba3d668..87031ed07 100644 --- a/contrib/opencensus-ext-azure/examples/logs/simple.py +++ b/contrib/opencensus-ext-azure/examples/logs/simple.py @@ -22,3 +22,5 @@ # environment variable. 
logger.addHandler(AzureLogHandler()) logger.warning('Hello, World!') + +input("...") diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index 60c42e566..a83dec8c7 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -24,6 +24,7 @@ from opencensus.ext.azure.common.protocol import ( Data, Envelope, + Event, ExceptionData, Message, ) @@ -33,17 +34,51 @@ logger = logging.getLogger(__name__) -__all__ = ['AzureLogHandler'] +__all__ = ['AzureEventHandler', 'AzureLogHandler'] class BaseLogHandler(logging.Handler): - def __init__(self): + + def __init__(self, **options): super(BaseLogHandler, self).__init__() + self.options = Options(**options) + utils.validate_instrumentation_key(self.options.instrumentation_key) + if not 0 <= self.options.logging_sampling_rate <= 1: + raise ValueError('Sampling must be in the range: [0,1]') + self.export_interval = self.options.export_interval + self.max_batch_size = self.options.max_batch_size + self.storage = LocalFileStorage( + path=self.options.storage_path, + max_size=self.options.storage_max_size, + maintenance_period=self.options.storage_maintenance_period, + retention_period=self.options.storage_retention_period, + ) + self._telemetry_processors = [] + self.addFilter(SamplingFilter(self.options.logging_sampling_rate)) self._queue = Queue(capacity=8192) # TODO: make this configurable self._worker = Worker(self._queue, self) self._worker.start() + def _export(self, batch, event=None): # pragma: NO COVER + try: + if batch: + envelopes = [self.log_record_to_envelope(x) for x in batch] + envelopes = self.apply_telemetry_processors(envelopes) + result = self._transmit(envelopes) + if result > 0: + self.storage.put(envelopes, result) + if event: + if isinstance(event, QueueExitEvent): + 
self._transmit_from_storage() # send files before exit + return + if len(batch) < self.options.max_batch_size: + self._transmit_from_storage() + finally: + if event: + event.set() + def close(self): + self.storage.close() self._worker.stop() def createLock(self): @@ -52,14 +87,7 @@ def createLock(self): def emit(self, record): self._queue.put(record, block=False) - def _export(self, batch, event=None): - try: - return self.export(batch) - finally: - if event: - event.set() - - def export(self, batch): + def log_record_to_envelope(self, record): raise NotImplementedError # pragma: NO COVER def flush(self, timeout=None): @@ -121,66 +149,11 @@ def filter(self, record): class AzureLogHandler(TransportMixin, ProcessorMixin, BaseLogHandler): - """Handler for logging to Microsoft Azure Monitor. - - :param options: Options for the log handler. - """ - - def __init__(self, **options): - self.options = Options(**options) - utils.validate_instrumentation_key(self.options.instrumentation_key) - if not 0 <= self.options.logging_sampling_rate <= 1: - raise ValueError('Sampling must be in the range: [0,1]') - self.export_interval = self.options.export_interval - self.max_batch_size = self.options.max_batch_size - self.storage = LocalFileStorage( - path=self.options.storage_path, - max_size=self.options.storage_max_size, - maintenance_period=self.options.storage_maintenance_period, - retention_period=self.options.storage_retention_period, - ) - self._telemetry_processors = [] - super(AzureLogHandler, self).__init__() - self.addFilter(SamplingFilter(self.options.logging_sampling_rate)) - - def close(self): - self.storage.close() - super(AzureLogHandler, self).close() - - def _export(self, batch, event=None): # pragma: NO COVER - try: - if batch: - envelopes = [self.log_record_to_envelope(x) for x in batch] - envelopes = self.apply_telemetry_processors(envelopes) - result = self._transmit(envelopes) - if result > 0: - self.storage.put(envelopes, result) - if event: - if 
isinstance(event, QueueExitEvent): - self._transmit_from_storage() # send files before exit - return - if len(batch) < self.options.max_batch_size: - self._transmit_from_storage() - finally: - if event: - event.set() + """Handler for logging to Microsoft Azure Monitor.""" def log_record_to_envelope(self, record): - envelope = Envelope( - iKey=self.options.instrumentation_key, - tags=dict(utils.azure_monitor_context), - time=utils.timestamp_to_iso_str(record.created), - ) + envelope = create_envelope(self.options.instrumentation_key, record) - envelope.tags['ai.operation.id'] = getattr( - record, - 'traceId', - '00000000000000000000000000000000', - ) - envelope.tags['ai.operation.parentId'] = '|{}.{}.'.format( - envelope.tags['ai.operation.id'], - getattr(record, 'spanId', '0000000000000000'), - ) properties = { 'process': record.processName, 'module': record.module, @@ -188,7 +161,6 @@ def log_record_to_envelope(self, record): 'lineNumber': record.lineno, 'level': record.levelname, } - if (hasattr(record, 'custom_dimensions') and isinstance(record.custom_dimensions, dict)): properties.update(record.custom_dimensions) @@ -230,3 +202,43 @@ def log_record_to_envelope(self, record): ) envelope.data = Data(baseData=data, baseType='MessageData') return envelope + + +class AzureEventHandler(TransportMixin, ProcessorMixin, BaseLogHandler): + """Handler for sending custom events to Microsoft Azure Monitor.""" + + def log_record_to_envelope(self, record): + envelope = create_envelope(self.options.instrumentation_key, record) + + properties = {} + if (hasattr(record, 'custom_dimensions') and + isinstance(record.custom_dimensions, dict)): + properties.update(record.custom_dimensions) + + envelope.name = 'Microsoft.ApplicationInsights.Event' + data = Event( + name=self.format(record), + properties=properties, + ) + envelope.data = Data(baseData=data, baseType='EventData') + + return envelope + + +def create_envelope(instrumentation_key, record): + envelope = Envelope( + 
iKey=instrumentation_key, + tags=dict(utils.azure_monitor_context), + time=utils.timestamp_to_iso_str(record.created), + ) + envelope.tags['ai.operation.id'] = getattr( + record, + 'traceId', + '00000000000000000000000000000000', + ) + envelope.tags['ai.operation.parentId'] = '|{}.{}.'.format( + envelope.tags['ai.operation.id'], + getattr(record, 'spanId', '0000000000000000'), + ) + + return envelope diff --git a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py index 88b8c436b..5db155e54 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py @@ -43,7 +43,9 @@ def __init__(self, max_batch_size, callback): self.export_interval = 1 self.max_batch_size = max_batch_size self.callback = callback - super(CustomLogHandler, self).__init__() + super(CustomLogHandler, self).__init__( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + ) def export(self, batch): return self.callback(batch) @@ -259,3 +261,194 @@ def test_log_record_not_sampled(self, requests_mock): logger.warning('Hello_World4') handler.close() self.assertFalse(requests_mock.called) + + +class TestAzureEventHandler(unittest.TestCase): + def test_ctor(self): + from opencensus.ext.azure.common import Options + instrumentation_key = Options._default.instrumentation_key + Options._default.instrumentation_key = None + self.assertRaises(ValueError, lambda: log_exporter.AzureEventHandler()) + Options._default.instrumentation_key = instrumentation_key + + def test_invalid_sampling_rate(self): + with self.assertRaises(ValueError): + log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + logging_sampling_rate=4.0, + ) + + def test_init_handler_with_proxies(self): + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + 
proxies='{"https":"https://test-proxy.com"}', + ) + + self.assertEqual( + handler.options.proxies, + '{"https":"https://test-proxy.com"}', + ) + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_exception(self, requests_mock): + logger = logging.getLogger(self.id()) + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + logger.addHandler(handler) + try: + return 1 / 0 # generate a ZeroDivisionError + except Exception: + logger.exception('Captured an exception.') + handler.close() + self.assertEqual(len(requests_mock.call_args_list), 1) + post_body = requests_mock.call_args_list[0][1]['data'] + self.assertTrue('ZeroDivisionError' in post_body) + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_exception_with_custom_properties(self, requests_mock): + logger = logging.getLogger(self.id()) + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + logger.addHandler(handler) + try: + return 1 / 0 # generate a ZeroDivisionError + except Exception: + properties = { + 'custom_dimensions': + { + 'key_1': 'value_1', + 'key_2': 'value_2' + } + } + logger.exception('Captured an exception.', extra=properties) + handler.close() + self.assertEqual(len(requests_mock.call_args_list), 1) + post_body = requests_mock.call_args_list[0][1]['data'] + self.assertTrue('ZeroDivisionError' in post_body) + self.assertTrue('key_1' in post_body) + self.assertTrue('key_2' in post_body) + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_export_empty(self, request_mock): + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + handler._export([]) + self.assertEqual(len(os.listdir(handler.storage.path)), 0) + handler.close() 
+ + @mock.patch('opencensus.ext.azure.log_exporter' + '.AzureEventHandler.log_record_to_envelope') + def test_export_failure(self, log_record_to_envelope_mock): + log_record_to_envelope_mock.return_value = ['bar'] + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + with mock.patch('opencensus.ext.azure.log_exporter' + '.AzureEventHandler._transmit') as transmit: + transmit.return_value = 10 + handler._export(['foo']) + self.assertEqual(len(os.listdir(handler.storage.path)), 1) + self.assertIsNone(handler.storage.get()) + handler.close() + + def test_log_record_to_envelope(self): + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + envelope = handler.log_record_to_envelope(mock.MagicMock( + exc_info=None, + levelno=10, + )) + self.assertEqual( + envelope.iKey, + '12345678-1234-5678-abcd-12345678abcd') + handler.close() + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_log_record_with_custom_properties(self, requests_mock): + logger = logging.getLogger(self.id()) + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + logger.addHandler(handler) + logger.warning('action', extra={ + 'custom_dimensions': + { + 'key_1': 'value_1', + 'key_2': 'value_2' + } + }) + handler.close() + post_body = requests_mock.call_args_list[0][1]['data'] + self.assertTrue('action' in post_body) + self.assertTrue('key_1' in post_body) + self.assertTrue('key_2' in post_body) + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_log_with_invalid_custom_properties(self, requests_mock): + logger = logging.getLogger(self.id()) + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + 
storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + logger.addHandler(handler) + logger.warning('action_1_%s', None) + logger.warning('action_2_%s', 'arg', extra={ + 'custom_dimensions': 'not_a_dict' + }) + logger.warning('action_3_%s', 'arg', extra={ + 'notcustom_dimensions': {'key_1': 'value_1'} + }) + + handler.close() + self.assertEqual(len(os.listdir(handler.storage.path)), 0) + post_body = requests_mock.call_args_list[0][1]['data'] + self.assertTrue('action_1_' in post_body) + self.assertTrue('action_2_arg' in post_body) + self.assertTrue('action_3_arg' in post_body) + + self.assertFalse('not_a_dict' in post_body) + self.assertFalse('key_1' in post_body) + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_log_record_sampled(self, requests_mock): + logger = logging.getLogger(self.id()) + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + logging_sampling_rate=1.0, + ) + logger.addHandler(handler) + logger.warning('Hello_World') + logger.warning('Hello_World2') + logger.warning('Hello_World3') + logger.warning('Hello_World4') + handler.close() + post_body = requests_mock.call_args_list[0][1]['data'] + self.assertTrue('Hello_World' in post_body) + self.assertTrue('Hello_World2' in post_body) + self.assertTrue('Hello_World3' in post_body) + self.assertTrue('Hello_World4' in post_body) + + @mock.patch('requests.post', return_value=mock.Mock()) + def test_log_record_not_sampled(self, requests_mock): + logger = logging.getLogger(self.id()) + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + logging_sampling_rate=0.0, + ) + logger.addHandler(handler) + logger.warning('Hello_World') + logger.warning('Hello_World2') + logger.warning('Hello_World3') + logger.warning('Hello_World4') + handler.close() + self.assertFalse(requests_mock.called) From 329b214ba11e39f8bc339dccd9e1381a104afea7 Mon Sep 17 00:00:00 2001 From: Ram Rachum Date: Mon, 
29 Jun 2020 22:26:29 +0300 Subject: [PATCH 53/79] Use chain.from_iterable in span.py (#911) --- opencensus/trace/span.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opencensus/trace/span.py b/opencensus/trace/span.py index 04aa376e7..4e872c060 100644 --- a/opencensus/trace/span.py +++ b/opencensus/trace/span.py @@ -370,7 +370,7 @@ def finish(self): def __iter__(self): """Iterate through the span tree.""" - for span in chain(*(map(iter, self.children))): + for span in chain.from_iterable(map(iter, self.children)): yield span yield self From b7c7a67d7bcae749c19a3508777522f206da75cf Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Fri, 17 Jul 2020 18:13:59 -0700 Subject: [PATCH 54/79] Attach rate metrics via Heartbeat for Azure exporter (#930) --- .../examples/traces/simple.py | 1 + .../opencensus/ext/azure/common/exporter.py | 4 +- .../opencensus/ext/azure/common/storage.py | 2 + .../ext/azure/log_exporter/__init__.py | 4 + .../ext/azure/metrics_exporter/__init__.py | 15 +- .../heartbeat_metrics/__init__.py | 62 +++++++ .../heartbeat_metrics/heartbeat.py | 64 +++++++ .../ext/azure/trace_exporter/__init__.py | 4 + .../tests/test_azure_heartbeat_metrics.py | 165 ++++++++++++++++++ .../tests/test_stackdriver_stats.py | 9 +- opencensus/common/schedule/__init__.py | 9 +- opencensus/metrics/transport.py | 22 ++- 12 files changed, 352 insertions(+), 9 deletions(-) create mode 100644 contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py create mode 100644 contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py create mode 100644 contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py diff --git a/contrib/opencensus-ext-azure/examples/traces/simple.py b/contrib/opencensus-ext-azure/examples/traces/simple.py index b0008f464..2c2ce6e19 100644 --- a/contrib/opencensus-ext-azure/examples/traces/simple.py +++ 
b/contrib/opencensus-ext-azure/examples/traces/simple.py @@ -23,3 +23,4 @@ with tracer.span(name='foo'): print('Hello, World!') +input(...) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py index 3e67e0eb1..fddf8e1a2 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py @@ -61,7 +61,9 @@ def __init__(self, src, dst): self.src = src self.dst = dst self._stopping = False - super(Worker, self).__init__() + super(Worker, self).__init__( + name="AzureExporter Worker" + ) def run(self): # pragma: NO COVER # Indicate that this thread is an exporter thread. diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py index 304b1f5e3..b36269792 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py @@ -81,6 +81,7 @@ def __init__( maintenance_period=60, # 1 minute retention_period=7*24*60*60, # 7 days write_timeout=60, # 1 minute + source=None, ): self.path = os.path.abspath(path) self.max_size = max_size @@ -92,6 +93,7 @@ def __init__( self._maintenance_task = PeriodicTask( interval=self.maintenance_period, function=self._maintenance_routine, + name='{} Storage Worker'.format(source) ) self._maintenance_task.daemon = True self._maintenance_task.start() diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index a83dec8c7..da4afe178 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -30,6 +30,7 @@ ) from opencensus.ext.azure.common.storage import 
LocalFileStorage from opencensus.ext.azure.common.transport import TransportMixin +from opencensus.ext.azure.metrics_exporter import heartbeat_metrics from opencensus.trace import execution_context logger = logging.getLogger(__name__) @@ -52,12 +53,15 @@ def __init__(self, **options): max_size=self.options.storage_max_size, maintenance_period=self.options.storage_maintenance_period, retention_period=self.options.storage_retention_period, + source=self.__class__.__name__, ) self._telemetry_processors = [] self.addFilter(SamplingFilter(self.options.logging_sampling_rate)) self._queue = Queue(capacity=8192) # TODO: make this configurable self._worker = Worker(self._queue, self) self._worker.start() + heartbeat_metrics.enable_heartbeat_metrics( + self.options.connection_string, self.options.instrumentation_key) def _export(self, batch, event=None): # pragma: NO COVER try: diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py index a5ba2a4ec..beec26d07 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py @@ -52,7 +52,9 @@ def __init__(self, **options): max_size=self.options.storage_max_size, maintenance_period=self.options.storage_maintenance_period, retention_period=self.options.storage_retention_period, + source=self.__class__.__name__, ) + self._atexit_handler = atexit.register(self.shutdown) super(MetricsExporter, self).__init__() def export_metrics(self, metrics): @@ -133,6 +135,13 @@ def _create_envelope(self, data_point, timestamp, properties): envelope.data = Data(baseData=data, baseType="MetricData") return envelope + def shutdown(self): + # flush metrics on exit + self.export_metrics(stats_module.stats.get_metrics()) + if self._atexit_handler is not None: + atexit.unregister(self._atexit_handler) + self._atexit_handler = 
None + def new_metrics_exporter(**options): exporter = MetricsExporter(**options) @@ -142,5 +151,9 @@ def new_metrics_exporter(**options): transport.get_exporter_thread(producers, exporter, interval=exporter.options.export_interval) - atexit.register(exporter.export_metrics, stats_module.stats.get_metrics()) + from opencensus.ext.azure.metrics_exporter import heartbeat_metrics + heartbeat_metrics.enable_heartbeat_metrics( + exporter.options.connection_string, + exporter.options.instrumentation_key + ) return exporter diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py new file mode 100644 index 000000000..51c59a7cf --- /dev/null +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py @@ -0,0 +1,62 @@ +# Copyright 2020, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import threading + +from opencensus.ext.azure.metrics_exporter import MetricsExporter +from opencensus.ext.azure.metrics_exporter.heartbeat_metrics.heartbeat import ( + HeartbeatMetric, +) +from opencensus.metrics import transport +from opencensus.metrics.export.gauge import Registry +from opencensus.metrics.export.metric_producer import MetricProducer + +_HEARTBEAT_METRICS = None +_HEARTBEAT_LOCK = threading.Lock() + + +def enable_heartbeat_metrics(connection_string, ikey): + with _HEARTBEAT_LOCK: + # Only start heartbeat if did not exist before + global _HEARTBEAT_METRICS # pylint: disable=global-statement + if _HEARTBEAT_METRICS is None: + exporter = MetricsExporter( + connection_string=connection_string, + instrumentation_key=ikey, + export_interval=900.0, # Send every 15 minutes + ) + producer = AzureHeartbeatMetricsProducer() + _HEARTBEAT_METRICS = producer + transport.get_exporter_thread([_HEARTBEAT_METRICS], + exporter, + exporter.options.export_interval) + + +def register_metrics(): + registry = Registry() + metric = HeartbeatMetric() + registry.add_gauge(metric()) + return registry + + +class AzureHeartbeatMetricsProducer(MetricProducer): + """Implementation of the producer of heartbeat metrics. + + Includes Azure attach rate metrics, implemented using gauges. + """ + def __init__(self): + self.registry = register_metrics() + + def get_metrics(self): + return self.registry.get_metrics() diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py new file mode 100644 index 000000000..d23417585 --- /dev/null +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py @@ -0,0 +1,64 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import platform +from collections import OrderedDict + +from opencensus.common.version import __version__ as opencensus_version +from opencensus.ext.azure.common.version import __version__ as ext_version +from opencensus.metrics.export.gauge import LongGauge +from opencensus.metrics.label_key import LabelKey +from opencensus.metrics.label_value import LabelValue + + +class HeartbeatMetric: + NAME = "Heartbeat" + + def __init__(self): + self.properties = OrderedDict() + self.properties[LabelKey("sdk", '')] = LabelValue( + 'py{}:oc{}:ext{}'.format( + platform.python_version(), + opencensus_version, + ext_version, + ) + ) + self.properties[LabelKey("osType", '')] = LabelValue(platform.system()) + if os.environ.get("WEBSITE_SITE_NAME") is not None: + # Web apps + self.properties[LabelKey("appSrv_SiteName", '')] = \ + LabelValue(os.environ.get("WEBSITE_SITE_NAME")) + self.properties[LabelKey("appSrv_wsStamp", '')] = \ + LabelValue(os.environ.get("WEBSITE_HOME_STAMPNAME", '')) + self.properties[LabelKey("appSrv_wsHost", '')] = \ + LabelValue(os.environ.get("WEBSITE_HOSTNAME", '')) + elif os.environ.get("FUNCTIONS_WORKER_RUNTIME") is not None: + # Function apps + self.properties[LabelKey("azfunction_appId", '')] = \ + LabelValue(os.environ.get("WEBSITE_HOSTNAME")) + + def __call__(self): + """ Returns a derived gauge for the heartbeat metric. 
+ + :rtype: :class:`opencensus.metrics.export.gauge.LongGauge` + :return: The gauge representing the heartbeat metric + """ + gauge = LongGauge( + HeartbeatMetric.NAME, + 'Heartbeat metric with custom dimensions', + 'count', + list(self.properties.keys())) + gauge.get_or_create_time_series(list(self.properties.values())) + return gauge diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py index ce904e673..7583b8c04 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py @@ -26,6 +26,7 @@ ) from opencensus.ext.azure.common.storage import LocalFileStorage from opencensus.ext.azure.common.transport import TransportMixin +from opencensus.ext.azure.metrics_exporter import heartbeat_metrics from opencensus.trace.span import SpanKind try: @@ -52,9 +53,12 @@ def __init__(self, **options): max_size=self.options.storage_max_size, maintenance_period=self.options.storage_maintenance_period, retention_period=self.options.storage_retention_period, + source=self.__class__.__name__, ) self._telemetry_processors = [] super(AzureExporter, self).__init__(**options) + heartbeat_metrics.enable_heartbeat_metrics( + self.options.connection_string, self.options.instrumentation_key) def span_data_to_envelope(self, sd): envelope = Envelope( diff --git a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py new file mode 100644 index 000000000..f3f3b11f4 --- /dev/null +++ b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py @@ -0,0 +1,165 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import platform +import unittest + +import mock + +from opencensus.common.version import __version__ as opencensus_version +from opencensus.ext.azure.common.version import __version__ as ext_version +from opencensus.ext.azure.metrics_exporter import heartbeat_metrics + + +class TestHeartbeatMetrics(unittest.TestCase): + def setUp(self): + # pylint: disable=protected-access + heartbeat_metrics._HEARTBEAT_METRICS = None + + @mock.patch('opencensus.ext.azure.metrics_exporter' + '.heartbeat_metrics.register_metrics') + def test_producer_ctor(self, avail_mock): + heartbeat_metrics.AzureHeartbeatMetricsProducer() + + self.assertEqual(len(avail_mock.call_args_list), 1) + + def test_producer_get_metrics(self): + producer = heartbeat_metrics.AzureHeartbeatMetricsProducer() + metrics = producer.get_metrics() + + self.assertEqual(len(metrics), 1) + + def test_register_metrics(self): + registry = heartbeat_metrics.register_metrics() + + self.assertEqual(len(registry.get_metrics()), 1) + + @mock.patch('opencensus.metrics.transport.get_exporter_thread') + def test_enable_heartbeat_metrics(self, transport_mock): + ikey = '12345678-1234-5678-abcd-12345678abcd' + # pylint: disable=protected-access + self.assertIsNone(heartbeat_metrics._HEARTBEAT_METRICS) + heartbeat_metrics.enable_heartbeat_metrics(None, ikey) + self.assertTrue( + isinstance( + heartbeat_metrics._HEARTBEAT_METRICS, + heartbeat_metrics.AzureHeartbeatMetricsProducer + ) + ) + transport_mock.assert_called() + + @mock.patch('opencensus.metrics.transport.get_exporter_thread') + def 
test_enable_heartbeat_metrics_exits(self, transport_mock): + # pylint: disable=protected-access + producer = heartbeat_metrics.AzureHeartbeatMetricsProducer() + heartbeat_metrics._HEARTBEAT_METRICS = producer + heartbeat_metrics.enable_heartbeat_metrics(None, None) + self.assertEqual(heartbeat_metrics._HEARTBEAT_METRICS, producer) + transport_mock.assert_not_called() + + def test_heartbeat_metric_init(self): + metric = heartbeat_metrics.HeartbeatMetric() + + self.assertEqual(metric.NAME, 'Heartbeat') + keys = list(metric.properties.keys()) + values = list(metric.properties.values()) + self.assertEqual(len(keys), 2) + self.assertEqual(len(keys), len(values)) + self.assertEqual(keys[0].key, "sdk") + self.assertEqual(keys[1].key, "osType") + self.assertEqual(values[0].value, 'py{}:oc{}:ext{}'.format( + platform.python_version(), + opencensus_version, + ext_version, + )) + self.assertEqual(values[1].value, platform.system()) + + @mock.patch.dict( + os.environ, + { + "WEBSITE_SITE_NAME": "site_name", + "WEBSITE_HOME_STAMPNAME": "stamp_name", + "WEBSITE_HOSTNAME": "host_name", + } + ) + def test_heartbeat_metric_init_webapp(self): + metric = heartbeat_metrics.HeartbeatMetric() + + self.assertEqual(metric.NAME, 'Heartbeat') + keys = list(metric.properties.keys()) + values = list(metric.properties.values()) + self.assertEqual(len(keys), 5) + self.assertEqual(len(keys), len(values)) + self.assertEqual(keys[0].key, "sdk") + self.assertEqual(keys[1].key, "osType") + self.assertEqual(values[0].value, 'py{}:oc{}:ext{}'.format( + platform.python_version(), + opencensus_version, + ext_version, + )) + self.assertEqual(values[1].value, platform.system()) + self.assertEqual(keys[2].key, "appSrv_SiteName") + self.assertEqual(keys[3].key, "appSrv_wsStamp") + self.assertEqual(keys[4].key, "appSrv_wsHost") + self.assertEqual(values[2].value, "site_name") + self.assertEqual(values[3].value, "stamp_name") + self.assertEqual(values[4].value, "host_name") + + @mock.patch.dict( + os.environ, 
+ { + "FUNCTIONS_WORKER_RUNTIME": "python", + "WEBSITE_HOSTNAME": "host_name", + } + ) + def test_heartbeat_metric_init_functionapp(self): + metric = heartbeat_metrics.HeartbeatMetric() + + self.assertEqual(metric.NAME, 'Heartbeat') + keys = list(metric.properties.keys()) + values = list(metric.properties.values()) + self.assertEqual(len(keys), 3) + self.assertEqual(len(keys), len(values)) + self.assertEqual(keys[0].key, "sdk") + self.assertEqual(keys[1].key, "osType") + self.assertEqual(values[0].value, 'py{}:oc{}:ext{}'.format( + platform.python_version(), + opencensus_version, + ext_version, + )) + self.assertEqual(values[1].value, platform.system()) + self.assertEqual(keys[2].key, "azfunction_appId") + self.assertEqual(values[2].value, "host_name") + + def test_heartbeat_metric(self): + # pylint: disable=protected-access + metric = heartbeat_metrics.HeartbeatMetric() + gauge = metric() + + self.assertEqual(gauge.descriptor.name, 'Heartbeat') + self.assertEqual( + gauge.descriptor.description, + 'Heartbeat metric with custom dimensions' + ) + self.assertEqual(gauge.descriptor.unit, 'count') + self.assertEqual(gauge.descriptor._type, 1) + self.assertEqual( + gauge.descriptor.label_keys, + list(metric.properties.keys()) + ) + self.assertEqual( + gauge._len_label_keys, + len(metric.properties.keys()) + ) diff --git a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py index d301a0d15..7bc81fd3e 100644 --- a/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py +++ b/contrib/opencensus-ext-stackdriver/tests/test_stackdriver_stats.py @@ -542,7 +542,14 @@ class MockPeriodicMetricTask(object): Simulate calling export asynchronously from another thread synchronously from this one. 
""" - def __init__(self, interval=None, function=None, args=None, kwargs=None): + def __init__( + self, + interval=None, + function=None, + args=None, + kwargs=None, + name=None + ): self.function = function self.logger = mock.Mock() self.start = mock.Mock() diff --git a/opencensus/common/schedule/__init__.py b/opencensus/common/schedule/__init__.py index 719d89c25..faa1db1d5 100644 --- a/opencensus/common/schedule/__init__.py +++ b/opencensus/common/schedule/__init__.py @@ -31,11 +31,14 @@ class PeriodicTask(threading.Thread): :param args: The args passed in while calling `function`. :type kwargs: dict - :param args: The kwargs passed in while calling `function`. + :param kwargs: The kwargs passed in while calling `function`. + + :type name: str + :param name: The source of the worker. Used for naming. """ - def __init__(self, interval, function, args=None, kwargs=None): - super(PeriodicTask, self).__init__() + def __init__(self, interval, function, args=None, kwargs=None, name=None): + super(PeriodicTask, self).__init__(name=name) self.interval = interval self.function = function self.args = args or [] diff --git a/opencensus/metrics/transport.py b/opencensus/metrics/transport.py index 74d78fc87..14eae5efc 100644 --- a/opencensus/metrics/transport.py +++ b/opencensus/metrics/transport.py @@ -43,11 +43,21 @@ class PeriodicMetricTask(PeriodicTask): :type kwargs: dict :param args: The kwargs passed in while calling `function`. + + :type name: str + :param name: The source of the worker. Used for naming. 
""" daemon = True - def __init__(self, interval=None, function=None, args=None, kwargs=None): + def __init__( + self, + interval=None, + function=None, + args=None, + kwargs=None, + name=None + ): if interval is None: interval = DEFAULT_INTERVAL @@ -62,7 +72,9 @@ def func(*aa, **kw): except Exception: logger.exception("Error handling metric export") - super(PeriodicMetricTask, self).__init__(interval, func, args, kwargs) + super(PeriodicMetricTask, self).__init__( + interval, func, args, kwargs, '{} Worker'.format(name) + ) def run(self): # Indicate that this thread is an exporter thread. @@ -112,6 +124,10 @@ def export_all(): export(itertools.chain(*all_gets)) - tt = PeriodicMetricTask(interval, export_all) + tt = PeriodicMetricTask( + interval, + export_all, + name=exporter.__class__.__name__ + ) tt.start() return tt From a4496ddaf59ffa3ec7cda806e56dcc320bf2d2df Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 23 Jul 2020 09:25:32 -0700 Subject: [PATCH 55/79] Attach rate metrics for VM (#935) --- CHANGELOG.md | 10 ++ context/opencensus-context/CHANGELOG.md | 7 + contrib/opencensus-ext-azure/CHANGELOG.md | 11 +- .../heartbeat_metrics/__init__.py | 12 +- .../heartbeat_metrics/heartbeat.py | 79 ++++++++-- .../tests/test_azure_heartbeat_metrics.py | 145 ++++++++++++++---- .../opencensus-ext-stackdriver/CHANGELOG.md | 3 + 7 files changed, 212 insertions(+), 55 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 34c6e56fb..016b82da2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,16 @@ ## Unreleased +## 0.7.10 +Released 2020-06-29 + +- Updated `azure` module +([#903](https://github.com/census-instrumentation/opencensus-python/pull/903), + [#925](https://github.com/census-instrumentation/opencensus-python/pull/925)) + +- Updated `stackdriver` module +([#919](https://github.com/census-instrumentation/opencensus-python/pull/919)) + ## 0.7.9 Released 2020-06-17 diff --git a/context/opencensus-context/CHANGELOG.md 
b/context/opencensus-context/CHANGELOG.md index a470e2320..a49592ac1 100644 --- a/context/opencensus-context/CHANGELOG.md +++ b/context/opencensus-context/CHANGELOG.md @@ -2,6 +2,13 @@ ## Unreleased +## opencensus-ext-context 0.1.2 + +## 0.1.2 +Released 2020-06-29 + +- Release source distribution + ## 0.1.1 Released 2019-05-31 diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index ce9f4a741..f7a69a9b1 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -2,8 +2,18 @@ ## Unreleased +- Attach rate metrics via Heartbeat for Web and Function apps + ([#930](https://github.com/census-instrumentation/opencensus-python/pull/930)) +- Attach rate metrics for VM + ([#935](https://github.com/census-instrumentation/opencensus-python/pull/935)) + +## 1.0.4 +Released 2020-06-29 + - Remove dependency rate from standard metrics ([#903](https://github.com/census-instrumentation/opencensus-python/pull/903)) +- Implement customEvents using AzureEventHandler + ([#925](https://github.com/census-instrumentation/opencensus-python/pull/925)) ## 1.0.3 Released 2020-06-17 @@ -13,7 +23,6 @@ Released 2020-06-17 - Add support to initialize azure exporters with proxies ([#902](https://github.com/census-instrumentation/opencensus-python/pull/902)) - ## 1.0.2 Released 2020-02-04 diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py index 51c59a7cf..b6cc12ede 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py @@ -19,7 +19,6 @@ HeartbeatMetric, ) from opencensus.metrics import transport -from opencensus.metrics.export.gauge import Registry from 
opencensus.metrics.export.metric_producer import MetricProducer _HEARTBEAT_METRICS = None @@ -43,20 +42,13 @@ def enable_heartbeat_metrics(connection_string, ikey): exporter.options.export_interval) -def register_metrics(): - registry = Registry() - metric = HeartbeatMetric() - registry.add_gauge(metric()) - return registry - - class AzureHeartbeatMetricsProducer(MetricProducer): """Implementation of the producer of heartbeat metrics. Includes Azure attach rate metrics, implemented using gauges. """ def __init__(self): - self.registry = register_metrics() + self._heartbeat = HeartbeatMetric() def get_metrics(self): - return self.registry.get_metrics() + return self._heartbeat.get_metrics() diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py index d23417585..12bbe8124 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py @@ -12,22 +12,60 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import datetime +import json import os import platform from collections import OrderedDict +import requests + from opencensus.common.version import __version__ as opencensus_version from opencensus.ext.azure.common.version import __version__ as ext_version from opencensus.metrics.export.gauge import LongGauge from opencensus.metrics.label_key import LabelKey from opencensus.metrics.label_value import LabelValue +_AIMS_URI = "http://169.254.169.254/metadata/instance/compute" +_AIMS_API_VERSION = "api-version=2017-12-01" +_AIMS_FORMAT = "format=json" + class HeartbeatMetric: NAME = "Heartbeat" def __init__(self): + self.vm_data = {} + self.is_vm = False self.properties = OrderedDict() + self.update_properties() + self.heartbeat = LongGauge( + HeartbeatMetric.NAME, + 'Heartbeat metric with custom dimensions', + 'count', + list(self.properties.keys()), + ) + self.heartbeat.get_or_create_time_series( + list(self.properties.values()) + ) + + def get_metrics(self): + if self.is_vm: + # Only need to update if in vm (properties could change) + self.properties.clear() + self.update_properties() + self.heartbeat = LongGauge( + HeartbeatMetric.NAME, + 'Heartbeat metric with custom dimensions', + 'count', + list(self.properties.keys()), + ) + self.heartbeat.get_or_create_time_series( + list(self.properties.values()) + ) + return [self.heartbeat.get_metric(datetime.datetime.utcnow())] + + def update_properties(self): self.properties[LabelKey("sdk", '')] = LabelValue( 'py{}:oc{}:ext{}'.format( platform.python_version(), @@ -48,17 +86,34 @@ def __init__(self): # Function apps self.properties[LabelKey("azfunction_appId", '')] = \ LabelValue(os.environ.get("WEBSITE_HOSTNAME")) + elif self.get_azure_compute_metadata(): + # VM + if self.vm_data: + self.properties[LabelKey("azInst_vmId", '')] = \ + LabelValue(self.vm_data.get("vmId", '')) + self.properties[LabelKey("azInst_subscriptionId", '')] = \ + LabelValue(self.vm_data.get("subscriptionId", '')) + 
self.properties[LabelKey("azInst_osType", '')] = \ + LabelValue(self.vm_data.get("osType", '')) - def __call__(self): - """ Returns a derived gauge for the heartbeat metric. + def get_azure_compute_metadata(self): + try: + request_url = "{0}?{1}&{2}".format( + _AIMS_URI, _AIMS_API_VERSION, _AIMS_FORMAT) + response = requests.get(request_url, headers={"MetaData": "True"}) + except requests.exceptions.ConnectionError: + # Not in VM + self.is_vm = False + return False + except requests.exceptions.RequestException: + pass # retry - :rtype: :class:`opencensus.metrics.export.gauge.LongGauge` - :return: The gauge representing the heartbeat metric - """ - gauge = LongGauge( - HeartbeatMetric.NAME, - 'Heartbeat metric with custom dimensions', - 'count', - list(self.properties.keys())) - gauge.get_or_create_time_series(list(self.properties.values())) - return gauge + self.is_vm = True + try: + text = response.text + self.vm_data = json.loads(text) + except Exception: # pylint: disable=broad-except + # Error in reading response body, retry + pass + + return True diff --git a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py index f3f3b11f4..c86f48446 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py @@ -12,28 +12,46 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import json import os import platform import unittest import mock +import requests from opencensus.common.version import __version__ as opencensus_version from opencensus.ext.azure.common.version import __version__ as ext_version from opencensus.ext.azure.metrics_exporter import heartbeat_metrics +class MockResponse(object): + def __init__(self, status_code, text): + self.status_code = status_code + self.text = text + + +def throw(exc_type, *args, **kwargs): + def func(*_args, **_kwargs): + raise exc_type(*args, **kwargs) + return func + + class TestHeartbeatMetrics(unittest.TestCase): def setUp(self): # pylint: disable=protected-access heartbeat_metrics._HEARTBEAT_METRICS = None - @mock.patch('opencensus.ext.azure.metrics_exporter' - '.heartbeat_metrics.register_metrics') - def test_producer_ctor(self, avail_mock): - heartbeat_metrics.AzureHeartbeatMetricsProducer() - - self.assertEqual(len(avail_mock.call_args_list), 1) + def test_producer_ctor(self): + producer = heartbeat_metrics.AzureHeartbeatMetricsProducer() + # pylint: disable=protected-access + metric = producer._heartbeat + self.assertTrue( + isinstance( + metric, + heartbeat_metrics.heartbeat.HeartbeatMetric + ) + ) def test_producer_get_metrics(self): producer = heartbeat_metrics.AzureHeartbeatMetricsProducer() @@ -41,11 +59,6 @@ def test_producer_get_metrics(self): self.assertEqual(len(metrics), 1) - def test_register_metrics(self): - registry = heartbeat_metrics.register_metrics() - - self.assertEqual(len(registry.get_metrics()), 1) - @mock.patch('opencensus.metrics.transport.get_exporter_thread') def test_enable_heartbeat_metrics(self, transport_mock): ikey = '12345678-1234-5678-abcd-12345678abcd' @@ -61,7 +74,7 @@ def test_enable_heartbeat_metrics(self, transport_mock): transport_mock.assert_called() @mock.patch('opencensus.metrics.transport.get_exporter_thread') - def test_enable_heartbeat_metrics_exits(self, transport_mock): + def test_enable_heartbeat_metrics_exists(self, transport_mock): # 
pylint: disable=protected-access producer = heartbeat_metrics.AzureHeartbeatMetricsProducer() heartbeat_metrics._HEARTBEAT_METRICS = producer @@ -85,6 +98,23 @@ def test_heartbeat_metric_init(self): ext_version, )) self.assertEqual(values[1].value, platform.system()) + gauge = metric.heartbeat + + self.assertEqual(gauge.descriptor.name, 'Heartbeat') + self.assertEqual( + gauge.descriptor.description, + 'Heartbeat metric with custom dimensions' + ) + self.assertEqual(gauge.descriptor.unit, 'count') + self.assertEqual(gauge.descriptor._type, 1) + self.assertEqual( + gauge.descriptor.label_keys, + list(metric.properties.keys()) + ) + self.assertEqual( + gauge._len_label_keys, + len(metric.properties.keys()) + ) @mock.patch.dict( os.environ, @@ -143,23 +173,74 @@ def test_heartbeat_metric_init_functionapp(self): self.assertEqual(keys[2].key, "azfunction_appId") self.assertEqual(values[2].value, "host_name") - def test_heartbeat_metric(self): - # pylint: disable=protected-access - metric = heartbeat_metrics.HeartbeatMetric() - gauge = metric() - - self.assertEqual(gauge.descriptor.name, 'Heartbeat') - self.assertEqual( - gauge.descriptor.description, - 'Heartbeat metric with custom dimensions' - ) - self.assertEqual(gauge.descriptor.unit, 'count') - self.assertEqual(gauge.descriptor._type, 1) - self.assertEqual( - gauge.descriptor.label_keys, - list(metric.properties.keys()) - ) - self.assertEqual( - gauge._len_label_keys, - len(metric.properties.keys()) - ) + def test_heartbeat_metric_init_vm(self): + with mock.patch('requests.get') as get: + get.return_value = MockResponse( + 200, + json.dumps( + { + 'vmId': 5, + 'subscriptionId': 3, + 'osType': 'Linux' + } + ) + ) + metric = heartbeat_metrics.HeartbeatMetric() + self.assertTrue(metric.is_vm) + self.assertEqual(metric.NAME, 'Heartbeat') + keys = list(metric.properties.keys()) + values = list(metric.properties.values()) + self.assertEqual(len(keys), 5) + self.assertEqual(len(keys), len(values)) + 
self.assertEqual(keys[0].key, "sdk") + self.assertEqual(keys[1].key, "osType") + self.assertEqual(values[0].value, 'py{}:oc{}:ext{}'.format( + platform.python_version(), + opencensus_version, + ext_version, + )) + self.assertEqual(values[1].value, platform.system()) + self.assertEqual(keys[2].key, "azInst_vmId") + self.assertEqual(values[2].value, 5) + self.assertEqual(keys[3].key, "azInst_subscriptionId") + self.assertEqual(values[3].value, 3) + self.assertEqual(keys[4].key, "azInst_osType") + self.assertEqual(values[4].value, "Linux") + + def test_heartbeat_metric_not_vm(self): + with mock.patch( + 'requests.get', + throw(requests.exceptions.ConnectionError) + ): + metric = heartbeat_metrics.HeartbeatMetric() + self.assertFalse(metric.is_vm) + self.assertEqual(metric.NAME, 'Heartbeat') + keys = list(metric.properties.keys()) + self.assertEqual(len(keys), 2) + + def test_heartbeat_metric_vm_error_response(self): + with mock.patch('requests.get') as get: + get.return_value = MockResponse( + 200, + json.dumps( + { + 'vmId': 5, + 'subscriptionId': 3, + 'osType': 'Linux' + } + ) + ) + metric = heartbeat_metrics.HeartbeatMetric() + self.assertTrue(metric.is_vm) + keys = list(metric.properties.keys()) + self.assertEqual(len(keys), 5) + with mock.patch( + 'requests.get', + throw(Exception) + ): + metric.vm_data.clear() + self.assertTrue(metric.is_vm) + self.assertEqual(len(metric.vm_data), 0) + self.assertTrue(metric.is_vm) + keys = list(metric.properties.keys()) + self.assertEqual(len(keys), 5) diff --git a/contrib/opencensus-ext-stackdriver/CHANGELOG.md b/contrib/opencensus-ext-stackdriver/CHANGELOG.md index 98e4c6e6b..8e79c9f15 100644 --- a/contrib/opencensus-ext-stackdriver/CHANGELOG.md +++ b/contrib/opencensus-ext-stackdriver/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased +## 0.7.3 +Released 2020-06-29 + - Add mean property for distribution values ([#919](https://github.com/census-instrumentation/opencensus-python/pull/919)) From 
b332ae3e7a21bc061b6af5ed13ae516ad9ba91ad Mon Sep 17 00:00:00 2001 From: Guido Tournois Date: Mon, 27 Jul 2020 18:09:17 +0200 Subject: [PATCH 56/79] Change logger to module logger instance, rather than root logger (#938) --- opencensus/stats/measure_to_view_map.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/opencensus/stats/measure_to_view_map.py b/opencensus/stats/measure_to_view_map.py index 47863e328..6eab9b2b2 100644 --- a/opencensus/stats/measure_to_view_map.py +++ b/opencensus/stats/measure_to_view_map.py @@ -19,6 +19,8 @@ from opencensus.stats import metric_utils from opencensus.stats import view_data as view_data_module +logger = logging.getLogger(__name__) + class MeasureToViewMap(object): """Measure To View Map stores a map from names of Measures to @@ -90,13 +92,13 @@ def register_view(self, view, timestamp): # ignore the views that are already registered return else: - logging.warning( + logger.warning( "A different view with the same name is already registered" ) # pragma: NO COVER measure = view.measure registered_measure = self._registered_measures.get(measure.name) if registered_measure is not None and registered_measure != measure: - logging.warning( + logger.warning( "A different measure with the same name is already registered") self._registered_views[view.name] = view if registered_measure is None: From 862885af8081dc3ede50d1df4a28e4400c22e8f8 Mon Sep 17 00:00:00 2001 From: Aravin <34178459+aravinsiva@users.noreply.github.com> Date: Wed, 29 Jul 2020 11:56:41 -0400 Subject: [PATCH 57/79] Changing default transport for stackdriver exporter (#929) --- contrib/opencensus-ext-stackdriver/CHANGELOG.md | 2 ++ contrib/opencensus-ext-stackdriver/README.rst | 11 +++++------ .../ext/stackdriver/trace_exporter/__init__.py | 4 ++-- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/contrib/opencensus-ext-stackdriver/CHANGELOG.md b/contrib/opencensus-ext-stackdriver/CHANGELOG.md index 8e79c9f15..7dd8184aa 100644 --- 
a/contrib/opencensus-ext-stackdriver/CHANGELOG.md +++ b/contrib/opencensus-ext-stackdriver/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog ## Unreleased + - Change default transporter in stackdriver exporter + ([#929](https://github.com/census-instrumentation/opencensus-python/pull/929)) ## 0.7.3 Released 2020-06-29 diff --git a/contrib/opencensus-ext-stackdriver/README.rst b/contrib/opencensus-ext-stackdriver/README.rst index 46b878a15..d938ad12f 100644 --- a/contrib/opencensus-ext-stackdriver/README.rst +++ b/contrib/opencensus-ext-stackdriver/README.rst @@ -35,20 +35,19 @@ This example shows how to report the traces to Stackdriver Trace: pip install google-cloud-trace pipenv install google-cloud-trace -By default, traces are exported synchronously, which introduces latency during -your code's execution. To avoid blocking code execution, you can initialize -your exporter to use a background thread. +By default, traces are exported asynchronously, to reduce latency during +your code's execution. If you would like to export data on the main thread +use the synchronous transporter: -This example shows how to configure OpenCensus to use a background thread: .. 
code:: python - from opencensus.common.transports.async_ import AsyncTransport + from opencensus.common.transports.sync import SyncTransport from opencensus.ext.stackdriver import trace_exporter as stackdriver_exporter from opencensus.trace import tracer as tracer_module exporter = stackdriver_exporter.StackdriverExporter( - project_id='your_cloud_project', transport=AsyncTransport) + project_id='your_cloud_project', transport=SyncTransport) tracer = tracer_module.Tracer(exporter=exporter) Stats diff --git a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py index 8e07756cc..cf7fe0d79 100644 --- a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py @@ -23,7 +23,7 @@ k8s_utils, monitored_resource, ) -from opencensus.common.transports import sync +from opencensus.common.transports.async_ import AsyncTransport from opencensus.common.version import __version__ from opencensus.trace import attributes_helper, base_exporter, span_data from opencensus.trace.attributes import Attributes @@ -180,7 +180,7 @@ class StackdriverExporter(base_exporter.Exporter): """ def __init__(self, client=None, project_id=None, - transport=sync.SyncTransport): + transport=AsyncTransport): # The client will handle the case when project_id is None if client is None: client = Client(project=project_id) From 6754bf4e768353a01cea3a9aea95e7e8b5c8b8fa Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 30 Jul 2020 02:07:23 -0700 Subject: [PATCH 58/79] Add a timeout to azure meta data service (#939) --- .../metrics_exporter/heartbeat_metrics/heartbeat.py | 5 +++-- .../tests/test_azure_heartbeat_metrics.py | 11 +++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git 
a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py index 12bbe8124..8c142210e 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py @@ -100,8 +100,9 @@ def get_azure_compute_metadata(self): try: request_url = "{0}?{1}&{2}".format( _AIMS_URI, _AIMS_API_VERSION, _AIMS_FORMAT) - response = requests.get(request_url, headers={"MetaData": "True"}) - except requests.exceptions.ConnectionError: + response = requests.get( + request_url, headers={"MetaData": "True"}, timeout=5.0) + except (requests.exceptions.ConnectionError, requests.Timeout): # Not in VM self.is_vm = False return False diff --git a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py index c86f48446..934e48a6c 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py @@ -218,6 +218,17 @@ def test_heartbeat_metric_not_vm(self): keys = list(metric.properties.keys()) self.assertEqual(len(keys), 2) + def test_heartbeat_metric_not_vm_timeout(self): + with mock.patch( + 'requests.get', + throw(requests.Timeout) + ): + metric = heartbeat_metrics.HeartbeatMetric() + self.assertFalse(metric.is_vm) + self.assertEqual(metric.NAME, 'Heartbeat') + keys = list(metric.properties.keys()) + self.assertEqual(len(keys), 2) + def test_heartbeat_metric_vm_error_response(self): with mock.patch('requests.get') as get: get.return_value = MockResponse( From 9d6a607aaaf62b64dcd77c2850d21736fb659d90 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 30 Jul 2020 07:57:12 -0700 Subject: [PATCH 59/79] Add links for trace exporter envelopes (#936) --- 
contrib/opencensus-ext-azure/CHANGELOG.md | 2 ++ .../examples/traces/config.py | 29 ------------------- .../ext/azure/trace_exporter/__init__.py | 9 +++++- .../tests/test_azure_trace_exporter.py | 17 ++++++++++- 4 files changed, 26 insertions(+), 31 deletions(-) delete mode 100644 contrib/opencensus-ext-azure/examples/traces/config.py diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index f7a69a9b1..7b922c0bb 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -6,6 +6,8 @@ ([#930](https://github.com/census-instrumentation/opencensus-python/pull/930)) - Attach rate metrics for VM ([#935](https://github.com/census-instrumentation/opencensus-python/pull/935)) +- Add links in properties for trace exporter envelopes + ([#936](https://github.com/census-instrumentation/opencensus-python/pull/936)) ## 1.0.4 Released 2020-06-29 diff --git a/contrib/opencensus-ext-azure/examples/traces/config.py b/contrib/opencensus-ext-azure/examples/traces/config.py deleted file mode 100644 index c5a9e025a..000000000 --- a/contrib/opencensus-ext-azure/examples/traces/config.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2019, OpenCensus Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from opencensus.ext.azure.trace_exporter import AzureExporter -from opencensus.trace.samplers import ProbabilitySampler -from opencensus.trace.tracer import Tracer - -tracer = Tracer( - exporter=AzureExporter( - # TODO: replace the all-zero GUID with your instrumentation key. - connection_string='InstrumentationKey= \ - 00000000-0000-0000-0000-000000000000', - ), - sampler=ProbabilitySampler(rate=1.0), -) - -with tracer.span(name='foo'): - print('Hello, World!') diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py index 7583b8c04..c67e64b87 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import json import logging from opencensus.common.schedule import QueueExitEvent @@ -146,7 +147,13 @@ def span_data_to_envelope(self, sd): else: data.type = 'INPROC' data.success = True - # TODO: links, tracestate, tags + if sd.links: + links = [] + for link in sd.links: + links.append( + {"operation_Id": link.trace_id, "id": link.span_id}) + data.properties["_MS.links"] = json.dumps(links) + # TODO: tracestate, tags for key in sd.attributes: # This removes redundant data from ApplicationInsights if key.startswith('http.'): diff --git a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py index 5ff70e234..f1b874d78 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import json import os import shutil import unittest @@ -19,6 +20,7 @@ import mock from opencensus.ext.azure import trace_exporter +from opencensus.trace.link import Link TEST_FOLDER = os.path.abspath('.test.exporter') @@ -141,7 +143,9 @@ def test_span_data_to_envelope(self): start_time='2010-10-24T07:28:38.123456Z', end_time='2010-10-24T07:28:38.234567Z', stack_trace=None, - links=None, + links=[ + Link('6e0c63257de34c90bf9efcd03927272e', '6e0c63257de34c91') + ], status=Status(0), annotations=None, message_events=None, @@ -188,6 +192,17 @@ def test_span_data_to_envelope(self): self.assertEqual( envelope.data.baseType, 'RemoteDependencyData') + json_dict = json.loads( + envelope.data.baseData.properties["_MS.links"] + )[0] + self.assertEqual( + json_dict["id"], + "6e0c63257de34c91", + ) + self.assertEqual( + json_dict["operation_Id"], + "6e0c63257de34c90bf9efcd03927272e", + ) # SpanKind.CLIENT unknown type envelope = exporter.span_data_to_envelope(SpanData( From 67502e512ab073382b4fd12580c3b4534968129b Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Wed, 5 Aug 2020 12:12:04 -0700 Subject: [PATCH 60/79] Azure Monitor metrics exporter atexit fixes (#943) --- CHANGELOG.md | 3 + .../ext/azure/metrics_exporter/__init__.py | 18 +++-- .../heartbeat_metrics/__init__.py | 7 +- .../tests/test_azure_metrics_exporter.py | 77 +++++++++++++------ opencensus/metrics/transport.py | 9 +++ tests/unit/metrics/test_transport.py | 9 +++ 6 files changed, 90 insertions(+), 33 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 016b82da2..14ea915ab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased +- PeriodicMetricTask flush on exit +([#943](https://github.com/census-instrumentation/opencensus-python/pull/943)) + ## 0.7.10 Released 2020-06-29 diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py index beec26d07..bd523809a 100644 --- 
a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py @@ -55,6 +55,7 @@ def __init__(self, **options): source=self.__class__.__name__, ) self._atexit_handler = atexit.register(self.shutdown) + self.exporter_thread = None super(MetricsExporter, self).__init__() def export_metrics(self, metrics): @@ -136,11 +137,11 @@ def _create_envelope(self, data_point, timestamp, properties): return envelope def shutdown(self): - # flush metrics on exit - self.export_metrics(stats_module.stats.get_metrics()) - if self._atexit_handler is not None: - atexit.unregister(self._atexit_handler) - self._atexit_handler = None + # Flush the exporter thread + if self.exporter_thread: + self.exporter_thread.close() + # Shutsdown storage worker + self.storage.close() def new_metrics_exporter(**options): @@ -148,9 +149,10 @@ def new_metrics_exporter(**options): producers = [stats_module.stats] if exporter.options.enable_standard_metrics: producers.append(standard_metrics.producer) - transport.get_exporter_thread(producers, - exporter, - interval=exporter.options.export_interval) + exporter.exporter_thread = transport.get_exporter_thread( + producers, + exporter, + interval=exporter.options.export_interval) from opencensus.ext.azure.metrics_exporter import heartbeat_metrics heartbeat_metrics.enable_heartbeat_metrics( exporter.options.connection_string, diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py index b6cc12ede..6f13b2709 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/__init__.py @@ -37,9 +37,10 @@ def enable_heartbeat_metrics(connection_string, ikey): ) producer = 
AzureHeartbeatMetricsProducer() _HEARTBEAT_METRICS = producer - transport.get_exporter_thread([_HEARTBEAT_METRICS], - exporter, - exporter.options.export_interval) + exporter.exporter_thread = \ + transport.get_exporter_thread([_HEARTBEAT_METRICS], + exporter, + exporter.options.export_interval) class AzureHeartbeatMetricsProducer(MetricProducer): diff --git a/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py index 9c84118f0..497422c17 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py @@ -186,32 +186,65 @@ def test_create_envelope(self): self.assertTrue('properties' in envelope.data.baseData) self.assertEqual(envelope.data.baseData.properties, properties) + def test_shutdown(self): + mock_thread = mock.Mock() + mock_storage = mock.Mock() + exporter = metrics_exporter.MetricsExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd' + ) + exporter.exporter_thread = mock_thread + exporter.storage = mock_storage + exporter.shutdown() + mock_thread.close.assert_called_once() + mock_storage.close.assert_called_once() + @mock.patch('opencensus.ext.azure.metrics_exporter' '.transport.get_exporter_thread') def test_new_metrics_exporter(self, exporter_mock): - iKey = '12345678-1234-5678-abcd-12345678abcd' - exporter = metrics_exporter.new_metrics_exporter( - instrumentation_key=iKey) - - self.assertEqual(exporter.options.instrumentation_key, iKey) - self.assertEqual(len(exporter_mock.call_args_list), 1) - self.assertEqual(len(exporter_mock.call_args[0][0]), 2) - producer_class = standard_metrics.AzureStandardMetricsProducer - self.assertFalse(isinstance(exporter_mock.call_args[0][0][0], - producer_class)) - self.assertTrue(isinstance(exporter_mock.call_args[0][0][1], - producer_class)) + with mock.patch('opencensus.ext.azure.metrics_exporter' + 
'.heartbeat_metrics.enable_heartbeat_metrics') as hb: + hb.return_value = None + iKey = '12345678-1234-5678-abcd-12345678abcd' + exporter = metrics_exporter.new_metrics_exporter( + instrumentation_key=iKey) + + self.assertEqual(exporter.options.instrumentation_key, iKey) + self.assertEqual(len(exporter_mock.call_args_list), 1) + self.assertEqual(len(exporter_mock.call_args[0][0]), 2) + producer_class = standard_metrics.AzureStandardMetricsProducer + self.assertFalse(isinstance(exporter_mock.call_args[0][0][0], + producer_class)) + self.assertTrue(isinstance(exporter_mock.call_args[0][0][1], + producer_class)) @mock.patch('opencensus.ext.azure.metrics_exporter' '.transport.get_exporter_thread') def test_new_metrics_exporter_no_standard_metrics(self, exporter_mock): - iKey = '12345678-1234-5678-abcd-12345678abcd' - exporter = metrics_exporter.new_metrics_exporter( - instrumentation_key=iKey, enable_standard_metrics=False) - - self.assertEqual(exporter.options.instrumentation_key, iKey) - self.assertEqual(len(exporter_mock.call_args_list), 1) - self.assertEqual(len(exporter_mock.call_args[0][0]), 1) - producer_class = standard_metrics.AzureStandardMetricsProducer - self.assertFalse(isinstance(exporter_mock.call_args[0][0][0], - producer_class)) + with mock.patch('opencensus.ext.azure.metrics_exporter' + '.heartbeat_metrics.enable_heartbeat_metrics') as hb: + hb.return_value = None + iKey = '12345678-1234-5678-abcd-12345678abcd' + exporter = metrics_exporter.new_metrics_exporter( + instrumentation_key=iKey, enable_standard_metrics=False) + + self.assertEqual(exporter.options.instrumentation_key, iKey) + self.assertEqual(len(exporter_mock.call_args_list), 1) + self.assertEqual(len(exporter_mock.call_args[0][0]), 1) + producer_class = standard_metrics.AzureStandardMetricsProducer + self.assertFalse(isinstance(exporter_mock.call_args[0][0][0], + producer_class)) + + @mock.patch('opencensus.ext.azure.metrics_exporter' + '.transport.get_exporter_thread') + def 
test_new_metrics_exporter_heartbeat(self, exporter_mock): + with mock.patch('opencensus.ext.azure.metrics_exporter' + '.heartbeat_metrics.enable_heartbeat_metrics') as hb: + iKey = '12345678-1234-5678-abcd-12345678abcd' + exporter = metrics_exporter.new_metrics_exporter( + instrumentation_key=iKey) + + self.assertEqual(exporter.options.instrumentation_key, iKey) + self.assertEqual(len(hb.call_args_list), 1) + self.assertEqual(len(hb.call_args[0]), 2) + self.assertEqual(hb.call_args[0][0], None) + self.assertEqual(hb.call_args[0][1], iKey) diff --git a/opencensus/metrics/transport.py b/opencensus/metrics/transport.py index 14eae5efc..07b359847 100644 --- a/opencensus/metrics/transport.py +++ b/opencensus/metrics/transport.py @@ -62,6 +62,8 @@ def __init__( interval = DEFAULT_INTERVAL self.func = function + self.args = args + self.kwargs = kwargs def func(*aa, **kw): try: @@ -82,6 +84,13 @@ def run(self): execution_context.set_is_exporter(True) super(PeriodicMetricTask, self).run() + def close(self): + try: + return self.func(*self.args, **self.kwargs) + except Exception as ex: + logger.exception("Error handling metric flush: {}".format(ex)) + self.cancel() + def get_exporter_thread(metric_producers, exporter, interval=None): """Get a running task that periodically exports metrics. 
diff --git a/tests/unit/metrics/test_transport.py b/tests/unit/metrics/test_transport.py index 630f90669..cd2ad0a52 100644 --- a/tests/unit/metrics/test_transport.py +++ b/tests/unit/metrics/test_transport.py @@ -58,6 +58,7 @@ def test_periodic_task(self): self.assertEqual(mock_func.call_count, 2) time.sleep(INTERVAL) self.assertEqual(mock_func.call_count, 3) + task.cancel() def test_periodic_task_cancel(self): mock_func = mock.Mock() @@ -69,6 +70,14 @@ def test_periodic_task_cancel(self): time.sleep(INTERVAL) self.assertEqual(mock_func.call_count, 1) + def test_periodic_task_close(self): + mock_func = mock.Mock() + task = transport.PeriodicMetricTask(100, mock_func) + task.start() + mock_func.assert_not_called() + task.close() + self.assertEqual(mock_func.call_count, 1) + @mock.patch('opencensus.metrics.transport.DEFAULT_INTERVAL', INTERVAL) @mock.patch('opencensus.metrics.transport.logger') From 6d20b642ef69c5b5ce76b6d44528014bbf9fa472 Mon Sep 17 00:00:00 2001 From: Bri Date: Mon, 10 Aug 2020 15:49:55 -0500 Subject: [PATCH 61/79] Add exception source data to logging output for easier debugging (#942) --- opencensus/metrics/transport.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opencensus/metrics/transport.py b/opencensus/metrics/transport.py index 07b359847..88a59b8ac 100644 --- a/opencensus/metrics/transport.py +++ b/opencensus/metrics/transport.py @@ -71,8 +71,8 @@ def func(*aa, **kw): except TransportError as ex: logger.exception(ex) self.cancel() - except Exception: - logger.exception("Error handling metric export") + except Exception as ex: + logger.exception("Error handling metric export: {}".format(ex)) super(PeriodicMetricTask, self).__init__( interval, func, args, kwargs, '{} Worker'.format(name) From e1d3c3b7d86ea8601f59836d0a4ec4c55117d33f Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Wed, 12 Aug 2020 13:33:24 -0700 Subject: [PATCH 62/79] Only ping azuremetadataservice if vm retry (#946) --- 
contrib/opencensus-ext-azure/CHANGELOG.md | 2 + .../heartbeat_metrics/heartbeat.py | 64 ++++++++++--------- .../tests/test_azure_heartbeat_metrics.py | 50 +++++++-------- 3 files changed, 62 insertions(+), 54 deletions(-) diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 7b922c0bb..45719c679 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -8,6 +8,8 @@ ([#935](https://github.com/census-instrumentation/opencensus-python/pull/935)) - Add links in properties for trace exporter envelopes ([#936](https://github.com/census-instrumentation/opencensus-python/pull/936)) +- Fix attach rate metrics for VM to only ping data service on retry + ([#946](https://github.com/census-instrumentation/opencensus-python/pull/946)) ## 1.0.4 Released 2020-06-29 diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py index 8c142210e..11527a360 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py @@ -36,9 +36,9 @@ class HeartbeatMetric: def __init__(self): self.vm_data = {} - self.is_vm = False + self.vm_retry = False self.properties = OrderedDict() - self.update_properties() + self._init_properties() self.heartbeat = LongGauge( HeartbeatMetric.NAME, 'Heartbeat metric with custom dimensions', @@ -50,22 +50,23 @@ def __init__(self): ) def get_metrics(self): - if self.is_vm: - # Only need to update if in vm (properties could change) - self.properties.clear() - self.update_properties() - self.heartbeat = LongGauge( - HeartbeatMetric.NAME, - 'Heartbeat metric with custom dimensions', - 'count', - list(self.properties.keys()), - ) - 
self.heartbeat.get_or_create_time_series( - list(self.properties.values()) - ) + if self.vm_retry: + # Only need to possibly update if vm retry + if self._get_azure_compute_metadata() and not self.vm_retry: + self._populate_vm_data() + # Recreate the metric to initialize key/values + self.heartbeat = LongGauge( + HeartbeatMetric.NAME, + 'Heartbeat metric with custom dimensions', + 'count', + list(self.properties.keys()), + ) + self.heartbeat.get_or_create_time_series( + list(self.properties.values()) + ) return [self.heartbeat.get_metric(datetime.datetime.utcnow())] - def update_properties(self): + def _init_properties(self): self.properties[LabelKey("sdk", '')] = LabelValue( 'py{}:oc{}:ext{}'.format( platform.python_version(), @@ -86,17 +87,11 @@ def update_properties(self): # Function apps self.properties[LabelKey("azfunction_appId", '')] = \ LabelValue(os.environ.get("WEBSITE_HOSTNAME")) - elif self.get_azure_compute_metadata(): + elif self._get_azure_compute_metadata() and not self.vm_retry: # VM - if self.vm_data: - self.properties[LabelKey("azInst_vmId", '')] = \ - LabelValue(self.vm_data.get("vmId", '')) - self.properties[LabelKey("azInst_subscriptionId", '')] = \ - LabelValue(self.vm_data.get("subscriptionId", '')) - self.properties[LabelKey("azInst_osType", '')] = \ - LabelValue(self.vm_data.get("osType", '')) + self._populate_vm_data() - def get_azure_compute_metadata(self): + def _get_azure_compute_metadata(self): try: request_url = "{0}?{1}&{2}".format( _AIMS_URI, _AIMS_API_VERSION, _AIMS_FORMAT) @@ -104,17 +99,28 @@ def get_azure_compute_metadata(self): request_url, headers={"MetaData": "True"}, timeout=5.0) except (requests.exceptions.ConnectionError, requests.Timeout): # Not in VM - self.is_vm = False + self.vm_retry = False return False except requests.exceptions.RequestException: - pass # retry + self.vm_retry = True # retry + return False - self.is_vm = True try: text = response.text self.vm_data = json.loads(text) except Exception: # pylint: 
disable=broad-except # Error in reading response body, retry - pass + self.vm_retry = True + return False + self.vm_retry = False return True + + def _populate_vm_data(self): + if self.vm_data: + self.properties[LabelKey("azInst_vmId", '')] = \ + LabelValue(self.vm_data.get("vmId", '')) + self.properties[LabelKey("azInst_subscriptionId", '')] = \ + LabelValue(self.vm_data.get("subscriptionId", '')) + self.properties[LabelKey("azInst_osType", '')] = \ + LabelValue(self.vm_data.get("osType", '')) diff --git a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py index 934e48a6c..729ebc2a4 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py @@ -186,7 +186,7 @@ def test_heartbeat_metric_init_vm(self): ) ) metric = heartbeat_metrics.HeartbeatMetric() - self.assertTrue(metric.is_vm) + self.assertFalse(metric.vm_retry) self.assertEqual(metric.NAME, 'Heartbeat') keys = list(metric.properties.keys()) values = list(metric.properties.values()) @@ -213,7 +213,7 @@ def test_heartbeat_metric_not_vm(self): throw(requests.exceptions.ConnectionError) ): metric = heartbeat_metrics.HeartbeatMetric() - self.assertFalse(metric.is_vm) + self.assertFalse(metric.vm_retry) self.assertEqual(metric.NAME, 'Heartbeat') keys = list(metric.properties.keys()) self.assertEqual(len(keys), 2) @@ -224,34 +224,34 @@ def test_heartbeat_metric_not_vm_timeout(self): throw(requests.Timeout) ): metric = heartbeat_metrics.HeartbeatMetric() - self.assertFalse(metric.is_vm) + self.assertFalse(metric.vm_retry) self.assertEqual(metric.NAME, 'Heartbeat') keys = list(metric.properties.keys()) self.assertEqual(len(keys), 2) - def test_heartbeat_metric_vm_error_response(self): - with mock.patch('requests.get') as get: - get.return_value = MockResponse( - 200, - json.dumps( - { - 'vmId': 5, - 'subscriptionId': 3, - 'osType': 'Linux' - } 
- ) - ) + def test_heartbeat_metric_vm_retry(self): + with mock.patch( + 'requests.get', + throw(requests.exceptions.RequestException) + ): metric = heartbeat_metrics.HeartbeatMetric() - self.assertTrue(metric.is_vm) + self.assertTrue(metric.vm_retry) keys = list(metric.properties.keys()) - self.assertEqual(len(keys), 5) - with mock.patch( - 'requests.get', - throw(Exception) - ): - metric.vm_data.clear() - self.assertTrue(metric.is_vm) - self.assertEqual(len(metric.vm_data), 0) - self.assertTrue(metric.is_vm) + self.assertEqual(len(keys), 2) + self.assertEqual(len(metric.vm_data), 0) + with mock.patch('requests.get') as get: + get.return_value = MockResponse( + 200, + json.dumps( + { + 'vmId': 5, + 'subscriptionId': 3, + 'osType': 'Linux' + } + ) + ) + metric.get_metrics() + self.assertFalse(metric.vm_retry) + self.assertEqual(len(metric.vm_data), 3) keys = list(metric.properties.keys()) self.assertEqual(len(keys), 5) From e577df99236f6b601d586757c64ecc735fdb2a77 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Mon, 14 Sep 2020 15:45:20 -0700 Subject: [PATCH 63/79] Configurable queue capacity for azure exporters (#949) --- contrib/opencensus-ext-azure/CHANGELOG.md | 2 ++ .../opencensus/ext/azure/common/__init__.py | 2 ++ .../opencensus/ext/azure/common/exporter.py | 2 +- .../opencensus/ext/azure/common/storage.py | 9 ++++-- .../opencensus/ext/azure/common/transport.py | 1 - .../ext/azure/log_exporter/__init__.py | 2 +- .../tests/test_azure_log_exporter.py | 32 +++++++++++++++++++ .../tests/test_azure_trace_exporter.py | 16 ++++++++++ opencensus/common/schedule/__init__.py | 5 ++- 9 files changed, 65 insertions(+), 6 deletions(-) diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 45719c679..7f719df48 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -10,6 +10,8 @@ ([#936](https://github.com/census-instrumentation/opencensus-python/pull/936)) - Fix 
attach rate metrics for VM to only ping data service on retry ([#946](https://github.com/census-instrumentation/opencensus-python/pull/946)) +- Added queue capacity configuration for exporters + ([#949](https://github.com/census-instrumentation/opencensus-python/pull/949)) ## 1.0.4 Released 2020-06-29 diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py index 8d76d91ea..d6c099619 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/__init__.py @@ -56,6 +56,7 @@ def process_options(options): TEMPDIR_PREFIX + TEMPDIR_SUFFIX ) + # proxies if options.proxies is None: options.proxies = '{}' @@ -109,6 +110,7 @@ def __init__(self, *args, **kwargs): max_batch_size=100, minimum_retry_interval=60, # minimum retry interval in seconds proxies=None, # string maps url schemes to the url of the proxies + queue_capacity=8192, storage_maintenance_period=60, storage_max_size=50*1024*1024, # 50MiB storage_path=None, diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py index fddf8e1a2..a4c0b9df2 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py @@ -28,7 +28,7 @@ def __init__(self, **options): self.max_batch_size = options.max_batch_size # TODO: queue should be moved to tracer # too much refactor work, leave to the next PR - self._queue = Queue(capacity=8192) # TODO: make this configurable + self._queue = Queue(capacity=options.queue_capacity) # TODO: worker should not be created in the base exporter self._worker = Worker(self._queue, self) self._worker.start() diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py 
b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py index b36269792..905d86dd0 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/storage.py @@ -133,7 +133,9 @@ def gets(self): if path.endswith('.tmp'): if name < timeout_deadline: try: - os.remove(path) # TODO: log data loss + os.remove(path) + logger.warning( + 'File write exceeded timeout. Dropping telemetry') except Exception: pass # keep silent if path.endswith('.lock'): @@ -148,7 +150,10 @@ def gets(self): if path.endswith('.blob'): if name < retention_deadline: try: - os.remove(path) # TODO: log data loss + os.remove(path) + logger.warning( + 'File write exceeded retention.' + + 'Dropping telemetry') except Exception: pass # keep silent else: diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py index 3643da02b..4e7401b77 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py @@ -79,7 +79,6 @@ def _transmit(self, envelopes): logger.info('Transmission succeeded: %s.', text) return 0 if response.status_code == 206: # Partial Content - # TODO: store the unsent data if data: try: resend_envelopes = [] diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index da4afe178..f74d0c24e 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -57,7 +57,7 @@ def __init__(self, **options): ) self._telemetry_processors = [] self.addFilter(SamplingFilter(self.options.logging_sampling_rate)) - self._queue = Queue(capacity=8192) # TODO: make this configurable + self._queue = 
Queue(capacity=self.options.queue_capacity) self._worker = Worker(self._queue, self) self._worker.start() heartbeat_metrics.enable_heartbeat_metrics( diff --git a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py index 5db155e54..bb3faa3d3 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py @@ -98,6 +98,22 @@ def test_init_handler_with_proxies(self): '{"https":"https://test-proxy.com"}', ) + def test_init_handler_with_queue_capacity(self): + handler = log_exporter.AzureLogHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + queue_capacity=500, + ) + + self.assertEqual( + handler.options.queue_capacity, + 500 + ) + + self.assertEqual( + handler._worker._src._queue.maxsize, + 500 + ) + @mock.patch('requests.post', return_value=mock.Mock()) def test_exception(self, requests_mock): logger = logging.getLogger(self.id()) @@ -289,6 +305,22 @@ def test_init_handler_with_proxies(self): '{"https":"https://test-proxy.com"}', ) + def test_init_handler_with_queue_capacity(self): + handler = log_exporter.AzureEventHandler( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + queue_capacity=500, + ) + + self.assertEqual( + handler.options.queue_capacity, + 500 + ) + # pylint: disable=protected-access + self.assertEqual( + handler._worker._src._queue.maxsize, + 500 + ) + @mock.patch('requests.post', return_value=mock.Mock()) def test_exception(self, requests_mock): logger = logging.getLogger(self.id()) diff --git a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py index f1b874d78..93161e53d 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_trace_exporter.py @@ -58,6 +58,22 @@ def test_init_exporter_with_proxies(self): 
'{"https":"https://test-proxy.com"}', ) + def test_init_exporter_with_queue_capacity(self): + exporter = trace_exporter.AzureExporter( + instrumentation_key='12345678-1234-5678-abcd-12345678abcd', + queue_capacity=500, + ) + + self.assertEqual( + exporter.options.queue_capacity, + 500 + ) + # pylint: disable=protected-access + self.assertEqual( + exporter._worker.src._queue.maxsize, + 500 + ) + @mock.patch('requests.post', return_value=mock.Mock()) def test_emit_empty(self, request_mock): exporter = trace_exporter.AzureExporter( diff --git a/opencensus/common/schedule/__init__.py b/opencensus/common/schedule/__init__.py index faa1db1d5..f5de7108a 100644 --- a/opencensus/common/schedule/__init__.py +++ b/opencensus/common/schedule/__init__.py @@ -14,9 +14,12 @@ from six.moves import queue +import logging import threading import time +logger = logging.getLogger(__name__) + class PeriodicTask(threading.Thread): """Thread that periodically calls a given function. @@ -128,7 +131,7 @@ def put(self, item, block=True, timeout=None): try: self._queue.put(item, block, timeout) except queue.Full: - pass # TODO: log data loss + logger.warning('Queue is full. 
Dropping telemetry.') def puts(self, items, block=True, timeout=None): if block and timeout is not None: From 6721a56597a4a3907a7c0c06d1a3de2c9da87c11 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Wed, 23 Sep 2020 12:49:04 -0700 Subject: [PATCH 64/79] Move heartbeat init logic into worker (#952) --- .../examples/traces/client.py | 2 +- .../heartbeat_metrics/heartbeat.py | 30 +++++++++------- .../ext/azure/trace_exporter/__init__.py | 4 ++- .../tests/test_azure_heartbeat_metrics.py | 34 +++++++++++++++++-- opencensus/metrics/transport.py | 5 ++- 5 files changed, 58 insertions(+), 17 deletions(-) diff --git a/contrib/opencensus-ext-azure/examples/traces/client.py b/contrib/opencensus-ext-azure/examples/traces/client.py index 004c79ab2..255492df6 100644 --- a/contrib/opencensus-ext-azure/examples/traces/client.py +++ b/contrib/opencensus-ext-azure/examples/traces/client.py @@ -26,6 +26,6 @@ tracer = Tracer(exporter=AzureExporter(), sampler=ProbabilitySampler(1.0)) with tracer.span(name='parent'): with tracer.span(name='child'): - response = requests.get(url='http://localhost:8080/') + response = requests.get(url='http://example.com/') print(response.status_code) print(response.text) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py index 11527a360..a41fe8c89 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/heartbeat_metrics/heartbeat.py @@ -37,20 +37,23 @@ class HeartbeatMetric: def __init__(self): self.vm_data = {} self.vm_retry = False + self.init = False self.properties = OrderedDict() - self._init_properties() - self.heartbeat = LongGauge( - HeartbeatMetric.NAME, - 'Heartbeat metric with custom dimensions', - 'count', - list(self.properties.keys()), - ) - 
self.heartbeat.get_or_create_time_series( - list(self.properties.values()) - ) def get_metrics(self): - if self.vm_retry: + if not self.init: + self._init_properties() + self.heartbeat = LongGauge( + HeartbeatMetric.NAME, + 'Heartbeat metric with custom dimensions', + 'count', + list(self.properties.keys()), + ) + self.heartbeat.get_or_create_time_series( + list(self.properties.values()) + ) + self.init = True + elif self.vm_retry: # Only need to possibly update if vm retry if self._get_azure_compute_metadata() and not self.vm_retry: self._populate_vm_data() @@ -64,7 +67,10 @@ def get_metrics(self): self.heartbeat.get_or_create_time_series( list(self.properties.values()) ) - return [self.heartbeat.get_metric(datetime.datetime.utcnow())] + if self.heartbeat: + return [self.heartbeat.get_metric(datetime.datetime.utcnow())] + else: + return [] def _init_properties(self): self.properties[LabelKey("sdk", '')] = LabelValue( diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py index c67e64b87..17e4f3d3b 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import atexit import json import logging @@ -58,6 +59,7 @@ def __init__(self, **options): ) self._telemetry_processors = [] super(AzureExporter, self).__init__(**options) + atexit.register(self._stop, self.options.grace_period) heartbeat_metrics.enable_heartbeat_metrics( self.options.connection_string, self.options.instrumentation_key) @@ -181,4 +183,4 @@ def emit(self, batch, event=None): def _stop(self, timeout=None): self.storage.close() - return self._worker.stop(timeout) + self._worker.stop(timeout) diff --git a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py index 729ebc2a4..a8ad2d564 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py @@ -84,7 +84,16 @@ def test_enable_heartbeat_metrics_exists(self, transport_mock): def test_heartbeat_metric_init(self): metric = heartbeat_metrics.HeartbeatMetric() + self.assertEqual(len(metric.vm_data), 0) + self.assertFalse(metric.vm_retry) + self.assertFalse(metric.init) + self.assertEqual(len(metric.properties), 0) + def test_heartbeat_metric_get_metric_init(self): + metric = heartbeat_metrics.HeartbeatMetric() + self.assertFalse(metric.init) + metrics = metric.get_metrics() + self.assertTrue(metric.init) self.assertEqual(metric.NAME, 'Heartbeat') keys = list(metric.properties.keys()) values = list(metric.properties.values()) @@ -115,6 +124,7 @@ def test_heartbeat_metric_init(self): gauge._len_label_keys, len(metric.properties.keys()) ) + self.assertEqual(len(metrics), 1) @mock.patch.dict( os.environ, @@ -126,7 +136,9 @@ def test_heartbeat_metric_init(self): ) def test_heartbeat_metric_init_webapp(self): metric = heartbeat_metrics.HeartbeatMetric() - + self.assertFalse(metric.init) + metric.get_metrics() + self.assertTrue(metric.init) self.assertEqual(metric.NAME, 'Heartbeat') keys = list(metric.properties.keys()) values = 
list(metric.properties.values()) @@ -156,7 +168,9 @@ def test_heartbeat_metric_init_webapp(self): ) def test_heartbeat_metric_init_functionapp(self): metric = heartbeat_metrics.HeartbeatMetric() - + self.assertFalse(metric.init) + metric.get_metrics() + self.assertTrue(metric.init) self.assertEqual(metric.NAME, 'Heartbeat') keys = list(metric.properties.keys()) values = list(metric.properties.values()) @@ -186,6 +200,10 @@ def test_heartbeat_metric_init_vm(self): ) ) metric = heartbeat_metrics.HeartbeatMetric() + self.assertFalse(metric.init) + self.assertFalse(metric.vm_retry) + metric.get_metrics() + self.assertTrue(metric.init) self.assertFalse(metric.vm_retry) self.assertEqual(metric.NAME, 'Heartbeat') keys = list(metric.properties.keys()) @@ -213,6 +231,10 @@ def test_heartbeat_metric_not_vm(self): throw(requests.exceptions.ConnectionError) ): metric = heartbeat_metrics.HeartbeatMetric() + self.assertFalse(metric.init) + self.assertFalse(metric.vm_retry) + metric.get_metrics() + self.assertTrue(metric.init) self.assertFalse(metric.vm_retry) self.assertEqual(metric.NAME, 'Heartbeat') keys = list(metric.properties.keys()) @@ -224,6 +246,10 @@ def test_heartbeat_metric_not_vm_timeout(self): throw(requests.Timeout) ): metric = heartbeat_metrics.HeartbeatMetric() + self.assertFalse(metric.init) + self.assertFalse(metric.vm_retry) + metric.get_metrics() + self.assertTrue(metric.init) self.assertFalse(metric.vm_retry) self.assertEqual(metric.NAME, 'Heartbeat') keys = list(metric.properties.keys()) @@ -235,6 +261,10 @@ def test_heartbeat_metric_vm_retry(self): throw(requests.exceptions.RequestException) ): metric = heartbeat_metrics.HeartbeatMetric() + self.assertFalse(metric.init) + self.assertFalse(metric.vm_retry) + metric.get_metrics() + self.assertTrue(metric.init) self.assertTrue(metric.vm_retry) keys = list(metric.properties.keys()) self.assertEqual(len(keys), 2) diff --git a/opencensus/metrics/transport.py b/opencensus/metrics/transport.py index 
88a59b8ac..d27b06f32 100644 --- a/opencensus/metrics/transport.py +++ b/opencensus/metrics/transport.py @@ -86,7 +86,10 @@ def run(self): def close(self): try: - return self.func(*self.args, **self.kwargs) + # Suppress request tracking on flush + execution_context.set_is_exporter(True) + self.func(*self.args, **self.kwargs) + execution_context.set_is_exporter(False) except Exception as ex: logger.exception("Error handling metric flush: {}".format(ex)) self.cancel() From 07588770a68c259768c9a372c1a5e3b027711a7b Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 13 Oct 2020 13:09:34 -0400 Subject: [PATCH 65/79] Release for v0.7.11 (#958) --- .isort.cfg | 2 +- .travis.yml | 4 - CHANGELOG.md | 3 + context/opencensus-context/CHANGELOG.md | 2 - context/opencensus-context/version.py | 2 +- contrib/opencensus-correlation/version.py | 2 +- contrib/opencensus-ext-azure/CHANGELOG.md | 3 + .../opencensus/ext/azure/common/exporter.py | 2 +- .../opencensus/ext/azure/common/version.py | 2 +- .../ext/azure/log_exporter/__init__.py | 18 +++++ contrib/opencensus-ext-azure/setup.py | 2 +- .../tests/test_azure_standard_metrics.py | 4 +- contrib/opencensus-ext-datadog/CHANGELOG.md | 2 + contrib/opencensus-ext-datadog/setup.py | 2 +- contrib/opencensus-ext-datadog/version.py | 2 +- contrib/opencensus-ext-dbapi/setup.py | 2 +- contrib/opencensus-ext-dbapi/version.py | 2 +- contrib/opencensus-ext-django/setup.py | 2 +- contrib/opencensus-ext-django/version.py | 2 +- contrib/opencensus-ext-flask/CHANGELOG.md | 2 + contrib/opencensus-ext-flask/setup.py | 2 +- contrib/opencensus-ext-flask/version.py | 2 +- contrib/opencensus-ext-gevent/setup.py | 2 +- contrib/opencensus-ext-gevent/version.py | 2 +- .../setup.py | 6 +- .../version.py | 2 +- contrib/opencensus-ext-grpc/setup.py | 2 +- contrib/opencensus-ext-grpc/version.py | 2 +- contrib/opencensus-ext-httplib/CHANGELOG.md | 2 +- contrib/opencensus-ext-httplib/setup.py | 2 +- contrib/opencensus-ext-httplib/version.py | 2 +- 
contrib/opencensus-ext-jaeger/setup.py | 2 +- contrib/opencensus-ext-jaeger/version.py | 2 +- contrib/opencensus-ext-logging/setup.py | 2 +- contrib/opencensus-ext-logging/version.py | 2 +- contrib/opencensus-ext-mysql/setup.py | 4 +- contrib/opencensus-ext-mysql/version.py | 2 +- contrib/opencensus-ext-ocagent/setup.py | 2 +- contrib/opencensus-ext-ocagent/version.py | 2 +- contrib/opencensus-ext-postgresql/setup.py | 2 +- contrib/opencensus-ext-postgresql/version.py | 2 +- contrib/opencensus-ext-prometheus/setup.py | 2 +- contrib/opencensus-ext-prometheus/version.py | 2 +- contrib/opencensus-ext-pymongo/setup.py | 2 +- contrib/opencensus-ext-pymongo/version.py | 2 +- contrib/opencensus-ext-pymysql/setup.py | 4 +- contrib/opencensus-ext-pymysql/version.py | 2 +- contrib/opencensus-ext-pyramid/setup.py | 2 +- contrib/opencensus-ext-pyramid/version.py | 2 +- contrib/opencensus-ext-requests/CHANGELOG.md | 2 +- contrib/opencensus-ext-requests/setup.py | 2 +- contrib/opencensus-ext-requests/version.py | 2 +- contrib/opencensus-ext-sqlalchemy/setup.py | 2 +- contrib/opencensus-ext-sqlalchemy/version.py | 2 +- .../opencensus-ext-stackdriver/CHANGELOG.md | 23 +++++- contrib/opencensus-ext-stackdriver/setup.py | 2 +- contrib/opencensus-ext-stackdriver/version.py | 2 +- contrib/opencensus-ext-threading/setup.py | 2 +- contrib/opencensus-ext-threading/version.py | 2 +- contrib/opencensus-ext-zipkin/setup.py | 2 +- contrib/opencensus-ext-zipkin/version.py | 2 +- opencensus/common/transports/async_.py | 2 +- opencensus/common/version/__init__.py | 2 +- opencensus/metrics/transport.py | 2 +- setup.py | 2 +- tox.ini | 79 ++++++++----------- 66 files changed, 144 insertions(+), 116 deletions(-) diff --git a/.isort.cfg b/.isort.cfg index 4f378fabf..834ec46e7 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -14,4 +14,4 @@ line_length=79 multi_line_output=3 known_future_library = six,six.moves,__future__ 
known_third_party=google,mock,pymysql,sqlalchemy,psycopg2,mysql,requests,django,pytest,grpc,flask,bitarray,prometheus_client,psutil,pymongo,wrapt,thrift,retrying,pyramid,werkzeug,gevent -known_first_party=opencensus \ No newline at end of file +known_first_party=opencensus diff --git a/.travis.yml b/.travis.yml index dbd282a60..9a06769a1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,3 @@ install: script: - tox - touch docs/.nojekyll - -branches: - only: - - master diff --git a/CHANGELOG.md b/CHANGELOG.md index 14ea915ab..742ebd370 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased +## 0.7.11 +Released 2020-10-13 + - PeriodicMetricTask flush on exit ([#943](https://github.com/census-instrumentation/opencensus-python/pull/943)) diff --git a/context/opencensus-context/CHANGELOG.md b/context/opencensus-context/CHANGELOG.md index a49592ac1..55518cfea 100644 --- a/context/opencensus-context/CHANGELOG.md +++ b/context/opencensus-context/CHANGELOG.md @@ -2,8 +2,6 @@ ## Unreleased -## opencensus-ext-context 0.1.2 - ## 0.1.2 Released 2020-06-29 diff --git a/context/opencensus-context/version.py b/context/opencensus-context/version.py index ff18aeb50..3f601a176 100644 --- a/context/opencensus-context/version.py +++ b/context/opencensus-context/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.2.dev0' +__version__ = '0.1.2' diff --git a/contrib/opencensus-correlation/version.py b/contrib/opencensus-correlation/version.py index deb2f374d..8088a1980 100644 --- a/contrib/opencensus-correlation/version.py +++ b/contrib/opencensus-correlation/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.4.dev0' +__version__ = '0.3.0' diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 7f719df48..31103f11f 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased +## 1.0.5 +Released 2020-10-13 + - Attach rate metrics via Heartbeat for Web and Function apps ([#930](https://github.com/census-instrumentation/opencensus-python/pull/930)) - Attach rate metrics for VM diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py index a4c0b9df2..afb0ee388 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py @@ -67,7 +67,7 @@ def __init__(self, src, dst): def run(self): # pragma: NO COVER # Indicate that this thread is an exporter thread. - # Used to suppress tracking of requests in this thread + # Used to suppress tracking of requests in this thread. execution_context.set_is_exporter(True) src = self.src dst = self.dst diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py index 71eb269bd..e3ba8af95 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '1.0.dev0' +__version__ = '1.0.5' diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index f74d0c24e..2a7bafcd1 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -81,6 +81,24 @@ def _export(self, batch, event=None): # pragma: NO COVER if event: event.set() + def _export(self, batch, event=None): # pragma: NO COVER + try: + if batch: + envelopes = [self.log_record_to_envelope(x) for x in batch] + envelopes = self.apply_telemetry_processors(envelopes) + result = self._transmit(envelopes) + if result > 0: + self.storage.put(envelopes, result) + if event: + if isinstance(event, QueueExitEvent): + self._transmit_from_storage() # send files before exit + return + if len(batch) < self.options.max_batch_size: + self._transmit_from_storage() + finally: + if event: + event.set() + def close(self): self.storage.close() self._worker.stop() diff --git a/contrib/opencensus-ext-azure/setup.py b/contrib/opencensus-ext-azure/setup.py index d4f228478..11e770fca 100644 --- a/contrib/opencensus-ext-azure/setup.py +++ b/contrib/opencensus-ext-azure/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.11, < 1.0.0', 'psutil >= 5.6.3', 'requests >= 2.19.0', ], diff --git a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py index 317c0e2c5..785c987d2 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py @@ -78,7 +78,7 @@ def test_get_process_private_bytes_metric(self): '\\Process(??APP_WIN32_PROC??)\\Private Bytes') def 
test_get_process_private_bytes(self): - with mock.patch('opencensus.ext.azure.metrics_exporter' + + with mock.patch('opencensus.ext.azure.metrics_exporter' '.standard_metrics.process.PROCESS') as process_mock: memory = collections.namedtuple('memory', 'rss') pmem = memory(rss=100) @@ -90,7 +90,7 @@ def test_get_process_private_bytes(self): @mock.patch('opencensus.ext.azure.metrics_exporter' '.standard_metrics.process.logger') def test_get_process_private_bytes_exception(self, logger_mock): - with mock.patch('opencensus.ext.azure.metrics_exporter' + + with mock.patch('opencensus.ext.azure.metrics_exporter' '.standard_metrics.process.PROCESS') as process_mock: process_mock.memory_info.side_effect = Exception() standard_metrics.ProcessMemoryMetric.get_value() diff --git a/contrib/opencensus-ext-datadog/CHANGELOG.md b/contrib/opencensus-ext-datadog/CHANGELOG.md index b93e3f3d1..c6bcf394c 100644 --- a/contrib/opencensus-ext-datadog/CHANGELOG.md +++ b/contrib/opencensus-ext-datadog/CHANGELOG.md @@ -1,5 +1,7 @@ # Changelog +## Unreleased + ## 0.1.0 Released 2019-11-26 diff --git a/contrib/opencensus-ext-datadog/setup.py b/contrib/opencensus-ext-datadog/setup.py index 804bbdcba..f87c97931 100644 --- a/contrib/opencensus-ext-datadog/setup.py +++ b/contrib/opencensus-ext-datadog/setup.py @@ -39,7 +39,7 @@ include_package_data=True, install_requires=[ 'bitarray >= 1.0.1, < 2.0.0', - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.6, < 1.0.0', 'requests >= 2.19.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-datadog/version.py b/contrib/opencensus-ext-datadog/version.py index f3a64a892..d323c46d6 100644 --- a/contrib/opencensus-ext-datadog/version.py +++ b/contrib/opencensus-ext-datadog/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.1.dev0' +__version__ = '0.1.0' diff --git a/contrib/opencensus-ext-dbapi/setup.py b/contrib/opencensus-ext-dbapi/setup.py index f55daf5d6..27f430844 100644 --- a/contrib/opencensus-ext-dbapi/setup.py +++ b/contrib/opencensus-ext-dbapi/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-dbapi/version.py b/contrib/opencensus-ext-dbapi/version.py index ff18aeb50..3f601a176 100644 --- a/contrib/opencensus-ext-dbapi/version.py +++ b/contrib/opencensus-ext-dbapi/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.2.dev0' +__version__ = '0.1.2' diff --git a/contrib/opencensus-ext-django/setup.py b/contrib/opencensus-ext-django/setup.py index b2acf735e..f6d3b3ab0 100644 --- a/contrib/opencensus-ext-django/setup.py +++ b/contrib/opencensus-ext-django/setup.py @@ -44,7 +44,7 @@ long_description=open('README.rst').read(), install_requires=[ 'Django >= 1.11', - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-django/version.py b/contrib/opencensus-ext-django/version.py index dffc606db..c652125f7 100644 --- a/contrib/opencensus-ext-django/version.py +++ b/contrib/opencensus-ext-django/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.8.dev0' +__version__ = '0.7.2' diff --git a/contrib/opencensus-ext-flask/CHANGELOG.md b/contrib/opencensus-ext-flask/CHANGELOG.md index 2b8246fd3..befd70cef 100644 --- a/contrib/opencensus-ext-flask/CHANGELOG.md +++ b/contrib/opencensus-ext-flask/CHANGELOG.md @@ -13,6 +13,8 @@ Released 2019-08-26 - Updated `http.status_code` attribute to be an int. ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) +- Fixes value for `http.route` in Flask middleware + ([#759](https://github.com/census-instrumentation/opencensus-python/pull/759)) ## 0.7.1 Released 2019-08-05 diff --git a/contrib/opencensus-ext-flask/setup.py b/contrib/opencensus-ext-flask/setup.py index e3fc3a640..52baa7d38 100644 --- a/contrib/opencensus-ext-flask/setup.py +++ b/contrib/opencensus-ext-flask/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'flask >= 0.12.3, < 2.0.0', - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.1, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-flask/version.py b/contrib/opencensus-ext-flask/version.py index dffc606db..b7a1f8944 100644 --- a/contrib/opencensus-ext-flask/version.py +++ b/contrib/opencensus-ext-flask/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.8.dev0' +__version__ = '0.7.3' diff --git a/contrib/opencensus-ext-gevent/setup.py b/contrib/opencensus-ext-gevent/setup.py index 022df4a14..d15a3953b 100644 --- a/contrib/opencensus-ext-gevent/setup.py +++ b/contrib/opencensus-ext-gevent/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', 'gevent >= 1.3' ], extras_require={}, diff --git a/contrib/opencensus-ext-gevent/version.py b/contrib/opencensus-ext-gevent/version.py index ff18aeb50..d323c46d6 100644 --- a/contrib/opencensus-ext-gevent/version.py +++ b/contrib/opencensus-ext-gevent/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.2.dev0' +__version__ = '0.1.0' diff --git a/contrib/opencensus-ext-google-cloud-clientlibs/setup.py b/contrib/opencensus-ext-google-cloud-clientlibs/setup.py index a66b4c683..b9a8ffb87 100644 --- a/contrib/opencensus-ext-google-cloud-clientlibs/setup.py +++ b/contrib/opencensus-ext-google-cloud-clientlibs/setup.py @@ -39,9 +39,9 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', - 'opencensus-ext-grpc >= 0.4.dev0, < 1.0.0', - 'opencensus-ext-requests >= 0.2.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus-ext-grpc >= 0.3.0, < 1.0.0', + 'opencensus-ext-requests >= 0.1.2, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-google-cloud-clientlibs/version.py b/contrib/opencensus-ext-google-cloud-clientlibs/version.py index ff18aeb50..3f601a176 100644 --- a/contrib/opencensus-ext-google-cloud-clientlibs/version.py +++ b/contrib/opencensus-ext-google-cloud-clientlibs/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.2.dev0' +__version__ = '0.1.2' diff --git a/contrib/opencensus-ext-grpc/setup.py b/contrib/opencensus-ext-grpc/setup.py index 31b96b390..c3dea1566 100644 --- a/contrib/opencensus-ext-grpc/setup.py +++ b/contrib/opencensus-ext-grpc/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'grpcio >= 1.0.0, < 2.0.0', - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.1, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-grpc/version.py b/contrib/opencensus-ext-grpc/version.py index dffc606db..752777753 100644 --- a/contrib/opencensus-ext-grpc/version.py +++ b/contrib/opencensus-ext-grpc/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.8.dev0' +__version__ = '0.7.1' diff --git a/contrib/opencensus-ext-httplib/CHANGELOG.md b/contrib/opencensus-ext-httplib/CHANGELOG.md index 605fd3a33..6c59e7ffe 100644 --- a/contrib/opencensus-ext-httplib/CHANGELOG.md +++ b/contrib/opencensus-ext-httplib/CHANGELOG.md @@ -16,7 +16,7 @@ Released 2019-08-26 ## 0.7.1 Released 2019-08-06 - - Support exporter changes in `opencensus>=0.7.0` +- Support exporter changes in `opencensus>=0.7.0` ## 0.1.3 Released 2019-05-31 diff --git a/contrib/opencensus-ext-httplib/setup.py b/contrib/opencensus-ext-httplib/setup.py index 340b43406..080c35998 100644 --- a/contrib/opencensus-ext-httplib/setup.py +++ b/contrib/opencensus-ext-httplib/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-httplib/version.py b/contrib/opencensus-ext-httplib/version.py index dffc606db..b7a1f8944 100644 --- a/contrib/opencensus-ext-httplib/version.py +++ b/contrib/opencensus-ext-httplib/version.py @@ -12,4 +12,4 @@ # 
See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.8.dev0' +__version__ = '0.7.3' diff --git a/contrib/opencensus-ext-jaeger/setup.py b/contrib/opencensus-ext-jaeger/setup.py index 688958f73..693a92f12 100644 --- a/contrib/opencensus-ext-jaeger/setup.py +++ b/contrib/opencensus-ext-jaeger/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.1, < 1.0.0', 'thrift >= 0.10.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-jaeger/version.py b/contrib/opencensus-ext-jaeger/version.py index dffc606db..752777753 100644 --- a/contrib/opencensus-ext-jaeger/version.py +++ b/contrib/opencensus-ext-jaeger/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.8.dev0' +__version__ = '0.7.1' diff --git a/contrib/opencensus-ext-logging/setup.py b/contrib/opencensus-ext-logging/setup.py index 76cf383d3..64bc5a5ed 100644 --- a/contrib/opencensus-ext-logging/setup.py +++ b/contrib/opencensus-ext-logging/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-logging/version.py b/contrib/opencensus-ext-logging/version.py index ff18aeb50..d323c46d6 100644 --- a/contrib/opencensus-ext-logging/version.py +++ b/contrib/opencensus-ext-logging/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.2.dev0' +__version__ = '0.1.0' diff --git a/contrib/opencensus-ext-mysql/setup.py b/contrib/opencensus-ext-mysql/setup.py index 3456658f3..25540421c 100644 --- a/contrib/opencensus-ext-mysql/setup.py +++ b/contrib/opencensus-ext-mysql/setup.py @@ -40,8 +40,8 @@ long_description=open('README.rst').read(), install_requires=[ 'mysql-connector >= 2.1.6, < 3.0.0', - 'opencensus >= 0.8.dev0, < 1.0.0', - 'opencensus-ext-dbapi >= 0.2.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus-ext-dbapi >= 0.1.2, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-mysql/version.py b/contrib/opencensus-ext-mysql/version.py index ff18aeb50..3f601a176 100644 --- a/contrib/opencensus-ext-mysql/version.py +++ b/contrib/opencensus-ext-mysql/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.2.dev0' +__version__ = '0.1.2' diff --git a/contrib/opencensus-ext-ocagent/setup.py b/contrib/opencensus-ext-ocagent/setup.py index 4ac94a067..3e34f2369 100644 --- a/contrib/opencensus-ext-ocagent/setup.py +++ b/contrib/opencensus-ext-ocagent/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'grpcio >= 1.0.0, < 2.0.0', - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.1, < 1.0.0', 'opencensus-proto >= 0.1.0, < 1.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-ocagent/version.py b/contrib/opencensus-ext-ocagent/version.py index dffc606db..752777753 100644 --- a/contrib/opencensus-ext-ocagent/version.py +++ b/contrib/opencensus-ext-ocagent/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.8.dev0' +__version__ = '0.7.1' diff --git a/contrib/opencensus-ext-postgresql/setup.py b/contrib/opencensus-ext-postgresql/setup.py index 41bd59806..111ee03d6 100644 --- a/contrib/opencensus-ext-postgresql/setup.py +++ b/contrib/opencensus-ext-postgresql/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', 'psycopg2-binary >= 2.7.3.1', ], extras_require={}, diff --git a/contrib/opencensus-ext-postgresql/version.py b/contrib/opencensus-ext-postgresql/version.py index ff18aeb50..3f601a176 100644 --- a/contrib/opencensus-ext-postgresql/version.py +++ b/contrib/opencensus-ext-postgresql/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.2.dev0' +__version__ = '0.1.2' diff --git a/contrib/opencensus-ext-prometheus/setup.py b/contrib/opencensus-ext-prometheus/setup.py index 2fca7c76f..8a9e82b3d 100644 --- a/contrib/opencensus-ext-prometheus/setup.py +++ b/contrib/opencensus-ext-prometheus/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', 'prometheus_client >= 0.5.0, < 1.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-prometheus/version.py b/contrib/opencensus-ext-prometheus/version.py index bf7c8163b..20fd9b8ed 100644 --- a/contrib/opencensus-ext-prometheus/version.py +++ b/contrib/opencensus-ext-prometheus/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.3.dev0' +__version__ = '0.2.1' diff --git a/contrib/opencensus-ext-pymongo/setup.py b/contrib/opencensus-ext-pymongo/setup.py index 63adca99e..5c8936628 100644 --- a/contrib/opencensus-ext-pymongo/setup.py +++ b/contrib/opencensus-ext-pymongo/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.1, < 1.0.0', 'pymongo >= 3.1.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-pymongo/version.py b/contrib/opencensus-ext-pymongo/version.py index dffc606db..752777753 100644 --- a/contrib/opencensus-ext-pymongo/version.py +++ b/contrib/opencensus-ext-pymongo/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.8.dev0' +__version__ = '0.7.1' diff --git a/contrib/opencensus-ext-pymysql/setup.py b/contrib/opencensus-ext-pymysql/setup.py index f8502034d..58fcd84ca 100644 --- a/contrib/opencensus-ext-pymysql/setup.py +++ b/contrib/opencensus-ext-pymysql/setup.py @@ -40,8 +40,8 @@ long_description=open('README.rst').read(), install_requires=[ 'PyMySQL >= 0.7.11, < 1.0.0', - 'opencensus >= 0.8.dev0, < 1.0.0', - 'opencensus-ext-dbapi >= 0.2.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus-ext-dbapi >= 0.1.2, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-pymysql/version.py b/contrib/opencensus-ext-pymysql/version.py index ff18aeb50..3f601a176 100644 --- a/contrib/opencensus-ext-pymysql/version.py +++ b/contrib/opencensus-ext-pymysql/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.2.dev0' +__version__ = '0.1.2' diff --git a/contrib/opencensus-ext-pyramid/setup.py b/contrib/opencensus-ext-pyramid/setup.py index 7fcd90ef3..43aba4777 100644 --- a/contrib/opencensus-ext-pyramid/setup.py +++ b/contrib/opencensus-ext-pyramid/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'pyramid >= 1.9.1, < 2.0.0', - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-pyramid/version.py b/contrib/opencensus-ext-pyramid/version.py index dffc606db..752777753 100644 --- a/contrib/opencensus-ext-pyramid/version.py +++ b/contrib/opencensus-ext-pyramid/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.8.dev0' +__version__ = '0.7.1' diff --git a/contrib/opencensus-ext-requests/CHANGELOG.md b/contrib/opencensus-ext-requests/CHANGELOG.md index 4c2c4cf24..d0b090412 100644 --- a/contrib/opencensus-ext-requests/CHANGELOG.md +++ b/contrib/opencensus-ext-requests/CHANGELOG.md @@ -20,7 +20,7 @@ Released 2019-08-26 ## 0.7.1 Released 2019-08-06 - - Support exporter changes in `opencensus>=0.7.0` +- Support exporter changes in `opencensus>=0.7.0` ## 0.1.2 Released 2019-04-24 diff --git a/contrib/opencensus-ext-requests/setup.py b/contrib/opencensus-ext-requests/setup.py index 37aca2d26..b04a277fd 100644 --- a/contrib/opencensus-ext-requests/setup.py +++ b/contrib/opencensus-ext-requests/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.3, < 1.0.0', 'wrapt >= 1.0.0, < 2.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-requests/version.py b/contrib/opencensus-ext-requests/version.py index dffc606db..b7a1f8944 100644 --- a/contrib/opencensus-ext-requests/version.py +++ 
b/contrib/opencensus-ext-requests/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.8.dev0' +__version__ = '0.7.3' diff --git a/contrib/opencensus-ext-sqlalchemy/setup.py b/contrib/opencensus-ext-sqlalchemy/setup.py index 30090c4f4..31da80847 100644 --- a/contrib/opencensus-ext-sqlalchemy/setup.py +++ b/contrib/opencensus-ext-sqlalchemy/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', 'SQLAlchemy >= 1.1.14, < 2.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-sqlalchemy/version.py b/contrib/opencensus-ext-sqlalchemy/version.py index ff18aeb50..3f601a176 100644 --- a/contrib/opencensus-ext-sqlalchemy/version.py +++ b/contrib/opencensus-ext-sqlalchemy/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.2.dev0' +__version__ = '0.1.2' diff --git a/contrib/opencensus-ext-stackdriver/CHANGELOG.md b/contrib/opencensus-ext-stackdriver/CHANGELOG.md index 7dd8184aa..92869a205 100644 --- a/contrib/opencensus-ext-stackdriver/CHANGELOG.md +++ b/contrib/opencensus-ext-stackdriver/CHANGELOG.md @@ -1,6 +1,8 @@ # Changelog -## Unreleased +## 0.7.4 +Released 2020-10-13 + - Change default transporter in stackdriver exporter ([#929](https://github.com/census-instrumentation/opencensus-python/pull/929)) @@ -23,6 +25,25 @@ Released 2019-08-05 - Support exporter changes in `opencensus>=0.7.0` +## 0.7.3 +Released 2020-06-29 + + - Add mean property for distribution values + ([#919](https://github.com/census-instrumentation/opencensus-python/pull/919)) + +## 0.7.2 +Released 2019-08-26 + + - Delete SD integ test metric descriptors + ([#770](https://github.com/census-instrumentation/opencensus-python/pull/770)) + - Updated `http.status_code` attribute to be an int. + ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) + +## 0.7.1 +Released 2019-08-05 + + - Support exporter changes in `opencensus>=0.7.0` + ## 0.4.0 Released 2019-05-31 diff --git a/contrib/opencensus-ext-stackdriver/setup.py b/contrib/opencensus-ext-stackdriver/setup.py index f87f47ab8..4714779f4 100644 --- a/contrib/opencensus-ext-stackdriver/setup.py +++ b/contrib/opencensus-ext-stackdriver/setup.py @@ -42,7 +42,7 @@ 'google-cloud-monitoring >= 0.30.0, < 1.0.0', 'google-cloud-trace >= 0.20.0, < 1.0.0', 'rsa <= 4.0; python_version<="3.4"', - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.11, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-stackdriver/version.py b/contrib/opencensus-ext-stackdriver/version.py index dffc606db..d5d5f1a28 100644 --- a/contrib/opencensus-ext-stackdriver/version.py +++ b/contrib/opencensus-ext-stackdriver/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing 
permissions and # limitations under the License. -__version__ = '0.8.dev0' +__version__ = '0.7.4' diff --git a/contrib/opencensus-ext-threading/setup.py b/contrib/opencensus-ext-threading/setup.py index 963e01e89..2f7889e52 100644 --- a/contrib/opencensus-ext-threading/setup.py +++ b/contrib/opencensus-ext-threading/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-threading/version.py b/contrib/opencensus-ext-threading/version.py index ff18aeb50..3f601a176 100644 --- a/contrib/opencensus-ext-threading/version.py +++ b/contrib/opencensus-ext-threading/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.2.dev0' +__version__ = '0.1.2' diff --git a/contrib/opencensus-ext-zipkin/setup.py b/contrib/opencensus-ext-zipkin/setup.py index b9d24c277..9904384cc 100644 --- a/contrib/opencensus-ext-zipkin/setup.py +++ b/contrib/opencensus-ext-zipkin/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus >= 0.7.0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-zipkin/version.py b/contrib/opencensus-ext-zipkin/version.py index bf7c8163b..ee24582a0 100644 --- a/contrib/opencensus-ext-zipkin/version.py +++ b/contrib/opencensus-ext-zipkin/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.3.dev0' +__version__ = '0.2.2' diff --git a/opencensus/common/transports/async_.py b/opencensus/common/transports/async_.py index 56e726119..cc63ba139 100644 --- a/opencensus/common/transports/async_.py +++ b/opencensus/common/transports/async_.py @@ -93,7 +93,7 @@ def _thread_main(self): batches to the specified tracing backend using the exporter. """ # Indicate that this thread is an exporter thread. - # Used to suppress tracking of requests in this thread + # Used to suppress tracking of requests in this thread. execution_context.set_is_exporter(True) quit_ = False diff --git a/opencensus/common/version/__init__.py b/opencensus/common/version/__init__.py index dffc606db..366bd7b59 100644 --- a/opencensus/common/version/__init__.py +++ b/opencensus/common/version/__init__.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.8.dev0' +__version__ = '0.7.11' diff --git a/opencensus/metrics/transport.py b/opencensus/metrics/transport.py index d27b06f32..e85be1eee 100644 --- a/opencensus/metrics/transport.py +++ b/opencensus/metrics/transport.py @@ -80,7 +80,7 @@ def func(*aa, **kw): def run(self): # Indicate that this thread is an exporter thread. - # Used to suppress tracking of requests in this thread + # Used to suppress tracking of requests in this thread. 
execution_context.set_is_exporter(True) super(PeriodicMetricTask, self).run() diff --git a/setup.py b/setup.py index c060e343d..30604f4b3 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus-context == 0.2.dev0', + 'opencensus-context == 0.1.2', 'google-api-core >= 1.0.0, < 2.0.0', ], extras_require={}, diff --git a/tox.ini b/tox.ini index a460651bf..64c5cb604 100644 --- a/tox.ini +++ b/tox.ini @@ -1,58 +1,43 @@ [tox] envlist = - py{27,34,35,36,37}-unit - py37-lint - py37-setup - py37-docs + py{27,37}-unit [testenv] install_command = python -m pip install {opts} {packages} deps = - unit,lint: mock==3.0.5 - unit,lint: pytest==4.6.4 - unit,lint: pytest-cov - unit,lint: retrying - unit,lint: unittest2 - unit,lint,py37-setup,docs: -e context/opencensus-context - unit,lint,docs: -e contrib/opencensus-correlation - unit,lint,docs: -e . - unit,lint: -e contrib/opencensus-ext-azure - unit,lint: -e contrib/opencensus-ext-datadog - unit,lint: -e contrib/opencensus-ext-dbapi - unit,lint: -e contrib/opencensus-ext-django - unit,lint: -e contrib/opencensus-ext-flask - unit,lint: -e contrib/opencensus-ext-gevent - unit,lint: -e contrib/opencensus-ext-grpc - unit,lint: -e contrib/opencensus-ext-httplib - unit,lint: -e contrib/opencensus-ext-jaeger - unit,lint: -e contrib/opencensus-ext-logging - unit,lint: -e contrib/opencensus-ext-mysql - unit,lint: -e contrib/opencensus-ext-ocagent - unit,lint: -e contrib/opencensus-ext-postgresql - unit,lint: -e contrib/opencensus-ext-prometheus - unit,lint: -e contrib/opencensus-ext-pymongo - unit,lint: -e contrib/opencensus-ext-pymysql - unit,lint: -e contrib/opencensus-ext-pyramid - unit,lint: -e contrib/opencensus-ext-requests - unit,lint: -e contrib/opencensus-ext-sqlalchemy - unit,lint: -e contrib/opencensus-ext-stackdriver - unit,lint: -e contrib/opencensus-ext-threading - unit,lint: -e contrib/opencensus-ext-zipkin - unit,lint: -e 
contrib/opencensus-ext-google-cloud-clientlibs - lint: flake8 - lint: isort ~= 4.3.21 - setup: docutils - setup: pygments - docs: setuptools >= 36.4.0 - docs: sphinx >= 1.6.3 + unit: mock==3.0.5 + unit: pytest==4.6.4 + unit: pytest-cov + unit: retrying + unit: unittest2 + unit: -e context/opencensus-context + unit: -e contrib/opencensus-correlation + unit: -e . + unit: -e contrib/opencensus-ext-azure + unit: -e contrib/opencensus-ext-datadog + unit: -e contrib/opencensus-ext-dbapi + unit: -e contrib/opencensus-ext-django + unit: -e contrib/opencensus-ext-flask + unit: -e contrib/opencensus-ext-gevent + unit: -e contrib/opencensus-ext-grpc + unit: -e contrib/opencensus-ext-httplib + unit: -e contrib/opencensus-ext-jaeger + unit: -e contrib/opencensus-ext-logging + unit: -e contrib/opencensus-ext-mysql + unit: -e contrib/opencensus-ext-ocagent + unit: -e contrib/opencensus-ext-postgresql + unit: -e contrib/opencensus-ext-prometheus + unit: -e contrib/opencensus-ext-pymongo + unit: -e contrib/opencensus-ext-pymysql + unit: -e contrib/opencensus-ext-pyramid + unit: -e contrib/opencensus-ext-requests + unit: -e contrib/opencensus-ext-sqlalchemy + unit: -e contrib/opencensus-ext-stackdriver + unit: -e contrib/opencensus-ext-threading + unit: -e contrib/opencensus-ext-zipkin + unit: -e contrib/opencensus-ext-google-cloud-clientlibs commands = unit: py.test --quiet --cov={envdir}/opencensus --cov=context --cov=contrib --cov-report term-missing --cov-config=.coveragerc --cov-fail-under=97 tests/unit/ context/ contrib/ ; TODO system tests - lint: isort --check-only --diff --recursive . 
- lint: flake8 context/ contrib/ opencensus/ tests/ examples/ - ; lint: - bash ./scripts/pylint.sh - py37-setup: python setup.py check --restructuredtext --strict - py37-docs: bash ./scripts/update_docs.sh - ; TODO deployment From 511289fa4cc4bbd6e013c8a33c5dc570a895ed2c Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 13 Oct 2020 13:52:15 -0400 Subject: [PATCH 66/79] Revert "Release for v0.7.11 (#958)" (#959) --- .isort.cfg | 2 +- .travis.yml | 4 + CHANGELOG.md | 3 - context/opencensus-context/CHANGELOG.md | 2 + context/opencensus-context/version.py | 2 +- contrib/opencensus-correlation/version.py | 2 +- contrib/opencensus-ext-azure/CHANGELOG.md | 3 - .../opencensus/ext/azure/common/exporter.py | 2 +- .../opencensus/ext/azure/common/version.py | 2 +- .../ext/azure/log_exporter/__init__.py | 18 ----- contrib/opencensus-ext-azure/setup.py | 2 +- .../tests/test_azure_standard_metrics.py | 4 +- contrib/opencensus-ext-datadog/CHANGELOG.md | 2 - contrib/opencensus-ext-datadog/setup.py | 2 +- contrib/opencensus-ext-datadog/version.py | 2 +- contrib/opencensus-ext-dbapi/setup.py | 2 +- contrib/opencensus-ext-dbapi/version.py | 2 +- contrib/opencensus-ext-django/setup.py | 2 +- contrib/opencensus-ext-django/version.py | 2 +- contrib/opencensus-ext-flask/CHANGELOG.md | 2 - contrib/opencensus-ext-flask/setup.py | 2 +- contrib/opencensus-ext-flask/version.py | 2 +- contrib/opencensus-ext-gevent/setup.py | 2 +- contrib/opencensus-ext-gevent/version.py | 2 +- .../setup.py | 6 +- .../version.py | 2 +- contrib/opencensus-ext-grpc/setup.py | 2 +- contrib/opencensus-ext-grpc/version.py | 2 +- contrib/opencensus-ext-httplib/CHANGELOG.md | 2 +- contrib/opencensus-ext-httplib/setup.py | 2 +- contrib/opencensus-ext-httplib/version.py | 2 +- contrib/opencensus-ext-jaeger/setup.py | 2 +- contrib/opencensus-ext-jaeger/version.py | 2 +- contrib/opencensus-ext-logging/setup.py | 2 +- contrib/opencensus-ext-logging/version.py | 2 +- contrib/opencensus-ext-mysql/setup.py | 4 +- 
contrib/opencensus-ext-mysql/version.py | 2 +- contrib/opencensus-ext-ocagent/setup.py | 2 +- contrib/opencensus-ext-ocagent/version.py | 2 +- contrib/opencensus-ext-postgresql/setup.py | 2 +- contrib/opencensus-ext-postgresql/version.py | 2 +- contrib/opencensus-ext-prometheus/setup.py | 2 +- contrib/opencensus-ext-prometheus/version.py | 2 +- contrib/opencensus-ext-pymongo/setup.py | 2 +- contrib/opencensus-ext-pymongo/version.py | 2 +- contrib/opencensus-ext-pymysql/setup.py | 4 +- contrib/opencensus-ext-pymysql/version.py | 2 +- contrib/opencensus-ext-pyramid/setup.py | 2 +- contrib/opencensus-ext-pyramid/version.py | 2 +- contrib/opencensus-ext-requests/CHANGELOG.md | 2 +- contrib/opencensus-ext-requests/setup.py | 2 +- contrib/opencensus-ext-requests/version.py | 2 +- contrib/opencensus-ext-sqlalchemy/setup.py | 2 +- contrib/opencensus-ext-sqlalchemy/version.py | 2 +- .../opencensus-ext-stackdriver/CHANGELOG.md | 23 +----- contrib/opencensus-ext-stackdriver/setup.py | 2 +- contrib/opencensus-ext-stackdriver/version.py | 2 +- contrib/opencensus-ext-threading/setup.py | 2 +- contrib/opencensus-ext-threading/version.py | 2 +- contrib/opencensus-ext-zipkin/setup.py | 2 +- contrib/opencensus-ext-zipkin/version.py | 2 +- opencensus/common/transports/async_.py | 2 +- opencensus/common/version/__init__.py | 2 +- opencensus/metrics/transport.py | 2 +- setup.py | 2 +- tox.ini | 79 +++++++++++-------- 66 files changed, 116 insertions(+), 144 deletions(-) diff --git a/.isort.cfg b/.isort.cfg index 834ec46e7..4f378fabf 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -14,4 +14,4 @@ line_length=79 multi_line_output=3 known_future_library = six,six.moves,__future__ known_third_party=google,mock,pymysql,sqlalchemy,psycopg2,mysql,requests,django,pytest,grpc,flask,bitarray,prometheus_client,psutil,pymongo,wrapt,thrift,retrying,pyramid,werkzeug,gevent -known_first_party=opencensus +known_first_party=opencensus \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index 
9a06769a1..dbd282a60 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,3 +15,7 @@ install: script: - tox - touch docs/.nojekyll + +branches: + only: + - master diff --git a/CHANGELOG.md b/CHANGELOG.md index 742ebd370..14ea915ab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,9 +2,6 @@ ## Unreleased -## 0.7.11 -Released 2020-10-13 - - PeriodicMetricTask flush on exit ([#943](https://github.com/census-instrumentation/opencensus-python/pull/943)) diff --git a/context/opencensus-context/CHANGELOG.md b/context/opencensus-context/CHANGELOG.md index 55518cfea..a49592ac1 100644 --- a/context/opencensus-context/CHANGELOG.md +++ b/context/opencensus-context/CHANGELOG.md @@ -2,6 +2,8 @@ ## Unreleased +## opencensus-ext-context 0.1.2 + ## 0.1.2 Released 2020-06-29 diff --git a/context/opencensus-context/version.py b/context/opencensus-context/version.py index 3f601a176..ff18aeb50 100644 --- a/context/opencensus-context/version.py +++ b/context/opencensus-context/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.1.2' +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-correlation/version.py b/contrib/opencensus-correlation/version.py index 8088a1980..deb2f374d 100644 --- a/contrib/opencensus-correlation/version.py +++ b/contrib/opencensus-correlation/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.3.0' +__version__ = '0.4.dev0' diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 31103f11f..7f719df48 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -2,9 +2,6 @@ ## Unreleased -## 1.0.5 -Released 2020-10-13 - - Attach rate metrics via Heartbeat for Web and Function apps ([#930](https://github.com/census-instrumentation/opencensus-python/pull/930)) - Attach rate metrics for VM diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py index afb0ee388..a4c0b9df2 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/exporter.py @@ -67,7 +67,7 @@ def __init__(self, src, dst): def run(self): # pragma: NO COVER # Indicate that this thread is an exporter thread. - # Used to suppress tracking of requests in this thread. + # Used to suppress tracking of requests in this thread execution_context.set_is_exporter(True) src = self.src dst = self.dst diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py index e3ba8af95..71eb269bd 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '1.0.5' +__version__ = '1.0.dev0' diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index 2a7bafcd1..f74d0c24e 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -81,24 +81,6 @@ def _export(self, batch, event=None): # pragma: NO COVER if event: event.set() - def _export(self, batch, event=None): # pragma: NO COVER - try: - if batch: - envelopes = [self.log_record_to_envelope(x) for x in batch] - envelopes = self.apply_telemetry_processors(envelopes) - result = self._transmit(envelopes) - if result > 0: - self.storage.put(envelopes, result) - if event: - if isinstance(event, QueueExitEvent): - self._transmit_from_storage() # send files before exit - return - if len(batch) < self.options.max_batch_size: - self._transmit_from_storage() - finally: - if event: - event.set() - def close(self): self.storage.close() self._worker.stop() diff --git a/contrib/opencensus-ext-azure/setup.py b/contrib/opencensus-ext-azure/setup.py index 11e770fca..d4f228478 100644 --- a/contrib/opencensus-ext-azure/setup.py +++ b/contrib/opencensus-ext-azure/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.11, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'psutil >= 5.6.3', 'requests >= 2.19.0', ], diff --git a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py index 785c987d2..317c0e2c5 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_standard_metrics.py @@ -78,7 +78,7 @@ def test_get_process_private_bytes_metric(self): '\\Process(??APP_WIN32_PROC??)\\Private Bytes') def 
test_get_process_private_bytes(self): - with mock.patch('opencensus.ext.azure.metrics_exporter' + with mock.patch('opencensus.ext.azure.metrics_exporter' + '.standard_metrics.process.PROCESS') as process_mock: memory = collections.namedtuple('memory', 'rss') pmem = memory(rss=100) @@ -90,7 +90,7 @@ def test_get_process_private_bytes(self): @mock.patch('opencensus.ext.azure.metrics_exporter' '.standard_metrics.process.logger') def test_get_process_private_bytes_exception(self, logger_mock): - with mock.patch('opencensus.ext.azure.metrics_exporter' + with mock.patch('opencensus.ext.azure.metrics_exporter' + '.standard_metrics.process.PROCESS') as process_mock: process_mock.memory_info.side_effect = Exception() standard_metrics.ProcessMemoryMetric.get_value() diff --git a/contrib/opencensus-ext-datadog/CHANGELOG.md b/contrib/opencensus-ext-datadog/CHANGELOG.md index c6bcf394c..b93e3f3d1 100644 --- a/contrib/opencensus-ext-datadog/CHANGELOG.md +++ b/contrib/opencensus-ext-datadog/CHANGELOG.md @@ -1,7 +1,5 @@ # Changelog -## Unreleased - ## 0.1.0 Released 2019-11-26 diff --git a/contrib/opencensus-ext-datadog/setup.py b/contrib/opencensus-ext-datadog/setup.py index f87c97931..804bbdcba 100644 --- a/contrib/opencensus-ext-datadog/setup.py +++ b/contrib/opencensus-ext-datadog/setup.py @@ -39,7 +39,7 @@ include_package_data=True, install_requires=[ 'bitarray >= 1.0.1, < 2.0.0', - 'opencensus >= 0.7.6, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'requests >= 2.19.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-datadog/version.py b/contrib/opencensus-ext-datadog/version.py index d323c46d6..f3a64a892 100644 --- a/contrib/opencensus-ext-datadog/version.py +++ b/contrib/opencensus-ext-datadog/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.1.0' +__version__ = '0.1.dev0' diff --git a/contrib/opencensus-ext-dbapi/setup.py b/contrib/opencensus-ext-dbapi/setup.py index 27f430844..f55daf5d6 100644 --- a/contrib/opencensus-ext-dbapi/setup.py +++ b/contrib/opencensus-ext-dbapi/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-dbapi/version.py b/contrib/opencensus-ext-dbapi/version.py index 3f601a176..ff18aeb50 100644 --- a/contrib/opencensus-ext-dbapi/version.py +++ b/contrib/opencensus-ext-dbapi/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.1.2' +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-django/setup.py b/contrib/opencensus-ext-django/setup.py index f6d3b3ab0..b2acf735e 100644 --- a/contrib/opencensus-ext-django/setup.py +++ b/contrib/opencensus-ext-django/setup.py @@ -44,7 +44,7 @@ long_description=open('README.rst').read(), install_requires=[ 'Django >= 1.11', - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-django/version.py b/contrib/opencensus-ext-django/version.py index c652125f7..dffc606db 100644 --- a/contrib/opencensus-ext-django/version.py +++ b/contrib/opencensus-ext-django/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.7.2' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-flask/CHANGELOG.md b/contrib/opencensus-ext-flask/CHANGELOG.md index befd70cef..2b8246fd3 100644 --- a/contrib/opencensus-ext-flask/CHANGELOG.md +++ b/contrib/opencensus-ext-flask/CHANGELOG.md @@ -13,8 +13,6 @@ Released 2019-08-26 - Updated `http.status_code` attribute to be an int. ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) -- Fixes value for `http.route` in Flask middleware - ([#759](https://github.com/census-instrumentation/opencensus-python/pull/759)) ## 0.7.1 Released 2019-08-05 diff --git a/contrib/opencensus-ext-flask/setup.py b/contrib/opencensus-ext-flask/setup.py index 52baa7d38..e3fc3a640 100644 --- a/contrib/opencensus-ext-flask/setup.py +++ b/contrib/opencensus-ext-flask/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'flask >= 0.12.3, < 2.0.0', - 'opencensus >= 0.7.1, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-flask/version.py b/contrib/opencensus-ext-flask/version.py index b7a1f8944..dffc606db 100644 --- a/contrib/opencensus-ext-flask/version.py +++ b/contrib/opencensus-ext-flask/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.7.3' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-gevent/setup.py b/contrib/opencensus-ext-gevent/setup.py index d15a3953b..022df4a14 100644 --- a/contrib/opencensus-ext-gevent/setup.py +++ b/contrib/opencensus-ext-gevent/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'gevent >= 1.3' ], extras_require={}, diff --git a/contrib/opencensus-ext-gevent/version.py b/contrib/opencensus-ext-gevent/version.py index d323c46d6..ff18aeb50 100644 --- a/contrib/opencensus-ext-gevent/version.py +++ b/contrib/opencensus-ext-gevent/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.1.0' +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-google-cloud-clientlibs/setup.py b/contrib/opencensus-ext-google-cloud-clientlibs/setup.py index b9a8ffb87..a66b4c683 100644 --- a/contrib/opencensus-ext-google-cloud-clientlibs/setup.py +++ b/contrib/opencensus-ext-google-cloud-clientlibs/setup.py @@ -39,9 +39,9 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.0, < 1.0.0', - 'opencensus-ext-grpc >= 0.3.0, < 1.0.0', - 'opencensus-ext-requests >= 0.1.2, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus-ext-grpc >= 0.4.dev0, < 1.0.0', + 'opencensus-ext-requests >= 0.2.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-google-cloud-clientlibs/version.py b/contrib/opencensus-ext-google-cloud-clientlibs/version.py index 3f601a176..ff18aeb50 100644 --- a/contrib/opencensus-ext-google-cloud-clientlibs/version.py +++ b/contrib/opencensus-ext-google-cloud-clientlibs/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.1.2' +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-grpc/setup.py b/contrib/opencensus-ext-grpc/setup.py index c3dea1566..31b96b390 100644 --- a/contrib/opencensus-ext-grpc/setup.py +++ b/contrib/opencensus-ext-grpc/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'grpcio >= 1.0.0, < 2.0.0', - 'opencensus >= 0.7.1, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-grpc/version.py b/contrib/opencensus-ext-grpc/version.py index 752777753..dffc606db 100644 --- a/contrib/opencensus-ext-grpc/version.py +++ b/contrib/opencensus-ext-grpc/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.7.1' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-httplib/CHANGELOG.md b/contrib/opencensus-ext-httplib/CHANGELOG.md index 6c59e7ffe..605fd3a33 100644 --- a/contrib/opencensus-ext-httplib/CHANGELOG.md +++ b/contrib/opencensus-ext-httplib/CHANGELOG.md @@ -16,7 +16,7 @@ Released 2019-08-26 ## 0.7.1 Released 2019-08-06 -- Support exporter changes in `opencensus>=0.7.0` + - Support exporter changes in `opencensus>=0.7.0` ## 0.1.3 Released 2019-05-31 diff --git a/contrib/opencensus-ext-httplib/setup.py b/contrib/opencensus-ext-httplib/setup.py index 080c35998..340b43406 100644 --- a/contrib/opencensus-ext-httplib/setup.py +++ b/contrib/opencensus-ext-httplib/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-httplib/version.py b/contrib/opencensus-ext-httplib/version.py index b7a1f8944..dffc606db 100644 --- a/contrib/opencensus-ext-httplib/version.py +++ b/contrib/opencensus-ext-httplib/version.py @@ -12,4 +12,4 @@ # 
See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.7.3' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-jaeger/setup.py b/contrib/opencensus-ext-jaeger/setup.py index 693a92f12..688958f73 100644 --- a/contrib/opencensus-ext-jaeger/setup.py +++ b/contrib/opencensus-ext-jaeger/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.1, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'thrift >= 0.10.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-jaeger/version.py b/contrib/opencensus-ext-jaeger/version.py index 752777753..dffc606db 100644 --- a/contrib/opencensus-ext-jaeger/version.py +++ b/contrib/opencensus-ext-jaeger/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.7.1' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-logging/setup.py b/contrib/opencensus-ext-logging/setup.py index 64bc5a5ed..76cf383d3 100644 --- a/contrib/opencensus-ext-logging/setup.py +++ b/contrib/opencensus-ext-logging/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-logging/version.py b/contrib/opencensus-ext-logging/version.py index d323c46d6..ff18aeb50 100644 --- a/contrib/opencensus-ext-logging/version.py +++ b/contrib/opencensus-ext-logging/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.1.0' +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-mysql/setup.py b/contrib/opencensus-ext-mysql/setup.py index 25540421c..3456658f3 100644 --- a/contrib/opencensus-ext-mysql/setup.py +++ b/contrib/opencensus-ext-mysql/setup.py @@ -40,8 +40,8 @@ long_description=open('README.rst').read(), install_requires=[ 'mysql-connector >= 2.1.6, < 3.0.0', - 'opencensus >= 0.7.0, < 1.0.0', - 'opencensus-ext-dbapi >= 0.1.2, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus-ext-dbapi >= 0.2.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-mysql/version.py b/contrib/opencensus-ext-mysql/version.py index 3f601a176..ff18aeb50 100644 --- a/contrib/opencensus-ext-mysql/version.py +++ b/contrib/opencensus-ext-mysql/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.1.2' +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-ocagent/setup.py b/contrib/opencensus-ext-ocagent/setup.py index 3e34f2369..4ac94a067 100644 --- a/contrib/opencensus-ext-ocagent/setup.py +++ b/contrib/opencensus-ext-ocagent/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'grpcio >= 1.0.0, < 2.0.0', - 'opencensus >= 0.7.1, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'opencensus-proto >= 0.1.0, < 1.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-ocagent/version.py b/contrib/opencensus-ext-ocagent/version.py index 752777753..dffc606db 100644 --- a/contrib/opencensus-ext-ocagent/version.py +++ b/contrib/opencensus-ext-ocagent/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.7.1' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-postgresql/setup.py b/contrib/opencensus-ext-postgresql/setup.py index 111ee03d6..41bd59806 100644 --- a/contrib/opencensus-ext-postgresql/setup.py +++ b/contrib/opencensus-ext-postgresql/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'psycopg2-binary >= 2.7.3.1', ], extras_require={}, diff --git a/contrib/opencensus-ext-postgresql/version.py b/contrib/opencensus-ext-postgresql/version.py index 3f601a176..ff18aeb50 100644 --- a/contrib/opencensus-ext-postgresql/version.py +++ b/contrib/opencensus-ext-postgresql/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.1.2' +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-prometheus/setup.py b/contrib/opencensus-ext-prometheus/setup.py index 8a9e82b3d..2fca7c76f 100644 --- a/contrib/opencensus-ext-prometheus/setup.py +++ b/contrib/opencensus-ext-prometheus/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'prometheus_client >= 0.5.0, < 1.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-prometheus/version.py b/contrib/opencensus-ext-prometheus/version.py index 20fd9b8ed..bf7c8163b 100644 --- a/contrib/opencensus-ext-prometheus/version.py +++ b/contrib/opencensus-ext-prometheus/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.2.1' +__version__ = '0.3.dev0' diff --git a/contrib/opencensus-ext-pymongo/setup.py b/contrib/opencensus-ext-pymongo/setup.py index 5c8936628..63adca99e 100644 --- a/contrib/opencensus-ext-pymongo/setup.py +++ b/contrib/opencensus-ext-pymongo/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.1, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'pymongo >= 3.1.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-pymongo/version.py b/contrib/opencensus-ext-pymongo/version.py index 752777753..dffc606db 100644 --- a/contrib/opencensus-ext-pymongo/version.py +++ b/contrib/opencensus-ext-pymongo/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.7.1' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-pymysql/setup.py b/contrib/opencensus-ext-pymysql/setup.py index 58fcd84ca..f8502034d 100644 --- a/contrib/opencensus-ext-pymysql/setup.py +++ b/contrib/opencensus-ext-pymysql/setup.py @@ -40,8 +40,8 @@ long_description=open('README.rst').read(), install_requires=[ 'PyMySQL >= 0.7.11, < 1.0.0', - 'opencensus >= 0.7.0, < 1.0.0', - 'opencensus-ext-dbapi >= 0.1.2, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', + 'opencensus-ext-dbapi >= 0.2.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-pymysql/version.py b/contrib/opencensus-ext-pymysql/version.py index 3f601a176..ff18aeb50 100644 --- a/contrib/opencensus-ext-pymysql/version.py +++ b/contrib/opencensus-ext-pymysql/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.1.2' +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-pyramid/setup.py b/contrib/opencensus-ext-pyramid/setup.py index 43aba4777..7fcd90ef3 100644 --- a/contrib/opencensus-ext-pyramid/setup.py +++ b/contrib/opencensus-ext-pyramid/setup.py @@ -40,7 +40,7 @@ long_description=open('README.rst').read(), install_requires=[ 'pyramid >= 1.9.1, < 2.0.0', - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-pyramid/version.py b/contrib/opencensus-ext-pyramid/version.py index 752777753..dffc606db 100644 --- a/contrib/opencensus-ext-pyramid/version.py +++ b/contrib/opencensus-ext-pyramid/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.7.1' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-requests/CHANGELOG.md b/contrib/opencensus-ext-requests/CHANGELOG.md index d0b090412..4c2c4cf24 100644 --- a/contrib/opencensus-ext-requests/CHANGELOG.md +++ b/contrib/opencensus-ext-requests/CHANGELOG.md @@ -20,7 +20,7 @@ Released 2019-08-26 ## 0.7.1 Released 2019-08-06 -- Support exporter changes in `opencensus>=0.7.0` + - Support exporter changes in `opencensus>=0.7.0` ## 0.1.2 Released 2019-04-24 diff --git a/contrib/opencensus-ext-requests/setup.py b/contrib/opencensus-ext-requests/setup.py index b04a277fd..37aca2d26 100644 --- a/contrib/opencensus-ext-requests/setup.py +++ b/contrib/opencensus-ext-requests/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.3, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'wrapt >= 1.0.0, < 2.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-requests/version.py b/contrib/opencensus-ext-requests/version.py index b7a1f8944..dffc606db 100644 --- a/contrib/opencensus-ext-requests/version.py +++ 
b/contrib/opencensus-ext-requests/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.7.3' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-sqlalchemy/setup.py b/contrib/opencensus-ext-sqlalchemy/setup.py index 31da80847..30090c4f4 100644 --- a/contrib/opencensus-ext-sqlalchemy/setup.py +++ b/contrib/opencensus-ext-sqlalchemy/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', 'SQLAlchemy >= 1.1.14, < 2.0.0', ], extras_require={}, diff --git a/contrib/opencensus-ext-sqlalchemy/version.py b/contrib/opencensus-ext-sqlalchemy/version.py index 3f601a176..ff18aeb50 100644 --- a/contrib/opencensus-ext-sqlalchemy/version.py +++ b/contrib/opencensus-ext-sqlalchemy/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.1.2' +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-stackdriver/CHANGELOG.md b/contrib/opencensus-ext-stackdriver/CHANGELOG.md index 92869a205..7dd8184aa 100644 --- a/contrib/opencensus-ext-stackdriver/CHANGELOG.md +++ b/contrib/opencensus-ext-stackdriver/CHANGELOG.md @@ -1,8 +1,6 @@ # Changelog -## 0.7.4 -Released 2020-10-13 - +## Unreleased - Change default transporter in stackdriver exporter ([#929](https://github.com/census-instrumentation/opencensus-python/pull/929)) @@ -25,25 +23,6 @@ Released 2019-08-05 - Support exporter changes in `opencensus>=0.7.0` -## 0.7.3 -Released 2020-06-29 - - - Add mean property for distribution values - ([#919](https://github.com/census-instrumentation/opencensus-python/pull/919)) - -## 0.7.2 -Released 2019-08-26 - - - Delete SD integ test metric descriptors - ([#770](https://github.com/census-instrumentation/opencensus-python/pull/770)) - - Updated `http.status_code` attribute to be an int. - ([#755](https://github.com/census-instrumentation/opencensus-python/pull/755)) - -## 0.7.1 -Released 2019-08-05 - - - Support exporter changes in `opencensus>=0.7.0` - ## 0.4.0 Released 2019-05-31 diff --git a/contrib/opencensus-ext-stackdriver/setup.py b/contrib/opencensus-ext-stackdriver/setup.py index 4714779f4..f87f47ab8 100644 --- a/contrib/opencensus-ext-stackdriver/setup.py +++ b/contrib/opencensus-ext-stackdriver/setup.py @@ -42,7 +42,7 @@ 'google-cloud-monitoring >= 0.30.0, < 1.0.0', 'google-cloud-trace >= 0.20.0, < 1.0.0', 'rsa <= 4.0; python_version<="3.4"', - 'opencensus >= 0.7.11, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-stackdriver/version.py b/contrib/opencensus-ext-stackdriver/version.py index d5d5f1a28..dffc606db 100644 --- a/contrib/opencensus-ext-stackdriver/version.py +++ b/contrib/opencensus-ext-stackdriver/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing 
permissions and # limitations under the License. -__version__ = '0.7.4' +__version__ = '0.8.dev0' diff --git a/contrib/opencensus-ext-threading/setup.py b/contrib/opencensus-ext-threading/setup.py index 2f7889e52..963e01e89 100644 --- a/contrib/opencensus-ext-threading/setup.py +++ b/contrib/opencensus-ext-threading/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-threading/version.py b/contrib/opencensus-ext-threading/version.py index 3f601a176..ff18aeb50 100644 --- a/contrib/opencensus-ext-threading/version.py +++ b/contrib/opencensus-ext-threading/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.1.2' +__version__ = '0.2.dev0' diff --git a/contrib/opencensus-ext-zipkin/setup.py b/contrib/opencensus-ext-zipkin/setup.py index 9904384cc..b9d24c277 100644 --- a/contrib/opencensus-ext-zipkin/setup.py +++ b/contrib/opencensus-ext-zipkin/setup.py @@ -39,7 +39,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus >= 0.7.0, < 1.0.0', + 'opencensus >= 0.8.dev0, < 1.0.0', ], extras_require={}, license='Apache-2.0', diff --git a/contrib/opencensus-ext-zipkin/version.py b/contrib/opencensus-ext-zipkin/version.py index ee24582a0..bf7c8163b 100644 --- a/contrib/opencensus-ext-zipkin/version.py +++ b/contrib/opencensus-ext-zipkin/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = '0.2.2' +__version__ = '0.3.dev0' diff --git a/opencensus/common/transports/async_.py b/opencensus/common/transports/async_.py index cc63ba139..56e726119 100644 --- a/opencensus/common/transports/async_.py +++ b/opencensus/common/transports/async_.py @@ -93,7 +93,7 @@ def _thread_main(self): batches to the specified tracing backend using the exporter. """ # Indicate that this thread is an exporter thread. - # Used to suppress tracking of requests in this thread. + # Used to suppress tracking of requests in this thread execution_context.set_is_exporter(True) quit_ = False diff --git a/opencensus/common/version/__init__.py b/opencensus/common/version/__init__.py index 366bd7b59..dffc606db 100644 --- a/opencensus/common/version/__init__.py +++ b/opencensus/common/version/__init__.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.7.11' +__version__ = '0.8.dev0' diff --git a/opencensus/metrics/transport.py b/opencensus/metrics/transport.py index e85be1eee..d27b06f32 100644 --- a/opencensus/metrics/transport.py +++ b/opencensus/metrics/transport.py @@ -80,7 +80,7 @@ def func(*aa, **kw): def run(self): # Indicate that this thread is an exporter thread. - # Used to suppress tracking of requests in this thread. 
+ # Used to suppress tracking of requests in this thread execution_context.set_is_exporter(True) super(PeriodicMetricTask, self).run() diff --git a/setup.py b/setup.py index 30604f4b3..c060e343d 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ include_package_data=True, long_description=open('README.rst').read(), install_requires=[ - 'opencensus-context == 0.1.2', + 'opencensus-context == 0.2.dev0', 'google-api-core >= 1.0.0, < 2.0.0', ], extras_require={}, diff --git a/tox.ini b/tox.ini index 64c5cb604..a460651bf 100644 --- a/tox.ini +++ b/tox.ini @@ -1,43 +1,58 @@ [tox] envlist = - py{27,37}-unit + py{27,34,35,36,37}-unit + py37-lint + py37-setup + py37-docs [testenv] install_command = python -m pip install {opts} {packages} deps = - unit: mock==3.0.5 - unit: pytest==4.6.4 - unit: pytest-cov - unit: retrying - unit: unittest2 - unit: -e context/opencensus-context - unit: -e contrib/opencensus-correlation - unit: -e . - unit: -e contrib/opencensus-ext-azure - unit: -e contrib/opencensus-ext-datadog - unit: -e contrib/opencensus-ext-dbapi - unit: -e contrib/opencensus-ext-django - unit: -e contrib/opencensus-ext-flask - unit: -e contrib/opencensus-ext-gevent - unit: -e contrib/opencensus-ext-grpc - unit: -e contrib/opencensus-ext-httplib - unit: -e contrib/opencensus-ext-jaeger - unit: -e contrib/opencensus-ext-logging - unit: -e contrib/opencensus-ext-mysql - unit: -e contrib/opencensus-ext-ocagent - unit: -e contrib/opencensus-ext-postgresql - unit: -e contrib/opencensus-ext-prometheus - unit: -e contrib/opencensus-ext-pymongo - unit: -e contrib/opencensus-ext-pymysql - unit: -e contrib/opencensus-ext-pyramid - unit: -e contrib/opencensus-ext-requests - unit: -e contrib/opencensus-ext-sqlalchemy - unit: -e contrib/opencensus-ext-stackdriver - unit: -e contrib/opencensus-ext-threading - unit: -e contrib/opencensus-ext-zipkin - unit: -e contrib/opencensus-ext-google-cloud-clientlibs + unit,lint: mock==3.0.5 + unit,lint: pytest==4.6.4 + unit,lint: pytest-cov + 
unit,lint: retrying + unit,lint: unittest2 + unit,lint,py37-setup,docs: -e context/opencensus-context + unit,lint,docs: -e contrib/opencensus-correlation + unit,lint,docs: -e . + unit,lint: -e contrib/opencensus-ext-azure + unit,lint: -e contrib/opencensus-ext-datadog + unit,lint: -e contrib/opencensus-ext-dbapi + unit,lint: -e contrib/opencensus-ext-django + unit,lint: -e contrib/opencensus-ext-flask + unit,lint: -e contrib/opencensus-ext-gevent + unit,lint: -e contrib/opencensus-ext-grpc + unit,lint: -e contrib/opencensus-ext-httplib + unit,lint: -e contrib/opencensus-ext-jaeger + unit,lint: -e contrib/opencensus-ext-logging + unit,lint: -e contrib/opencensus-ext-mysql + unit,lint: -e contrib/opencensus-ext-ocagent + unit,lint: -e contrib/opencensus-ext-postgresql + unit,lint: -e contrib/opencensus-ext-prometheus + unit,lint: -e contrib/opencensus-ext-pymongo + unit,lint: -e contrib/opencensus-ext-pymysql + unit,lint: -e contrib/opencensus-ext-pyramid + unit,lint: -e contrib/opencensus-ext-requests + unit,lint: -e contrib/opencensus-ext-sqlalchemy + unit,lint: -e contrib/opencensus-ext-stackdriver + unit,lint: -e contrib/opencensus-ext-threading + unit,lint: -e contrib/opencensus-ext-zipkin + unit,lint: -e contrib/opencensus-ext-google-cloud-clientlibs + lint: flake8 + lint: isort ~= 4.3.21 + setup: docutils + setup: pygments + docs: setuptools >= 36.4.0 + docs: sphinx >= 1.6.3 commands = unit: py.test --quiet --cov={envdir}/opencensus --cov=context --cov=contrib --cov-report term-missing --cov-config=.coveragerc --cov-fail-under=97 tests/unit/ context/ contrib/ ; TODO system tests + lint: isort --check-only --diff --recursive . 
+ lint: flake8 context/ contrib/ opencensus/ tests/ examples/ + ; lint: - bash ./scripts/pylint.sh + py37-setup: python setup.py check --restructuredtext --strict + py37-docs: bash ./scripts/update_docs.sh + ; TODO deployment From 812105e6021f47fbfa77fe7b32e1797a83bbe7ad Mon Sep 17 00:00:00 2001 From: Evan Smith Date: Mon, 26 Oct 2020 16:21:50 +0100 Subject: [PATCH 67/79] Fix comment, AsyncTransport is the default (#964) --- .../opencensus/ext/stackdriver/trace_exporter/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py index cf7fe0d79..b09e69647 100644 --- a/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-stackdriver/opencensus/ext/stackdriver/trace_exporter/__init__.py @@ -175,8 +175,8 @@ class StackdriverExporter(base_exporter.Exporter): :param transport: Class for creating new transport objects. It should extend from the base_exporter :class:`.Transport` type and implement :meth:`.Transport.export`. Defaults to - :class:`.SyncTransport`. The other option is - :class:`.AsyncTransport`. + :class:`.AsyncTransport`. The other option is + :class:`.SyncTransport`. 
""" def __init__(self, client=None, project_id=None, From 1517fbb5bf9e58fca4cb48082eecb9b5d20265d0 Mon Sep 17 00:00:00 2001 From: Aaron Abbott Date: Fri, 4 Dec 2020 15:21:09 -0500 Subject: [PATCH 68/79] replace travis (part 1): add empty github workflow (#979) --- .github/workflows/build.yml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 .github/workflows/build.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..0e5b28d73 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,14 @@ +name: Build + +on: + pull_request: + branches: + - master + +jobs: + build: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + - name: Build and test + run: echo "Hello world!" From 868640cfcbdaa64679e38b8541c988675bcdddf0 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Fri, 4 Dec 2020 17:26:52 -0500 Subject: [PATCH 69/79] Remove @c24t and @reyang from CODEOWNERS (#978) --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index a94f95b81..fa94a04c4 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,4 +2,4 @@ # This file controls who is tagged for review for any given pull request. 
# For anything not explicitly taken by someone else: -* @census-instrumentation/global-owners @aabmass @c24t @hectorhdzg @lzchen @reyang @songy23 @victoraugustolls +* @census-instrumentation/global-owners @aabmass @hectorhdzg @lzchen @songy23 @victoraugustolls From 0d0185397e82f74ac86060402da23ee25cc6c7d1 Mon Sep 17 00:00:00 2001 From: Aaron Abbott Date: Sat, 5 Dec 2020 11:17:32 -0500 Subject: [PATCH 70/79] replace travis (part 2): run tests in github actions (#980) * replace travis (part 2): run tests in github actions * undo azure test change * log WEBSITE_SITE_NAME * log more * mock out azure metadata server, which exists in GHA environment * split out lint, setup, docs * Revert "split out lint, setup, docs" This reverts commit 44b1c7d1b1aa4b6e263e48faa400ed0996e14780. * skip test --- .github/workflows/build.yml | 34 ++++++++++++++++--- .travis.yml | 21 ------------ .../tests/test_azure_heartbeat_metrics.py | 2 ++ 3 files changed, 32 insertions(+), 25 deletions(-) delete mode 100644 .travis.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0e5b28d73..3c3cf5c45 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -7,8 +7,34 @@ on: jobs: build: - runs-on: ubuntu-20.04 + # 18.04 needed for python3.4 + runs-on: ubuntu-18.04 + env: + # We use these variables to convert between tox and GHA version literals + py27: 2.7 + py34: 3.4 + py35: 3.5 + py36: 3.6 + py37: 3.7 + strategy: + # ensures the entire test matrix is run, even if one permutation fails + fail-fast: false + matrix: + python-version: [py27, py34, py35, py36, py37] steps: - - uses: actions/checkout@v2 - - name: Build and test - run: echo "Hello world!" 
+ - name: Checkout code + uses: actions/checkout@v2 + - name: Set up Python ${{ env[matrix.python-version] }} + uses: actions/setup-python@v2 + with: + python-version: ${{ env[matrix.python-version] }} + - name: Install tox + run: pip install -U tox-factor + - name: Cache tox environment + uses: actions/cache@v2 + with: + path: .tox + # bump version prefix to fully reset caches + key: v1-tox-${{ matrix.python-version }}-${{ hashFiles('tox.ini') }} + - name: run tox + run: tox -f ${{ matrix.python-version }} diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index dbd282a60..000000000 --- a/.travis.yml +++ /dev/null @@ -1,21 +0,0 @@ -dist: xenial - -language: python - -python: - - '2.7' - - '3.4' - - '3.5' - - '3.6' - - '3.7' - -install: - - pip install tox-travis - -script: - - tox - - touch docs/.nojekyll - -branches: - only: - - master diff --git a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py index a8ad2d564..f3a1b004d 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py @@ -89,6 +89,8 @@ def test_heartbeat_metric_init(self): self.assertFalse(metric.init) self.assertEqual(len(metric.properties), 0) + # TODO @lzchen #981 + @unittest.skip("Failing because github workflow runs on Azure") def test_heartbeat_metric_get_metric_init(self): metric = heartbeat_metrics.HeartbeatMetric() self.assertFalse(metric.init) From 577b17675fddbed62e566d7027891f21303e9610 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Mon, 7 Dec 2020 11:40:55 -0500 Subject: [PATCH 71/79] test (#982) --- .../tests/test_azure_heartbeat_metrics.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py index f3a1b004d..08e14f808 100644 --- 
a/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_heartbeat_metrics.py @@ -89,9 +89,13 @@ def test_heartbeat_metric_init(self): self.assertFalse(metric.init) self.assertEqual(len(metric.properties), 0) - # TODO @lzchen #981 - @unittest.skip("Failing because github workflow runs on Azure") - def test_heartbeat_metric_get_metric_init(self): + @mock.patch( + 'requests.get', + throw(requests.exceptions.ConnectionError) + ) + @mock.patch('os.environ.get') + def test_heartbeat_metric_get_metric_init(self, environ_mock): + environ_mock.return_value = None metric = heartbeat_metrics.HeartbeatMetric() self.assertFalse(metric.init) metrics = metric.get_metrics() From a3931d9239bd44855d0dc4ead418677dc729907d Mon Sep 17 00:00:00 2001 From: Aaron Abbott Date: Tue, 8 Dec 2020 10:54:58 -0700 Subject: [PATCH 72/79] Extract byte size from proto-plus messages (#976) --- contrib/opencensus-ext-grpc/CHANGELOG.md | 3 +++ .../opencensus/ext/grpc/utils.py | 13 ++++++++++++- .../tests/test_server_interceptor.py | 14 ++++++++++++++ 3 files changed, 29 insertions(+), 1 deletion(-) diff --git a/contrib/opencensus-ext-grpc/CHANGELOG.md b/contrib/opencensus-ext-grpc/CHANGELOG.md index 6ff58e2d5..d06dd0866 100644 --- a/contrib/opencensus-ext-grpc/CHANGELOG.md +++ b/contrib/opencensus-ext-grpc/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased +- Extract byte size from proto-plus messages + ([#976](https://github.com/census-instrumentation/opencensus-python/pull/976)) + ## 0.7.1 Released 2019-08-05 diff --git a/contrib/opencensus-ext-grpc/opencensus/ext/grpc/utils.py b/contrib/opencensus-ext-grpc/opencensus/ext/grpc/utils.py index 0cc213bb3..7a36c3d84 100644 --- a/contrib/opencensus-ext-grpc/opencensus/ext/grpc/utils.py +++ b/contrib/opencensus-ext-grpc/opencensus/ext/grpc/utils.py @@ -6,6 +6,17 @@ from opencensus.trace import execution_context, time_event +def extract_byte_size(proto_message): + """Gets the byte size from a 
google.protobuf or proto-plus message""" + if hasattr(proto_message, "ByteSize"): + # google.protobuf message + return proto_message.ByteSize() + if hasattr(type(proto_message), "pb"): + # proto-plus message + return type(proto_message).pb(proto_message).ByteSize() + return None + + def add_message_event(proto_message, span, message_event_type, message_id=1): """Adds a MessageEvent to the span based off of the given protobuf message @@ -15,7 +26,7 @@ def add_message_event(proto_message, span, message_event_type, message_id=1): datetime.utcnow(), message_id, type=message_event_type, - uncompressed_size_bytes=proto_message.ByteSize() + uncompressed_size_bytes=extract_byte_size(proto_message), ) ) diff --git a/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py b/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py index 680de1002..850c036fa 100644 --- a/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py +++ b/contrib/opencensus-ext-grpc/tests/test_server_interceptor.py @@ -167,6 +167,20 @@ def test__wrap_rpc_behavior_none(self): new_handler = server_interceptor._wrap_rpc_behavior(None, lambda: None) self.assertEqual(new_handler, None) + def test_extract_byte_size(self): + # should work with a google.protobuf message + google_protobuf_mock = mock.Mock() + google_protobuf_mock.ByteSize.return_value = 5 + self.assertEqual(grpc_utils.extract_byte_size(google_protobuf_mock), 5) + + # should work with a proto-plus style message + protoplus_protobuf_mock = mock.Mock(spec=[]) + type(protoplus_protobuf_mock).pb = mock.Mock() + type(protoplus_protobuf_mock).pb.return_value.ByteSize.return_value = 5 + self.assertEqual( + grpc_utils.extract_byte_size(protoplus_protobuf_mock), 5 + ) + class MockTracer(object): def __init__(self, *args, **kwargs): From 82deeb3873373e952e709da77f80f29d9ba03098 Mon Sep 17 00:00:00 2001 From: Aaron Abbott Date: Tue, 8 Dec 2020 11:53:37 -0700 Subject: [PATCH 73/79] run CI workflow on master branch commits as well (#983) --- 
.github/workflows/build.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3c3cf5c45..79688a726 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,6 +1,9 @@ name: Build on: + push: + branches-ignore: + - 'release/*' pull_request: branches: - master From 2d16ea41e0041bd546a271b8199b712220e1aed5 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 8 Dec 2020 19:10:36 -0500 Subject: [PATCH 74/79] Remove blacklist sensitive language (#977) --- .pylintrc | 4 +- CHANGELOG.md | 2 + README.rst | 8 ++-- .../examples/datadog.py | 2 +- contrib/opencensus-ext-django/CHANGELOG.md | 3 ++ .../opencensus/ext/django/middleware.py | 24 +++++------ .../tests/test_django_middleware.py | 12 +++--- contrib/opencensus-ext-flask/CHANGELOG.md | 3 ++ contrib/opencensus-ext-flask/README.rst | 2 +- .../opencensus/ext/flask/flask_middleware.py | 40 +++++++++++-------- .../tests/test_flask_middleware.py | 6 +-- .../opencensus/ext/httplib/trace.py | 6 +-- .../tests/test_httplib_trace.py | 4 +- contrib/opencensus-ext-pyramid/CHANGELOG.md | 3 ++ .../opencensus/ext/pyramid/config.py | 2 +- .../ext/pyramid/pyramid_middleware.py | 8 ++-- .../tests/test_pyramid_config.py | 10 ++--- .../tests/test_pyramid_middleware.py | 4 +- contrib/opencensus-ext-requests/README.rst | 2 +- .../opencensus/ext/requests/trace.py | 12 +++--- .../tests/test_requests_trace.py | 8 ++-- docs/trace/usage.rst | 20 +++++----- opencensus/trace/utils.py | 38 +++++++++--------- tests/system/trace/django/app/settings.py | 2 +- tests/unit/trace/test_ext_utils.py | 12 +++--- 25 files changed, 128 insertions(+), 109 deletions(-) diff --git a/.pylintrc b/.pylintrc index 417d372ee..e34531789 100644 --- a/.pylintrc +++ b/.pylintrc @@ -5,11 +5,11 @@ # run arbitrary code. extension-pkg-whitelist= -# Add files or directories to the blacklist. They should be base names, not +# Add files or directories to the excludelist. 
They should be base names, not # paths. ignore=CVS -# Add files or directories matching the regex patterns to the blacklist. The +# Add files or directories matching the regex patterns to the excludelist. The # regex matches against base names, not paths. ignore-patterns= diff --git a/CHANGELOG.md b/CHANGELOG.md index 14ea915ab..59d18767f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,8 @@ - PeriodicMetricTask flush on exit ([#943](https://github.com/census-instrumentation/opencensus-python/pull/943)) +- Change blacklist to excludelist +([#977](https://github.com/census-instrumentation/opencensus-python/pull/977)) ## 0.7.10 Released 2020-06-29 diff --git a/README.rst b/README.rst index 3532ec6c1..4bc43845d 100644 --- a/README.rst +++ b/README.rst @@ -118,9 +118,9 @@ Customization There are several things you can customize in OpenCensus: -* **Blacklist**, which excludes certain hosts and paths from being tracked. +* **Excludelist**, which excludes certain hosts and paths from being tracked. By default, the health check path for the App Engine flexible environment is - not tracked, you can turn it on by excluding it from the blacklist setting. + not tracked, you can turn it on by excluding it from the excludelist setting. * **Exporter**, which sends the traces. By default, the traces are printed to stdout in JSON format. 
You can choose @@ -174,8 +174,8 @@ information, please read the 'OPENCENSUS': { 'TRACE': { - 'BLACKLIST_HOSTNAMES': ['localhost', '127.0.0.1'], - 'BLACKLIST_PATHS': ['_ah/health'], + 'EXCLUDELIST_HOSTNAMES': ['localhost', '127.0.0.1'], + 'EXCLUDELIST_PATHS': ['_ah/health'], 'SAMPLER': 'opencensus.trace.samplers.ProbabilitySampler(rate=1)', 'EXPORTER': '''opencensus.ext.ocagent.trace_exporter.TraceExporter( service_name='foobar', diff --git a/contrib/opencensus-ext-datadog/examples/datadog.py b/contrib/opencensus-ext-datadog/examples/datadog.py index d415323e6..a74e67c44 100644 --- a/contrib/opencensus-ext-datadog/examples/datadog.py +++ b/contrib/opencensus-ext-datadog/examples/datadog.py @@ -6,7 +6,7 @@ app = Flask(__name__) middleware = FlaskMiddleware(app, - blacklist_paths=['/healthz'], + excludelist_paths=['/healthz'], sampler=AlwaysOnSampler(), exporter=DatadogTraceExporter( Options(service='python-export-test', diff --git a/contrib/opencensus-ext-django/CHANGELOG.md b/contrib/opencensus-ext-django/CHANGELOG.md index 79d6d9a43..0a92e17f0 100644 --- a/contrib/opencensus-ext-django/CHANGELOG.md +++ b/contrib/opencensus-ext-django/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased +- Change blacklist to excludelist +([#977](https://github.com/census-instrumentation/opencensus-python/pull/977)) + ## 0.7.2 Released 2019-09-30 diff --git a/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py b/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py index 53c26131c..e3774dbbd 100644 --- a/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py +++ b/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py @@ -46,8 +46,8 @@ REQUEST_THREAD_LOCAL_KEY = 'django_request' SPAN_THREAD_LOCAL_KEY = 'django_span' -BLACKLIST_PATHS = 'BLACKLIST_PATHS' -BLACKLIST_HOSTNAMES = 'BLACKLIST_HOSTNAMES' +EXCLUDELIST_PATHS = 'EXCLUDELIST_PATHS' +EXCLUDELIST_HOSTNAMES = 'EXCLUDELIST_HOSTNAMES' log = logging.getLogger(__name__) @@ -160,9 +160,9 @@ def 
__init__(self, get_response=None): if isinstance(self.propagator, six.string_types): self.propagator = configuration.load(self.propagator) - self.blacklist_paths = settings.get(BLACKLIST_PATHS, None) + self.excludelist_paths = settings.get(EXCLUDELIST_PATHS, None) - self.blacklist_hostnames = settings.get(BLACKLIST_HOSTNAMES, None) + self.excludelist_hostnames = settings.get(EXCLUDELIST_HOSTNAMES, None) if django.VERSION >= (2,): # pragma: NO COVER connection.execute_wrappers.append(_trace_db_call) @@ -173,8 +173,8 @@ def process_request(self, request): :type request: :class:`~django.http.request.HttpRequest` :param request: Django http request. """ - # Do not trace if the url is blacklisted - if utils.disable_tracing_url(request.path, self.blacklist_paths): + # Do not trace if the url is excludelisted + if utils.disable_tracing_url(request.path, self.excludelist_paths): return # Add the request to thread local @@ -183,8 +183,8 @@ def process_request(self, request): request) execution_context.set_opencensus_attr( - 'blacklist_hostnames', - self.blacklist_hostnames) + 'excludelist_hostnames', + self.excludelist_hostnames) try: # Start tracing this request @@ -234,8 +234,8 @@ def process_view(self, request, view_func, *args, **kwargs): function name add set it as the span name. 
""" - # Do not trace if the url is blacklisted - if utils.disable_tracing_url(request.path, self.blacklist_paths): + # Do not trace if the url is excludelisted + if utils.disable_tracing_url(request.path, self.excludelist_paths): return try: @@ -248,8 +248,8 @@ def process_view(self, request, view_func, *args, **kwargs): log.error('Failed to trace request', exc_info=True) def process_response(self, request, response): - # Do not trace if the url is blacklisted - if utils.disable_tracing_url(request.path, self.blacklist_paths): + # Do not trace if the url is excludelisted + if utils.disable_tracing_url(request.path, self.excludelist_paths): return response try: diff --git a/contrib/opencensus-ext-django/tests/test_django_middleware.py b/contrib/opencensus-ext-django/tests/test_django_middleware.py index 714e01eeb..dacf1545f 100644 --- a/contrib/opencensus-ext-django/tests/test_django_middleware.py +++ b/contrib/opencensus-ext-django/tests/test_django_middleware.py @@ -127,17 +127,17 @@ def test_process_request(self): self.assertEqual(span.name, 'mock.mock.Mock') - def test_blacklist_path(self): + def test_excludelist_path(self): from opencensus.ext.django import middleware execution_context.clear() - blacklist_paths = ['test_blacklist_path'] + excludelist_paths = ['test_excludelist_path'] settings = type('Test', (object,), {}) settings.OPENCENSUS = { 'TRACE': { 'SAMPLER': 'opencensus.trace.samplers.AlwaysOnSampler()', # noqa - 'BLACKLIST_PATHS': blacklist_paths, + 'EXCLUDELIST_PATHS': excludelist_paths, 'EXPORTER': mock.Mock(), } } @@ -148,11 +148,11 @@ def test_blacklist_path(self): with patch_settings: middleware_obj = middleware.OpencensusMiddleware() - django_request = RequestFactory().get('/test_blacklist_path') + django_request = RequestFactory().get('/test_excludelist_path') disabled = utils.disable_tracing_url(django_request.path, - blacklist_paths) + excludelist_paths) self.assertTrue(disabled) - self.assertEqual(middleware_obj.blacklist_paths, 
blacklist_paths) + self.assertEqual(middleware_obj.excludelist_paths, excludelist_paths) # test process_request middleware_obj.process_request(django_request) diff --git a/contrib/opencensus-ext-flask/CHANGELOG.md b/contrib/opencensus-ext-flask/CHANGELOG.md index 2b8246fd3..a5808a446 100644 --- a/contrib/opencensus-ext-flask/CHANGELOG.md +++ b/contrib/opencensus-ext-flask/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased +- Change blacklist to excludelist +([#977](https://github.com/census-instrumentation/opencensus-python/pull/977)) + ## 0.7.3 Released 2019-10-01 diff --git a/contrib/opencensus-ext-flask/README.rst b/contrib/opencensus-ext-flask/README.rst index 797e2bbbc..1252f291a 100644 --- a/contrib/opencensus-ext-flask/README.rst +++ b/contrib/opencensus-ext-flask/README.rst @@ -22,7 +22,7 @@ Usage from opencensus.ext.flask.flask_middleware import FlaskMiddleware app = Flask(__name__) - middleware = FlaskMiddleware(app, blacklist_paths=['_ah/health']) + middleware = FlaskMiddleware(app, excludelist_paths=['_ah/health']) @app.route('/') def hello(): diff --git a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py index 39b220e02..8e1d6dbfe 100644 --- a/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py +++ b/contrib/opencensus-ext-flask/opencensus/ext/flask/flask_middleware.py @@ -40,8 +40,8 @@ HTTP_URL = attributes_helper.COMMON_ATTRIBUTES['HTTP_URL'] HTTP_STATUS_CODE = attributes_helper.COMMON_ATTRIBUTES['HTTP_STATUS_CODE'] -BLACKLIST_PATHS = 'BLACKLIST_PATHS' -BLACKLIST_HOSTNAMES = 'BLACKLIST_HOSTNAMES' +EXCLUDELIST_PATHS = 'EXCLUDELIST_PATHS' +EXCLUDELIST_HOSTNAMES = 'EXCLUDELIST_HOSTNAMES' log = logging.getLogger(__name__) @@ -52,8 +52,8 @@ class FlaskMiddleware(object): :type app: :class: `~flask.Flask` :param app: A flask application. - :type blacklist_paths: list - :param blacklist_paths: Paths that do not trace. 
+ :type excludelist_paths: list + :param excludelist_paths: Paths that do not trace. :type sampler: :class:`~opencensus.trace.samplers.base.Sampler` :param sampler: A sampler. It should extend from the base @@ -76,10 +76,10 @@ class FlaskMiddleware(object): :class:`.TextFormatPropagator`. """ - def __init__(self, app=None, blacklist_paths=None, sampler=None, + def __init__(self, app=None, excludelist_paths=None, sampler=None, exporter=None, propagator=None): self.app = app - self.blacklist_paths = blacklist_paths + self.excludelist_paths = excludelist_paths self.sampler = sampler self.exporter = exporter self.propagator = propagator @@ -112,10 +112,10 @@ def init_app(self, app): if isinstance(self.propagator, six.string_types): self.propagator = configuration.load(self.propagator) - self.blacklist_paths = settings.get(BLACKLIST_PATHS, - self.blacklist_paths) + self.excludelist_paths = settings.get(EXCLUDELIST_PATHS, + self.excludelist_paths) - self.blacklist_hostnames = settings.get(BLACKLIST_HOSTNAMES, None) + self.excludelist_hostnames = settings.get(EXCLUDELIST_HOSTNAMES, None) self.setup_trace() @@ -129,8 +129,10 @@ def _before_request(self): See: http://flask.pocoo.org/docs/0.12/api/#flask.Flask.before_request """ - # Do not trace if the url is blacklisted - if utils.disable_tracing_url(flask.request.url, self.blacklist_paths): + # Do not trace if the url is in the exclude list + if utils.disable_tracing_url( + flask.request.url, self.excludelist_paths + ): return try: @@ -161,8 +163,8 @@ def _before_request(self): HTTP_URL, str(flask.request.url) ) execution_context.set_opencensus_attr( - 'blacklist_hostnames', - self.blacklist_hostnames + 'excludelist_hostnames', + self.excludelist_hostnames ) except Exception: # pragma: NO COVER log.error('Failed to trace request', exc_info=True) @@ -172,8 +174,10 @@ def _after_request(self, response): See: http://flask.pocoo.org/docs/0.12/api/#flask.Flask.after_request """ - # Do not trace if the url is blacklisted - if 
utils.disable_tracing_url(flask.request.url, self.blacklist_paths): + # Do not trace if the url is in the exclude list + if utils.disable_tracing_url( + flask.request.url, self.excludelist_paths + ): return response try: @@ -193,8 +197,10 @@ def _after_request(self, response): return response def _teardown_request(self, exception): - # Do not trace if the url is blacklisted - if utils.disable_tracing_url(flask.request.url, self.blacklist_paths): + # Do not trace if the url is in the exclude list + if utils.disable_tracing_url( + flask.request.url, self.excludelist_paths + ): return try: diff --git a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py index 55b54c33d..4ef5a9982 100644 --- a/contrib/opencensus-ext-flask/tests/test_flask_middleware.py +++ b/contrib/opencensus-ext-flask/tests/test_flask_middleware.py @@ -165,14 +165,14 @@ def test__before_request(self): span_context = tracer.span_context self.assertEqual(span_context.trace_id, trace_id) - def test__before_request_blacklist(self): + def test__before_request_excludelist(self): flask_trace_header = 'traceparent' trace_id = '2dd43a1d6b2549c6bc2a1a54c2fc0b05' span_id = '6e0c63257de34c92' flask_trace_id = '00-{}-{}-00'.format(trace_id, span_id) app = self.create_app() - # Use the AlwaysOnSampler here to prove that the blacklist takes + # Use the AlwaysOnSampler here to prove that the excludelist takes # precedence over the sampler flask_middleware.FlaskMiddleware(app=app, sampler=samplers.AlwaysOnSampler()) @@ -349,7 +349,7 @@ def test__after_request_invalid_url(self): self.assertEqual(span.attributes, expected_attributes) assert isinstance(span.parent_span, base.NullContextManager) - def test__after_request_blacklist(self): + def test__after_request_excludelist(self): flask_trace_header = 'traceparent' trace_id = '2dd43a1d6b2549c6bc2a1a54c2fc0b05' span_id = '6e0c63257de34c92' diff --git 
a/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py b/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py index 9df2b6308..5ab3faa3e 100644 --- a/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py +++ b/contrib/opencensus-ext-httplib/opencensus/ext/httplib/trace.py @@ -65,10 +65,10 @@ def call(self, method, url, body, headers, *args, **kwargs): return request_func(self, method, url, body, headers, *args, **kwargs) _tracer = execution_context.get_opencensus_tracer() - blacklist_hostnames = execution_context.get_opencensus_attr( - 'blacklist_hostnames') + excludelist_hostnames = execution_context.get_opencensus_attr( + 'excludelist_hostnames') dest_url = '{}:{}'.format(self.host, self.port) - if utils.disable_tracing_hostname(dest_url, blacklist_hostnames): + if utils.disable_tracing_hostname(dest_url, excludelist_hostnames): return request_func(self, method, url, body, headers, *args, **kwargs) _span = _tracer.start_span() diff --git a/contrib/opencensus-ext-httplib/tests/test_httplib_trace.py b/contrib/opencensus-ext-httplib/tests/test_httplib_trace.py index eed7d8f42..cf0a30e94 100644 --- a/contrib/opencensus-ext-httplib/tests/test_httplib_trace.py +++ b/contrib/opencensus-ext-httplib/tests/test_httplib_trace.py @@ -103,7 +103,7 @@ def test_wrap_httplib_request(self): self.assertEqual(span_module.SpanKind.CLIENT, mock_tracer.span.span_kind) - def test_wrap_httplib_request_blacklist_ok(self): + def test_wrap_httplib_request_excludelist_ok(self): mock_span = mock.Mock() span_id = '1234' mock_span.span_id = span_id @@ -139,7 +139,7 @@ def test_wrap_httplib_request_blacklist_ok(self): 'traceparent': '00-123-456-01', }) - def test_wrap_httplib_request_blacklist_nok(self): + def test_wrap_httplib_request_excludelist_nok(self): mock_span = mock.Mock() span_id = '1234' mock_span.span_id = span_id diff --git a/contrib/opencensus-ext-pyramid/CHANGELOG.md b/contrib/opencensus-ext-pyramid/CHANGELOG.md index fd4831563..c5dc96999 100644 --- 
a/contrib/opencensus-ext-pyramid/CHANGELOG.md +++ b/contrib/opencensus-ext-pyramid/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased +- Change blacklist to excludelist +([#977](https://github.com/census-instrumentation/opencensus-python/pull/977)) + ## 0.7.1 Released 2019-08-26 diff --git a/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/config.py b/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/config.py index 2a119cfb2..575432a2b 100644 --- a/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/config.py +++ b/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/config.py @@ -21,7 +21,7 @@ 'PROPAGATOR': trace_context_http_header_format.TraceContextPropagator(), # https://cloud.google.com/appengine/docs/flexible/python/ # how-instances-are-managed#health_checking - 'BLACKLIST_PATHS': ['_ah/health'], + 'EXCLUDELIST_PATHS': ['_ah/health'], } diff --git a/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py b/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py index 3b853ddc5..76fcd59c7 100644 --- a/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py +++ b/contrib/opencensus-ext-pyramid/opencensus/ext/pyramid/pyramid_middleware.py @@ -27,7 +27,7 @@ HTTP_URL = attributes_helper.COMMON_ATTRIBUTES['HTTP_URL'] HTTP_STATUS_CODE = attributes_helper.COMMON_ATTRIBUTES['HTTP_STATUS_CODE'] -BLACKLIST_PATHS = 'BLACKLIST_PATHS' +EXCLUDELIST_PATHS = 'EXCLUDELIST_PATHS' log = logging.getLogger(__name__) @@ -62,7 +62,7 @@ def __init__(self, handler, registry): self.exporter = settings.EXPORTER self.propagator = settings.PROPAGATOR - self._blacklist_paths = settings.BLACKLIST_PATHS + self._excludelist_paths = settings.EXCLUDELIST_PATHS def __call__(self, request): self._before_request(request) @@ -74,7 +74,7 @@ def __call__(self, request): return response def _before_request(self, request): - if utils.disable_tracing_url(request.path, self._blacklist_paths): + if utils.disable_tracing_url(request.path, 
self._excludelist_paths): return try: @@ -113,7 +113,7 @@ def _before_request(self, request): log.error('Failed to trace request', exc_info=True) def _after_request(self, request, response): - if utils.disable_tracing_url(request.path, self._blacklist_paths): + if utils.disable_tracing_url(request.path, self._excludelist_paths): return try: diff --git a/contrib/opencensus-ext-pyramid/tests/test_pyramid_config.py b/contrib/opencensus-ext-pyramid/tests/test_pyramid_config.py index f31ad66e9..4ff7addb0 100644 --- a/contrib/opencensus-ext-pyramid/tests/test_pyramid_config.py +++ b/contrib/opencensus-ext-pyramid/tests/test_pyramid_config.py @@ -29,14 +29,14 @@ def test_trace_settings_default(self): assert trace_settings.SAMPLER == default_config['SAMPLER'] assert trace_settings.EXPORTER == default_config['EXPORTER'] assert trace_settings.PROPAGATOR == default_config['PROPAGATOR'] - assert trace_settings.BLACKLIST_PATHS == default_config[ - 'BLACKLIST_PATHS'] + assert trace_settings.EXCLUDELIST_PATHS == default_config[ + 'EXCLUDELIST_PATHS'] def test_trace_settings_override(self): mock_sampler = mock.Mock() mock_exporter = mock.Mock() mock_propagator = mock.Mock() - mock_blacklist_paths = ['foo/bar'] + mock_excludelist_paths = ['foo/bar'] registry = mock.Mock() registry.settings = { @@ -45,7 +45,7 @@ def test_trace_settings_override(self): 'SAMPLER': mock_sampler, 'EXPORTER': mock_exporter, 'PROPAGATOR': mock_propagator, - 'BLACKLIST_PATHS': mock_blacklist_paths, + 'EXCLUDELIST_PATHS': mock_excludelist_paths, }, }, } @@ -55,7 +55,7 @@ def test_trace_settings_override(self): assert trace_settings.SAMPLER == mock_sampler assert trace_settings.EXPORTER == mock_exporter assert trace_settings.PROPAGATOR == mock_propagator - assert trace_settings.BLACKLIST_PATHS == mock_blacklist_paths + assert trace_settings.EXCLUDELIST_PATHS == mock_excludelist_paths def test_trace_settings_invalid(self): registry = mock.Mock() diff --git 
a/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py b/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py index 6cfd9dd8b..63121b740 100644 --- a/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py +++ b/contrib/opencensus-ext-pyramid/tests/test_pyramid_middleware.py @@ -167,7 +167,7 @@ def dummy_handler(request): span_context = tracer.span_context self.assertEqual(span_context.trace_id, trace_id) - def test__before_request_blacklist(self): + def test__before_request_excludelist(self): pyramid_trace_header = 'traceparent' trace_id = '2dd43a1d6b2549c6bc2a1a54c2fc0b05' span_id = '6e0c63257de34c92' @@ -248,7 +248,7 @@ def dummy_handler(request): self.assertEqual(span.attributes, expected_attributes) - def test__after_request_blacklist(self): + def test__after_request_excludelist(self): pyramid_trace_header = 'traceparent' trace_id = '2dd43a1d6b2549c6bc2a1a54c2fc0b05' span_id = '6e0c63257de34c92' diff --git a/contrib/opencensus-ext-requests/README.rst b/contrib/opencensus-ext-requests/README.rst index 54762d7c0..dd0257d20 100644 --- a/contrib/opencensus-ext-requests/README.rst +++ b/contrib/opencensus-ext-requests/README.rst @@ -13,7 +13,7 @@ You can enable requests integration by specifying ``'requests'`` to ``trace_inte It's possible to configure a list of URL you don't want traced. By default the request to exporter won't be traced. It's configurable by giving an array of hostname/port to the attribute -``blacklist_hostnames`` in OpenCensus context's attributes: +``excludelist_hostnames`` in OpenCensus context's attributes: Only the hostname must be specified if only the hostname is specified in the URL request. 
diff --git a/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py b/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py index c99862018..c2c42be13 100644 --- a/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py +++ b/contrib/opencensus-ext-requests/opencensus/ext/requests/trace.py @@ -74,14 +74,14 @@ def call(url, *args, **kwargs): # Check if request was sent from an exporter. If so, do not wrap. if execution_context.is_exporter(): return requests_func(url, *args, **kwargs) - blacklist_hostnames = execution_context.get_opencensus_attr( - 'blacklist_hostnames') + excludelist_hostnames = execution_context.get_opencensus_attr( + 'excludelist_hostnames') parsed_url = urlparse(url) if parsed_url.port is None: dest_url = parsed_url.hostname else: dest_url = '{}:{}'.format(parsed_url.hostname, parsed_url.port) - if utils.disable_tracing_hostname(dest_url, blacklist_hostnames): + if utils.disable_tracing_hostname(dest_url, excludelist_hostnames): return requests_func(url, *args, **kwargs) path = parsed_url.path if parsed_url.path else '/' @@ -145,14 +145,14 @@ def wrap_session_request(wrapped, instance, args, kwargs): method = kwargs.get('method') or args[0] url = kwargs.get('url') or args[1] - blacklist_hostnames = execution_context.get_opencensus_attr( - 'blacklist_hostnames') + excludelist_hostnames = execution_context.get_opencensus_attr( + 'excludelist_hostnames') parsed_url = urlparse(url) if parsed_url.port is None: dest_url = parsed_url.hostname else: dest_url = '{}:{}'.format(parsed_url.hostname, parsed_url.port) - if utils.disable_tracing_hostname(dest_url, blacklist_hostnames): + if utils.disable_tracing_hostname(dest_url, excludelist_hostnames): return wrapped(*args, **kwargs) path = parsed_url.path if parsed_url.path else '/' diff --git a/contrib/opencensus-ext-requests/tests/test_requests_trace.py b/contrib/opencensus-ext-requests/tests/test_requests_trace.py index c3957ae71..6cd0a75f5 100644 --- 
a/contrib/opencensus-ext-requests/tests/test_requests_trace.py +++ b/contrib/opencensus-ext-requests/tests/test_requests_trace.py @@ -124,7 +124,7 @@ def test_wrap_requests(self): mock_tracer.current_span.status.__dict__ ) - def test_wrap_requests_blacklist_ok(self): + def test_wrap_requests_excludelist_ok(self): mock_return = mock.Mock() mock_return.status_code = 200 return_value = mock_return @@ -157,7 +157,7 @@ def test_wrap_requests_blacklist_ok(self): self.assertEqual(expected_name, mock_tracer.current_span.name) - def test_wrap_requests_blacklist_nok(self): + def test_wrap_requests_excludelist_nok(self): mock_return = mock.Mock() mock_return.status_code = 200 return_value = mock_return @@ -413,7 +413,7 @@ def test_wrap_session_request(self): mock_tracer.current_span.status.__dict__ ) - def test_wrap_session_request_blacklist_ok(self): + def test_wrap_session_request_excludelist_ok(self): def wrapped(*args, **kwargs): result = mock.Mock() result.status_code = 200 @@ -448,7 +448,7 @@ def wrapped(*args, **kwargs): expected_name = '/' self.assertEqual(expected_name, mock_tracer.current_span.name) - def test_wrap_session_request_blacklist_nok(self): + def test_wrap_session_request_excludelist_nok(self): def wrapped(*args, **kwargs): result = mock.Mock() result.status_code = 200 diff --git a/docs/trace/usage.rst b/docs/trace/usage.rst index f0ed9a452..3c858c84c 100644 --- a/docs/trace/usage.rst +++ b/docs/trace/usage.rst @@ -148,13 +148,13 @@ This example shows how to use the ``GoogleCloudFormatPropagator``: # Serialize header = propagator.to_header(span_context) -Blacklist Paths -~~~~~~~~~~~~~~~ +Excludelist Paths +~~~~~~~~~~~~~~~~~ You can specify which paths you do not want to trace by configuring the -blacklist paths. +excludelist paths. -This example shows how to configure the blacklist to ignore the `_ah/health` endpoint +This example shows how to configure the excludelist to ignore the `_ah/health` endpoint for a Flask application: .. 
code:: python @@ -163,21 +163,21 @@ for a Flask application: app = flask.Flask(__name__) - blacklist_paths = ['_ah/health'] - middleware = FlaskMiddleware(app, blacklist_paths=blacklist_paths) + excludelist_paths = ['_ah/health'] + middleware = FlaskMiddleware(app, excludelist_paths=excludelist_paths) -For Django, you can configure the blacklist in the ``OPENCENSUS_TRACE_PARAMS`` in ``settings.py``: +For Django, you can configure the excludelist in the ``OPENCENSUS_TRACE_PARAMS`` in ``settings.py``: .. code:: python OPENCENSUS_TRACE_PARAMS: { ... - 'BLACKLIST_PATHS': ['_ah/health',], + 'EXCLUDELIST_PATHS': ['_ah/health',], } .. note:: By default the health check path for the App Engine flexible environment is not traced, - but you can turn it on by excluding it from the blacklist setting. + but you can turn it on by excluding it from the excludelist setting. Framework Integration --------------------- @@ -235,7 +235,7 @@ setting in ``settings.py``: .. code:: python OPENCENSUS_TRACE_PARAMS = { - 'BLACKLIST_PATHS': ['/_ah/health'], + 'EXCLUDELIST_PATHS': ['/_ah/health'], 'GCP_EXPORTER_PROJECT': None, 'SAMPLING_RATE': 0.5, 'SERVICE_NAME': 'my_service', diff --git a/opencensus/trace/utils.py b/opencensus/trace/utils.py index 7aabf73ab..5b9be3473 100644 --- a/opencensus/trace/utils.py +++ b/opencensus/trace/utils.py @@ -19,10 +19,10 @@ from opencensus.trace import execution_context from opencensus.trace.status import Status -# By default the blacklist urls are not tracing, currently just include the +# By default the excludelist urls are not tracing, currently just include the # health check url. The paths are literal string matched instead of regular # expressions. Do not include the '/' at the beginning of the path. 
-DEFAULT_BLACKLIST_PATHS = [ +DEFAULT_EXCLUDELIST_PATHS = [ '_ah/health', ] @@ -42,20 +42,20 @@ def get_func_name(func): return func_name -def disable_tracing_url(url, blacklist_paths=None): - """Disable tracing on the provided blacklist paths, by default not tracing +def disable_tracing_url(url, excludelist_paths=None): + """Disable tracing on the provided excludelist paths, by default not tracing the health check request. - If the url path starts with the blacklisted path, return True. + If the url path starts with the excludelisted path, return True. - :type blacklist_paths: list - :param blacklist_paths: Paths that not tracing. + :type excludelist_paths: list + :param excludelist_paths: Paths that not tracing. :rtype: bool :returns: True if not tracing, False if tracing. """ - if blacklist_paths is None: - blacklist_paths = DEFAULT_BLACKLIST_PATHS + if excludelist_paths is None: + excludelist_paths = DEFAULT_EXCLUDELIST_PATHS # Remove the 'https?|ftp://' if exists url = re.sub(URL_PATTERN, '', url) @@ -63,39 +63,39 @@ def disable_tracing_url(url, blacklist_paths=None): # Split the url by the first '/' and get the path part url_path = url.split('/', 1)[1] - for path in blacklist_paths: + for path in excludelist_paths: if url_path.startswith(path): return True return False -def disable_tracing_hostname(url, blacklist_hostnames=None): - """Disable tracing for the provided blacklist URLs, by default not tracing +def disable_tracing_hostname(url, excludelist_hostnames=None): + """Disable tracing for the provided excludelist URLs, by default not tracing the exporter url. - If the url path starts with the blacklisted path, return True. + If the url path starts with the excludelisted path, return True. - :type blacklist_hostnames: list - :param blacklist_hostnames: URL that not tracing. + :type excludelist_hostnames: list + :param excludelist_hostnames: URL that not tracing. :rtype: bool :returns: True if not tracing, False if tracing. 
""" - if blacklist_hostnames is None: + if excludelist_hostnames is None: # Exporter host_name are not traced by default _tracer = execution_context.get_opencensus_tracer() try: - blacklist_hostnames = [ + excludelist_hostnames = [ '{}:{}'.format( _tracer.exporter.host_name, _tracer.exporter.port ) ] except(AttributeError): - blacklist_hostnames = [] + excludelist_hostnames = [] - return url in blacklist_hostnames + return url in excludelist_hostnames def status_from_http_code(http_code): diff --git a/tests/system/trace/django/app/settings.py b/tests/system/trace/django/app/settings.py index 3bba77f31..c60837af9 100644 --- a/tests/system/trace/django/app/settings.py +++ b/tests/system/trace/django/app/settings.py @@ -71,7 +71,7 @@ 'PROPAGATOR': 'opencensus.trace.propagation.google_cloud_format.' 'GoogleCloudFormatPropagator()', - 'BLACKLIST_PATHS': [ + 'EXCLUDELIST_PATHS': [ '_ah/health', ], } diff --git a/tests/unit/trace/test_ext_utils.py b/tests/unit/trace/test_ext_utils.py index 0816b2d7e..e322158a7 100644 --- a/tests/unit/trace/test_ext_utils.py +++ b/tests/unit/trace/test_ext_utils.py @@ -53,9 +53,9 @@ def test_disable_tracing_url_default(self): def test_disable_tracing_url_explicit(self): url = 'http://127.0.0.1:8080/test_no_tracing' - blacklist_paths = ['test_no_tracing'] + excludelist_paths = ['test_no_tracing'] - disable_tracing = utils.disable_tracing_url(url, blacklist_paths) + disable_tracing = utils.disable_tracing_url(url, excludelist_paths) self.assertTrue(disable_tracing) def test_disable_tracing_hostname_default(self): @@ -65,14 +65,16 @@ def test_disable_tracing_hostname_default(self): self.assertFalse(disable_tracing) def test_disable_tracing_hostname_explicit(self): - blacklist_paths = ['127.0.0.1', '192.168.0.1:80'] + excludelist_paths = ['127.0.0.1', '192.168.0.1:80'] url = '127.0.0.1:8080' - disable_tracing = utils.disable_tracing_hostname(url, blacklist_paths) + disable_tracing = utils.disable_tracing_hostname( + url, excludelist_paths) 
self.assertFalse(disable_tracing) url = '127.0.0.1:80' - disable_tracing = utils.disable_tracing_hostname(url, blacklist_paths) + disable_tracing = utils.disable_tracing_hostname( + url, excludelist_paths) self.assertFalse(disable_tracing) def test_grpc_code_from_http_code(self): From 82379aaa99d2771dd0112551cd96b25dd52193b8 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Tue, 8 Dec 2020 23:59:24 -0500 Subject: [PATCH 75/79] Disable heartbeat metrics in azure exporters (#984) --- contrib/opencensus-ext-azure/CHANGELOG.md | 2 ++ .../opencensus/ext/azure/log_exporter/__init__.py | 3 --- .../opencensus/ext/azure/metrics_exporter/__init__.py | 5 ----- .../opencensus/ext/azure/trace_exporter/__init__.py | 3 --- .../tests/test_azure_metrics_exporter.py | 1 + 5 files changed, 3 insertions(+), 11 deletions(-) diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index 7f719df48..c740fafb3 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -12,6 +12,8 @@ ([#946](https://github.com/census-instrumentation/opencensus-python/pull/946)) - Added queue capacity configuration for exporters ([#949](https://github.com/census-instrumentation/opencensus-python/pull/949)) +- Disable heartbeat metrics in exporters + ([#984](https://github.com/census-instrumentation/opencensus-python/pull/984)) ## 1.0.4 Released 2020-06-29 diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index f74d0c24e..e7a9b511d 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -30,7 +30,6 @@ ) from opencensus.ext.azure.common.storage import LocalFileStorage from opencensus.ext.azure.common.transport import TransportMixin -from opencensus.ext.azure.metrics_exporter import 
heartbeat_metrics from opencensus.trace import execution_context logger = logging.getLogger(__name__) @@ -60,8 +59,6 @@ def __init__(self, **options): self._queue = Queue(capacity=self.options.queue_capacity) self._worker = Worker(self._queue, self) self._worker.start() - heartbeat_metrics.enable_heartbeat_metrics( - self.options.connection_string, self.options.instrumentation_key) def _export(self, batch, event=None): # pragma: NO COVER try: diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py index bd523809a..17ee88c50 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/metrics_exporter/__init__.py @@ -153,9 +153,4 @@ def new_metrics_exporter(**options): producers, exporter, interval=exporter.options.export_interval) - from opencensus.ext.azure.metrics_exporter import heartbeat_metrics - heartbeat_metrics.enable_heartbeat_metrics( - exporter.options.connection_string, - exporter.options.instrumentation_key - ) return exporter diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py index 17e4f3d3b..42ffe6ffe 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/trace_exporter/__init__.py @@ -28,7 +28,6 @@ ) from opencensus.ext.azure.common.storage import LocalFileStorage from opencensus.ext.azure.common.transport import TransportMixin -from opencensus.ext.azure.metrics_exporter import heartbeat_metrics from opencensus.trace.span import SpanKind try: @@ -60,8 +59,6 @@ def __init__(self, **options): self._telemetry_processors = [] super(AzureExporter, self).__init__(**options) atexit.register(self._stop, self.options.grace_period) - 
heartbeat_metrics.enable_heartbeat_metrics( - self.options.connection_string, self.options.instrumentation_key) def span_data_to_envelope(self, sd): envelope = Envelope( diff --git a/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py b/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py index 497422c17..1a9685cc7 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_metrics_exporter.py @@ -234,6 +234,7 @@ def test_new_metrics_exporter_no_standard_metrics(self, exporter_mock): self.assertFalse(isinstance(exporter_mock.call_args[0][0][0], producer_class)) + @unittest.skip("Skip because disabling heartbeat metrics") @mock.patch('opencensus.ext.azure.metrics_exporter' '.transport.get_exporter_thread') def test_new_metrics_exporter_heartbeat(self, exporter_mock): From a7ebb745e471046fe0d7127c50a460ffa91edcb2 Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Mon, 14 Dec 2020 10:34:38 -0500 Subject: [PATCH 76/79] Loosen ikey validation to follow GUID (#986) --- contrib/opencensus-ext-azure/CHANGELOG.md | 2 ++ .../opencensus/ext/azure/common/transport.py | 1 - .../opencensus/ext/azure/common/utils.py | 7 ++----- .../tests/test_azure_utils.py | 18 ++++++++++-------- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/contrib/opencensus-ext-azure/CHANGELOG.md b/contrib/opencensus-ext-azure/CHANGELOG.md index c740fafb3..9aa43b71f 100644 --- a/contrib/opencensus-ext-azure/CHANGELOG.md +++ b/contrib/opencensus-ext-azure/CHANGELOG.md @@ -14,6 +14,8 @@ ([#949](https://github.com/census-instrumentation/opencensus-python/pull/949)) - Disable heartbeat metrics in exporters ([#984](https://github.com/census-instrumentation/opencensus-python/pull/984)) +- Loosen instrumentation key validation to GUID + ([#986](https://github.com/census-instrumentation/opencensus-python/pull/986)) ## 1.0.4 Released 2020-06-29 diff --git 
a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py index 4e7401b77..feca72979 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/transport.py @@ -76,7 +76,6 @@ def _transmit(self, envelopes): except Exception: pass if response.status_code == 200: - logger.info('Transmission succeeded: %s.', text) return 0 if response.status_code == 206: # Partial Content if data: diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py index 4907f74af..75c839ee1 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/common/utils.py @@ -58,12 +58,9 @@ def timestamp_to_iso_str(timestamp): return to_iso_str(datetime.datetime.utcfromtimestamp(timestamp)) -# Validate UUID format -# Specs taken from https://tools.ietf.org/html/rfc4122 +# Validate GUID format uuid_regex_pattern = re.compile('^[0-9a-f]{8}-' - '[0-9a-f]{4}-' - '[1-5][0-9a-f]{3}-' - '[89ab][0-9a-f]{3}-' + '([0-9a-f]{4}-){3}' '[0-9a-f]{12}$') diff --git a/contrib/opencensus-ext-azure/tests/test_azure_utils.py b/contrib/opencensus-ext-azure/tests/test_azure_utils.py index 47ef484d8..75aa0bb06 100644 --- a/contrib/opencensus-ext-azure/tests/test_azure_utils.py +++ b/contrib/opencensus-ext-azure/tests/test_azure_utils.py @@ -121,12 +121,14 @@ def test_invalid_key_section5_hex(self): self.assertRaises(ValueError, lambda: utils.validate_instrumentation_key(key)) - def test_invalid_key_version(self): - key = '1234abcd-5678-6efa-8abc-1234567890ab' - self.assertRaises(ValueError, - lambda: utils.validate_instrumentation_key(key)) + def test_valid_key_section2_hex(self): + key = '1234abcd-567a-4efa-8abc-1234567890ab' + self.assertIsNone(utils.validate_instrumentation_key(key)) - def 
test_invalid_key_variant(self): - key = '1234abcd-5678-4efa-2abc-1234567890ab' - self.assertRaises(ValueError, - lambda: utils.validate_instrumentation_key(key)) + def test_valid_key_section3_hex(self): + key = '1234abcd-5678-befa-8abc-1234567890ab' + self.assertIsNone(utils.validate_instrumentation_key(key)) + + def test_valid_key_section4_hex(self): + key = '1234abcd-5678-4efa-cabc-1234567890ab' + self.assertIsNone(utils.validate_instrumentation_key(key)) From f6c9bd478edb8baf5877834e8e091be178a8401b Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 17 Dec 2020 12:12:27 -0700 Subject: [PATCH 77/79] Specify `google-cloud-trace<1.0.0` in README (#989) This library is not compatible with versions of google-cloud-trace >= 1.0.0. This should be taken care of by the setup.py, but add this just in case folks install `google-cloud-trace` first. https://github.com/census-instrumentation/opencensus-python/blob/812105e6021f47fbfa77fe7b32e1797a83bbe7ad/contrib/opencensus-ext-stackdriver/setup.py#L43 --- contrib/opencensus-ext-stackdriver/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/contrib/opencensus-ext-stackdriver/README.rst b/contrib/opencensus-ext-stackdriver/README.rst index d938ad12f..3dd36e177 100644 --- a/contrib/opencensus-ext-stackdriver/README.rst +++ b/contrib/opencensus-ext-stackdriver/README.rst @@ -32,8 +32,8 @@ This example shows how to report the traces to Stackdriver Trace: :: - pip install google-cloud-trace - pipenv install google-cloud-trace + pip install "google-cloud-trace<1.0.0" + pipenv install "google-cloud-trace<1.0.0" By default, traces are exported asynchronously, to reduce latency during your code's execution. 
If you would like to export data on the main thread From 60d3913ec5fa65e4a78bc7ece61a3047a8cd4e9f Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 14 Jan 2021 14:51:25 -0800 Subject: [PATCH 78/79] build --- .github/workflows/build.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 79688a726..bf62bcf53 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -5,8 +5,6 @@ on: branches-ignore: - 'release/*' pull_request: - branches: - - master jobs: build: From 631e8ed5babae0d46b3dfb936512c6deca5d71fd Mon Sep 17 00:00:00 2001 From: Leighton Chen Date: Thu, 14 Jan 2021 16:46:31 -0800 Subject: [PATCH 79/79] remove function --- .../ext/azure/log_exporter/__init__.py | 22 +++---------------- 1 file changed, 3 insertions(+), 19 deletions(-) diff --git a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py index 2a7bafcd1..45b104adf 100644 --- a/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py +++ b/contrib/opencensus-ext-azure/opencensus/ext/azure/log_exporter/__init__.py @@ -81,24 +81,6 @@ def _export(self, batch, event=None): # pragma: NO COVER if event: event.set() - def _export(self, batch, event=None): # pragma: NO COVER - try: - if batch: - envelopes = [self.log_record_to_envelope(x) for x in batch] - envelopes = self.apply_telemetry_processors(envelopes) - result = self._transmit(envelopes) - if result > 0: - self.storage.put(envelopes, result) - if event: - if isinstance(event, QueueExitEvent): - self._transmit_from_storage() # send files before exit - return - if len(batch) < self.options.max_batch_size: - self._transmit_from_storage() - finally: - if event: - event.set() - def close(self): self.storage.close() self._worker.stop() @@ -167,7 +149,9 @@ def __init__(self, probability=1.0): self.probability = probability def filter(self, record): - 
return random.random() < self.probability + val = random.random() + print(val) + return val < self.probability class AzureLogHandler(TransportMixin, ProcessorMixin, BaseLogHandler):