from datadog.api.resources import (
    GetableAPIResource,
    CreateableAPIResource,
    UpdatableAPIResource,
    ListableAPIResource,
    DeletableAPIResource,
    ActionAPIResource,
)


class ServiceLevelObjective(
    GetableAPIResource,
    CreateableAPIResource,
    UpdatableAPIResource,
    ListableAPIResource,
    DeletableAPIResource,
    ActionAPIResource,
):
    """
    A wrapper around the Service Level Objective HTTP API.
    """

    _resource_name = "slo"

    @classmethod
    def get_all(cls, query=None, ids=None, offset=0, limit=100, **params):
        """
        Get all SLO details.

        :param query: optional search query - syntax in UI && online documentation
        :type query: str

        :param ids: optional list of SLO ids to get many specific SLOs at once.
        :type ids: list(str)

        :param offset: offset of results to use (default 0)
        :type offset: int

        :param limit: limit of results to return (default: 100)
        :type limit: int

        :returns: Dictionary representing the API's JSON response
        """
        search_terms = {}
        if query:
            search_terms["query"] = query
        if ids:
            search_terms["ids"] = ids
        # offset/limit are always sent so paging is explicit server-side.
        search_terms["offset"] = offset
        search_terms["limit"] = limit

        return super(ServiceLevelObjective, cls).get_all(**search_terms)

    @classmethod
    def bulk_delete(cls, ops):
        """
        Bulk delete timeframes from multiple SLOs.

        :param ops: a dictionary mapping of SLO ID to timeframes to remove.
        :type ops: dict(str, list(str))

        :returns: Dictionary representing the API's JSON response
        """
        return super(ServiceLevelObjective, cls)._trigger_class_action(
            "POST", "bulk_delete", body=ops
        )

    @classmethod
    def delete_many(cls, ids, **params):
        """
        Delete multiple SLOs.

        :param ids: a list of SLO IDs to remove
        :type ids: list(str)

        :returns: Dictionary representing the API's JSON response
        """
        return super(ServiceLevelObjective, cls)._trigger_class_action(
            "DELETE", "", body={"ids": ids}
        )
class MonitorClient(object):
    """Dogshell client for Service Level Objective (SLO) CRUD verbs.

    NOTE(review): the class name ``MonitorClient`` looks like a copy-paste
    from the monitor dogshell module; it is kept unchanged because the
    dogshell command registry imports it by this name.
    """

    @classmethod
    def setup_parser(cls, subparsers):
        """Register the ``service_level_objective`` command and its verbs."""
        parser = subparsers.add_parser(
            "service_level_objective",
            help="Create, edit, and delete service level objectives",
        )

        verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
        verb_parsers.required = True

        create_parser = verb_parsers.add_parser("create", help="Create a SLO")
        create_parser.add_argument(
            "--type",
            required=True,
            help="type of the SLO (metric- or monitor-based)",
            choices=["metric", "monitor"],
        )
        create_parser.add_argument("--name", help="name of the SLO", default=None)
        create_parser.add_argument(
            "--description", help="description of the SLO", default=None
        )
        create_parser.add_argument(
            "--tags", help="comma-separated list of tags", default=None
        )
        # Placeholders restored: the original help text had the angle-bracket
        # tokens stripped ("comma separated list of :[:]").
        create_parser.add_argument(
            "--thresholds",
            help="comma separated list of <timeframe>:<target>[:<warning>]",
            required=True,
        )
        create_parser.add_argument(
            "--numerator",
            help="numerator metric query (sum of good events)",
            default=None,
        )
        create_parser.add_argument(
            "--denominator",
            help="denominator metric query (sum of total events)",
            default=None,
        )
        create_parser.add_argument(
            "--monitor_ids", help="explicit monitor_ids to use (CSV)", default=None
        )
        create_parser.add_argument(
            "--monitor_search", help="monitor search terms to use", default=None
        )
        create_parser.set_defaults(func=cls._create)

        file_create_parser = verb_parsers.add_parser(
            "fcreate", help="Create a SLO from file"
        )
        file_create_parser.add_argument(
            "file", help="json file holding all details", type=argparse.FileType("r")
        )
        file_create_parser.set_defaults(func=cls._file_create)

        update_parser = verb_parsers.add_parser("update", help="Update existing SLO")
        update_parser.add_argument(
            "slo_id", help="SLO to replace with the new definition"
        )
        update_parser.add_argument(
            "--type",
            required=True,
            help="type of the SLO (must specify it's original type)",
            choices=["metric", "monitor"],
        )
        update_parser.add_argument("--name", help="name of the SLO", default=None)
        update_parser.add_argument(
            "--description", help="description of the SLO", default=None
        )
        # BUG FIX: this option was previously added to create_parser a second
        # time (an argparse conflict) instead of update_parser, and is
        # optional here since an update may leave thresholds untouched.
        update_parser.add_argument(
            "--thresholds",
            help="comma separated list of <timeframe>:<target>[:<warning>]",
            default=None,
        )
        update_parser.add_argument(
            "--tags", help="comma-separated list of tags", default=None
        )
        update_parser.add_argument(
            "--numerator",
            help="numerator metric query (sum of good events)",
            default=None,
        )
        update_parser.add_argument(
            "--denominator",
            help="denominator metric query (sum of total events)",
            default=None,
        )
        update_parser.add_argument(
            "--monitor_ids", help="explicit monitor_ids to use (CSV)", default=None
        )
        update_parser.add_argument(
            "--monitor_search", help="monitor search terms to use", default=None
        )
        update_parser.set_defaults(func=cls._update)

        file_update_parser = verb_parsers.add_parser(
            "fupdate", help="Update existing SLO from file"
        )
        file_update_parser.add_argument(
            "file", help="json file holding all details", type=argparse.FileType("r")
        )
        file_update_parser.set_defaults(func=cls._file_update)

        show_parser = verb_parsers.add_parser("show", help="Show a SLO definition")
        show_parser.add_argument("slo_id", help="SLO to show")
        show_parser.set_defaults(func=cls._show)

        show_all_parser = verb_parsers.add_parser(
            "show_all", help="Show a list of all SLOs"
        )
        show_all_parser.add_argument(
            "--query", help="string to filter SLOs by query (see UI or documentation)"
        )
        show_all_parser.add_argument(
            "--slo_ids",
            help="comma separated list indicating what SLO IDs to get at once",
        )
        show_all_parser.add_argument(
            "--offset", help="offset of query pagination", default=0
        )
        show_all_parser.add_argument(
            "--limit", help="limit of query pagination", default=100
        )
        show_all_parser.set_defaults(func=cls._show_all)

        delete_parser = verb_parsers.add_parser("delete", help="Delete a SLO")
        delete_parser.add_argument("slo_id", help="SLO to delete")
        delete_parser.set_defaults(func=cls._delete)

        delete_many_parser = verb_parsers.add_parser(
            "delete_many", help="Delete multiple SLOs"
        )
        delete_many_parser.add_argument(
            "slo_ids", help="comma separated list of SLO IDs to delete"
        )
        delete_many_parser.set_defaults(func=cls._delete_many)

        # BUG FIX: this verb was registered under the duplicate name
        # "delete_many", and its positional argument carried required=True,
        # which argparse rejects with a TypeError for positionals.
        delete_timeframe_parser = verb_parsers.add_parser(
            "delete_timeframe", help="Delete a SLO timeframe"
        )
        delete_timeframe_parser.add_argument("slo_id", help="SLO ID to update")
        delete_timeframe_parser.add_argument(
            "timeframes", help="CSV of timeframes to delete, e.g. 7d,30d,90d"
        )
        delete_timeframe_parser.set_defaults(func=cls._delete_timeframe)

    @staticmethod
    def _parse_thresholds(thresholds_csv):
        """Parse a "timeframe:target[:warning]" CSV into API threshold dicts.

        Values are kept as strings, matching the original behavior
        (NOTE(review): the API may coerce/require numeric targets — confirm).
        """
        thresholds = []
        for threshold_str in thresholds_csv.split(","):
            parts = threshold_str.split(":")
            thresholds.append(
                {
                    "timeframe": parts[0],
                    "target": parts[1],
                    "warning": parts[2] if len(parts) > 2 else None,
                }
            )
        return thresholds

    @staticmethod
    def _parse_tags(raw_tags):
        """Normalize tags (CSV string or list) into a sorted, deduped list."""
        if isinstance(raw_tags, str):
            raw_tags = raw_tags.split(",")
        return sorted(set(t.strip() for t in raw_tags if t.strip()))

    @staticmethod
    def _print(res, output_format):
        """Emit *res* as pretty or compact JSON depending on --format."""
        if output_format == "pretty":
            print(pretty_json(res))
        else:
            print(json.dumps(res))

    @classmethod
    def _create(cls, args):
        """Handle the ``create`` verb: build params from flags and POST."""
        api._timeout = args.timeout
        output_format = args.format

        params = {"type": args.type, "name": args.name}

        if args.tags:
            params["tags"] = cls._parse_tags(args.tags)

        params["thresholds"] = cls._parse_thresholds(args.thresholds)

        if args.description:
            params["description"] = args.description

        if args.type == "metric":
            params["query"] = {
                "numerator": args.numerator,
                "denominator": args.denominator,
            }
        elif args.monitor_search:
            params["monitor_search"] = args.monitor_search
        else:
            params["monitor_ids"] = args.monitor_ids

        res = api.ServiceLevelObjective.create(**params)
        report_warnings(res)
        report_errors(res)
        cls._print(res, output_format)

    @classmethod
    def _file_create(cls, args):
        """Handle the ``fcreate`` verb: create a SLO from a JSON file."""
        api._timeout = args.timeout
        output_format = args.format
        slo = json.load(args.file)

        params = {
            "type": slo["type"],
            "name": slo["name"],
            "thresholds": slo["thresholds"],
        }

        if slo.get("description"):
            params["description"] = slo["description"]

        if slo["type"] == "metric":
            params["query"] = {
                "numerator": slo["numerator"],
                "denominator": slo["denominator"],
            }
        elif slo.get("monitor_search"):
            params["monitor_search"] = slo["monitor_search"]
        else:
            params["monitor_ids"] = slo["monitor_ids"]

        # BUG FIX: tags in a JSON file are usually a list; an unconditional
        # .split(",") crashed on lists. _parse_tags handles both forms.
        if slo.get("tags"):
            params["tags"] = cls._parse_tags(slo["tags"])

        res = api.ServiceLevelObjective.create(**params)
        report_warnings(res)
        report_errors(res)
        cls._print(res, output_format)

    @classmethod
    def _update(cls, args):
        """Handle the ``update`` verb: send only the fields the user set."""
        api._timeout = args.timeout
        output_format = args.format

        params = {"type": args.type}

        # BUG FIX: --name was parsed but never forwarded to the API.
        if args.name:
            params["name"] = args.name

        if args.thresholds:
            params["thresholds"] = cls._parse_thresholds(args.thresholds)

        if args.description:
            params["description"] = args.description

        if args.type == "metric":
            # Only replace the query when both halves were provided.
            if args.numerator and args.denominator:
                params["query"] = {
                    "numerator": args.numerator,
                    "denominator": args.denominator,
                }
        elif args.monitor_search:
            params["monitor_search"] = args.monitor_search
        else:
            params["monitor_ids"] = args.monitor_ids

        if args.tags:
            params["tags"] = cls._parse_tags(args.tags)

        res = api.ServiceLevelObjective.update(args.slo_id, **params)
        report_warnings(res)
        report_errors(res)
        cls._print(res, output_format)

    @classmethod
    def _file_update(cls, args):
        """Handle the ``fupdate`` verb: update a SLO from a JSON file."""
        api._timeout = args.timeout
        output_format = args.format
        slo = json.load(args.file)

        params = {"type": slo["type"], "name": slo["name"]}

        # BUG FIX: this path previously iterated args.thresholds, which the
        # fupdate verb never defines (AttributeError); the JSON file already
        # carries structured threshold dicts, so pass them through as-is.
        if slo.get("thresholds"):
            params["thresholds"] = slo["thresholds"]

        if slo.get("description"):
            params["description"] = slo["description"]

        if slo["type"] == "metric":
            params["query"] = {
                "numerator": slo["numerator"],
                "denominator": slo["denominator"],
            }
        elif slo.get("monitor_search"):
            params["monitor_search"] = slo["monitor_search"]
        else:
            params["monitor_ids"] = slo["monitor_ids"]

        if slo.get("tags"):
            params["tags"] = cls._parse_tags(slo["tags"])

        res = api.ServiceLevelObjective.update(slo["id"], **params)
        report_warnings(res)
        report_errors(res)
        cls._print(res, output_format)

    @classmethod
    def _show(cls, args):
        """Handle the ``show`` verb: fetch and print one SLO."""
        api._timeout = args.timeout
        output_format = args.format
        res = api.ServiceLevelObjective.get(args.slo_id)
        report_warnings(res)
        report_errors(res)

        # --string_ids is a global dogshell flag for JSON consumers that
        # cannot represent large integer IDs exactly.
        if args.string_ids:
            res["id"] = str(res["id"])

        cls._print(res, output_format)

    @classmethod
    def _show_all(cls, args):
        """Handle the ``show_all`` verb: list SLOs by query or by IDs."""
        api._timeout = args.timeout
        output_format = args.format

        params = {"offset": args.offset, "limit": args.limit}
        if args.query:
            params["query"] = args.query
        elif args.slo_ids:
            # BUG FIX: previously ids=None was sent when neither flag was
            # given; now ids is only included when actually provided.
            params["ids"] = args.slo_ids

        res = api.ServiceLevelObjective.get_all(**params)
        report_warnings(res)
        report_errors(res)
        cls._print(res, output_format)

    @classmethod
    def _delete(cls, args):
        """Handle the ``delete`` verb."""
        api._timeout = args.timeout
        # BUG FIX: args.format was never read here, so the output check
        # compared the *builtin* format function to "pretty" (always False).
        output_format = args.format
        res = api.ServiceLevelObjective.delete(args.slo_id)
        if res is not None:
            report_warnings(res)
            report_errors(res)
            cls._print(res, output_format)

    @classmethod
    def _delete_many(cls, args):
        """Handle the ``delete_many`` verb."""
        api._timeout = args.timeout
        # BUG FIX: same missing args.format read as in _delete.
        output_format = args.format
        # BUG FIX: the API body expects a list of IDs, not the raw CSV string.
        ids = [s.strip() for s in args.slo_ids.split(",") if s.strip()]
        res = api.ServiceLevelObjective.delete_many(ids)
        if res is not None:
            report_warnings(res)
            report_errors(res)
            cls._print(res, output_format)

    @classmethod
    def _delete_timeframe(cls, args):
        """Handle the ``delete_timeframe`` verb via the bulk_delete endpoint."""
        api._timeout = args.timeout
        # BUG FIX: same missing args.format read as in _delete.
        output_format = args.format

        ops = {args.slo_id: args.timeframes.split(",")}

        res = api.ServiceLevelObjective.bulk_delete(ops)
        if res is not None:
            report_warnings(res)
            report_errors(res)
            cls._print(res, output_format)

    @classmethod
    def _escape(cls, s):
        """Escape CR/LF/TAB so a value stays on one line of shell output."""
        return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t")
edaf6b2807dc37f78b15753a1fdab499933aadbf Mon Sep 17 00:00:00 2001 From: Cody Lee Date: Thu, 3 Oct 2019 11:28:09 -0500 Subject: [PATCH 05/15] add missing import --- datadog/api/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/datadog/api/__init__.py b/datadog/api/__init__.py index 31ca0bc5b..6c28dd46b 100644 --- a/datadog/api/__init__.py +++ b/datadog/api/__init__.py @@ -35,3 +35,4 @@ from datadog.api.service_checks import ServiceCheck from datadog.api.tags import Tag from datadog.api.users import User +from datadog.api.service_level_objectives import ServiceLevelObjective \ No newline at end of file From 509bb59186f5f33117a08a501a61b21fcd95329b Mon Sep 17 00:00:00 2001 From: Cody Lee Date: Thu, 3 Oct 2019 13:28:18 -0500 Subject: [PATCH 06/15] process result data to return only the resource --- datadog/api/service_level_objectives.py | 76 +++++++++++++++++++++++-- 1 file changed, 72 insertions(+), 4 deletions(-) diff --git a/datadog/api/service_level_objectives.py b/datadog/api/service_level_objectives.py index 46eeec361..8f04150d2 100644 --- a/datadog/api/service_level_objectives.py +++ b/datadog/api/service_level_objectives.py @@ -22,6 +22,35 @@ class ServiceLevelObjective( _resource_name = "slo" + @classmethod + def create(cls, attach_host_name=False, method='POST', id=None, params=None, **body): + """ + Create a SLO + + :returns: created SLO details + """ + results = super(ServiceLevelObjective, cls).create(attach_host_name=False, method='POST', id=None, params=None, **body) + if results["error"]: + raise Exception(results["error"]) + else: + return results["data"][0] + + @classmethod + def get(cls, id, **params): + """ + Get a specific SLO details. 
+ + :param id: SLO id to get details for + :type id: str + + :returns: SLO details + """ + results = super(ServiceLevelObjective, cls).get(id, **params) + if results["error"]: + raise Exception(results["error"]) + else: + return results["data"] + @classmethod def get_all(cls, query=None, ids=None, offset=0, limit=100, **params): """ @@ -39,7 +68,7 @@ def get_all(cls, query=None, ids=None, offset=0, limit=100, **params): :param limit: limit of results to return (default: 1000) :type limit: int - :returns: Dictionary representing the API's JSON response + :returns: SLOs matching the query """ search_terms = {} if query: @@ -49,7 +78,43 @@ def get_all(cls, query=None, ids=None, offset=0, limit=100, **params): search_terms["offset"] = offset search_terms["limit"] = limit - return super(ServiceLevelObjective, cls).get_all(**search_terms) + results = super(ServiceLevelObjective, cls).get_all(**search_terms) + if results["error"]: + raise Exception(results["error"]) + else: + return results["data"] + + @classmethod + def update(cls, id, params=None, **body): + """ + Update a specific SLO details. + + :param id: SLO id to update details for + :type id: str + + :returns: SLO details + """ + results = super(ServiceLevelObjective, cls).update(id, params, **body) + if results["error"]: + raise Exception(results["error"]) + else: + return results["data"][0] + + @classmethod + def delete(cls, id, **params): + """ + Delete a specific SLO. 
+ + :param id: SLO id to delete + :type id: str + + :returns: SLO ids removed + """ + results = super(ServiceLevelObjective, cls).delete(id, **params) + if results["error"]: + raise Exception(results["error"]) + else: + return results["data"][0] @classmethod def bulk_delete(cls, ops): @@ -60,11 +125,14 @@ def bulk_delete(cls, ops): :type ops: dict(str, list(str)) :returns: Dictionary representing the API's JSON response + `errors` - errors with operation + `data` - updates and deletions """ return super(ServiceLevelObjective, cls)._trigger_class_action( "POST", "bulk_delete", body=ops ) + @classmethod def delete_many(cls, ids, **params): """ @@ -73,8 +141,8 @@ def delete_many(cls, ids, **params): :param ids: a list of SLO IDs to remove :type ids: list(str) - :returns: Dictionary representing the API's JSON response + :returns: Dictionary representing the API's JSON response see `data` list(slo ids) && `errors` """ return super(ServiceLevelObjective, cls)._trigger_class_action( - "DELETE", "", body={"ids": ids} + "DELETE", "", params=params, body={"ids": ids} ) From 5b39f264a5e436554e7e5102016ec09e92434a29 Mon Sep 17 00:00:00 2001 From: Cody Lee Date: Thu, 3 Oct 2019 13:45:53 -0500 Subject: [PATCH 07/15] black; --- datadog/api/service_level_objectives.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/datadog/api/service_level_objectives.py b/datadog/api/service_level_objectives.py index 8f04150d2..91f3e4b33 100644 --- a/datadog/api/service_level_objectives.py +++ b/datadog/api/service_level_objectives.py @@ -23,13 +23,17 @@ class ServiceLevelObjective( _resource_name = "slo" @classmethod - def create(cls, attach_host_name=False, method='POST', id=None, params=None, **body): + def create( + cls, attach_host_name=False, method="POST", id=None, params=None, **body + ): """ Create a SLO :returns: created SLO details """ - results = super(ServiceLevelObjective, cls).create(attach_host_name=False, method='POST', id=None, params=None, **body) + 
results = super(ServiceLevelObjective, cls).create( + attach_host_name=False, method="POST", id=None, params=None, **body + ) if results["error"]: raise Exception(results["error"]) else: @@ -132,7 +136,6 @@ def bulk_delete(cls, ops): "POST", "bulk_delete", body=ops ) - @classmethod def delete_many(cls, ids, **params): """ From 7da10a4807847c9addfbf2e0491369d249ad5c26 Mon Sep 17 00:00:00 2001 From: Cody Lee Date: Tue, 8 Oct 2019 15:47:17 -0500 Subject: [PATCH 08/15] fix missing parameter on update --- tests/integration/api/test_api.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/integration/api/test_api.py b/tests/integration/api/test_api.py index ebfd0fe72..17ac69285 100644 --- a/tests/integration/api/test_api.py +++ b/tests/integration/api/test_api.py @@ -403,14 +403,15 @@ def test_service_level_objective_crud(self): query = {"numerator": numerator, "denominator": denominator} thresholds = [{"timeframe": "7d", "target": 90}] name = "test SLO {}".format(time.time()) - slo = dog.ServiceLevelObjective.create(type="metric", query=query, thresholds=thresholds, name=name, tags=["type:test"]) + slo = dog.ServiceLevelObjective.create(type="metric", query=query, thresholds=thresholds, name=name, + tags=["type:test"]) assert slo["name"] == name numerator2 = "sum:my.custom.metric{type:good,!type:ignored}.as_count()" denominator2 = "sum:my.custom.metric{!type:ignored}.as_count()" query = {"numerator": numerator2, "denominator": denominator2} - slo = dog.ServiceLevelObjective.update(type="metric", query=query, thresholds=thresholds, name=name, - tags=["type:test"]) + slo = dog.ServiceLevelObjective.update(id=slo["id"], type="metric", query=query, thresholds=thresholds, + name=name, tags=["type:test"]) assert slo["name"] == name slos = [s for s in dog.ServiceLevelObjective.get_all() if s["id"] == slo["id"]] assert len(slos) == 1 From ea43151e2ef77520ca2d4a7b99c2d49527efcc12 Mon Sep 17 00:00:00 2001 From: Cody Lee Date: Wed, 9 Oct 2019 11:30:42 
-0500 Subject: [PATCH 09/15] fixes after PR review --- datadog/api/service_level_objectives.py | 12 +- datadog/dogshell/service_level_objective.py | 149 +++++++++----------- datadog/util/cli.py | 52 +++++++ 3 files changed, 128 insertions(+), 85 deletions(-) create mode 100644 datadog/util/cli.py diff --git a/datadog/api/service_level_objectives.py b/datadog/api/service_level_objectives.py index 91f3e4b33..ab1625a8a 100644 --- a/datadog/api/service_level_objectives.py +++ b/datadog/api/service_level_objectives.py @@ -7,6 +7,8 @@ ActionAPIResource, ) +from datadog.api.exceptions import ApiError + class ServiceLevelObjective( GetableAPIResource, @@ -35,7 +37,7 @@ def create( attach_host_name=False, method="POST", id=None, params=None, **body ) if results["error"]: - raise Exception(results["error"]) + raise ApiError(results["error"]) else: return results["data"][0] @@ -51,7 +53,7 @@ def get(cls, id, **params): """ results = super(ServiceLevelObjective, cls).get(id, **params) if results["error"]: - raise Exception(results["error"]) + raise ApiError(results["error"]) else: return results["data"] @@ -84,7 +86,7 @@ def get_all(cls, query=None, ids=None, offset=0, limit=100, **params): results = super(ServiceLevelObjective, cls).get_all(**search_terms) if results["error"]: - raise Exception(results["error"]) + raise ApiError(results["error"]) else: return results["data"] @@ -100,7 +102,7 @@ def update(cls, id, params=None, **body): """ results = super(ServiceLevelObjective, cls).update(id, params, **body) if results["error"]: - raise Exception(results["error"]) + raise ApiError(results["error"]) else: return results["data"][0] @@ -116,7 +118,7 @@ def delete(cls, id, **params): """ results = super(ServiceLevelObjective, cls).delete(id, **params) if results["error"]: - raise Exception(results["error"]) + raise ApiError(results["error"]) else: return results["data"][0] diff --git a/datadog/dogshell/service_level_objective.py b/datadog/dogshell/service_level_objective.py 
index dbae00858..618900e21 100644 --- a/datadog/dogshell/service_level_objective.py +++ b/datadog/dogshell/service_level_objective.py @@ -3,6 +3,7 @@ import json # 3p +from datadog.util.cli import set_of_ints, comma_set, comma_list_or_empty from datadog.util.format import pretty_json # datadog @@ -33,11 +34,14 @@ def setup_parser(cls, subparsers): "--description", help="description of the SLO", default=None ) create_parser.add_argument( - "--tags", help="comma-separated list of tags", default=None + "--tags", + help="comma-separated list of tags", + default=None, + type=comma_list_or_empty, ) create_parser.add_argument( "--thresholds", - help="comma separated list of :[:]", + help="comma separated list of :[:[:[:]]", required=True, ) create_parser.add_argument( @@ -51,11 +55,20 @@ def setup_parser(cls, subparsers): default=None, ) create_parser.add_argument( - "--monitor_ids", help="explicit monitor_ids to use (CSV)", default=None + "--monitor_ids", + help="explicit monitor_ids to use (CSV)", + default=None, + type=set_of_ints, ) create_parser.add_argument( "--monitor_search", help="monitor search terms to use", default=None ) + create_parser.add_argument( + "--groups", + help="for a single monitor you can specify the specific groups as a pipe (|) delimited string", + default=None, + type=comma_list_or_empty, + ) create_parser.set_defaults(func=cls._create) file_create_parser = verb_parsers.add_parser( @@ -82,11 +95,14 @@ def setup_parser(cls, subparsers): ) create_parser.add_argument( "--thresholds", - help="comma separated list of :[:]", + help="comma separated list of :[:[:[:]]", required=True, ) update_parser.add_argument( - "--tags", help="comma-separated list of tags", default=None + "--tags", + help="comma-separated list of tags", + default=None, + type=comma_list_or_empty, ) update_parser.add_argument( "--numerator", @@ -99,11 +115,19 @@ def setup_parser(cls, subparsers): default=None, ) update_parser.add_argument( - "--monitor_ids", help="explicit 
monitor_ids to use (CSV)", default=None + "--monitor_ids", + help="explicit monitor_ids to use (CSV)", + default=None, + type=set_of_ints, ) update_parser.add_argument( "--monitor_search", help="monitor search terms to use", default=None ) + update_parser.add_argument( + "--groups", + help="for a single monitor you can specify the specific groups as a pipe (|) delimited string", + default=None, + ) update_parser.set_defaults(func=cls._update) file_update_parser = verb_parsers.add_parser( @@ -127,6 +151,7 @@ def setup_parser(cls, subparsers): show_all_parser.add_argument( "--slo_ids", help="comma separated list indicating what SLO IDs to get at once", + type=comma_set, ) show_all_parser.add_argument( "--offset", help="offset of query pagination", default=0 @@ -142,7 +167,7 @@ def setup_parser(cls, subparsers): delete_many_parser = verb_parsers.add_parser("delete_many", help="Delete a SLO") delete_many_parser.add_argument( - "slo_ids", help="comma separated list of SLO IDs to delete" + "slo_ids", help="comma separated list of SLO IDs to delete", type=comma_set ) delete_many_parser.set_defaults(func=cls._delete_many) @@ -154,6 +179,7 @@ def setup_parser(cls, subparsers): "timeframes", help="CSV of timeframes to delete, e.g. 
7d,30d,90d", required=True, + type=comma_set, ) delete_timeframe_parser.set_defaults(func=cls._delete_timeframe) @@ -172,13 +198,20 @@ def _create(cls, args): for threshold_str in args.thresholds.split(","): parts = threshold_str.split(":") timeframe = parts[0] - target = parts[1] - warning = None + target = float(parts[1]) + + threshold = {"timeframe": timeframe, "target": target} + if len(parts) > 2: - warning = parts[2] - thresholds.append( - {"timeframe": timeframe, "target": target, "warning": warning} - ) + threshold["warning"] = float(parts[2]) + + if len(parts) > 3 and parts[3]: + threshold["target_display"] = parts[3] + + if len(parts) > 4 and parts[4]: + threshold["warning_display"] = parts[4] + + thresholds.append(threshold) params["thresholds"] = thresholds if args.description: @@ -193,9 +226,12 @@ def _create(cls, args): params["monitor_search"] = args.monitor_search else: params["monitor_ids"] = args.monitor_ids + if args.groups and len(args.monitor_ids) == 1: + groups = args.groups.split("|") + params["groups"] = groups if args.tags: - params["tags"] = args.tags.split(",") + params["tags"] = args.tags res = api.ServiceLevelObjective.create(**params) report_warnings(res) @@ -210,31 +246,7 @@ def _file_create(cls, args): api._timeout = args.timeout format = args.format slo = json.load(args.file) - - params = { - "type": slo["type"], - "name": slo["name"], - "thresholds": slo["thresholds"], - } - - if slo.get("description"): - params["description"] = slo["description"] - - if slo["type"] == "metric": - params["query"] = { - "numerator": slo["numerator"], - "denominator": slo["denominator"], - } - elif slo.get("monitor_search"): - params["monitor_search"] = slo["monitor_search"] - else: - params["monitor_ids"] = slo["monitor_ids"] - - if slo.get("tags"): - tags = sorted(set([t.strip() for t in slo["tags"].split(",") if t.strip()])) - params["tags"] = tags - - res = api.ServiceLevelObjective.create(**params) + res = api.ServiceLevelObjective.create(**slo) 
report_warnings(res) report_errors(res) if format == "pretty": @@ -255,12 +267,19 @@ def _update(cls, args): parts = threshold_str.split(":") timeframe = parts[0] target = parts[1] - warning = None + + threshold = {"timeframe": timeframe, "target": target} + if len(parts) > 2: - warning = parts[2] - thresholds.append( - {"timeframe": timeframe, "target": target, "warning": warning} - ) + threshold["warning"] = float(parts[2]) + + if len(parts) > 3 and parts[3]: + threshold["target_display"] = parts[3] + + if len(parts) > 4 and parts[4]: + threshold["warning_display"] = parts[4] + + thresholds.append(threshold) params["thresholds"] = thresholds if args.description: @@ -276,9 +295,12 @@ def _update(cls, args): params["monitor_search"] = args.monitor_search else: params["monitor_ids"] = args.monitor_ids + if args.groups and len(args.monitor_ids) == 1: + groups = args.groups.split("|") + params["groups"] = groups if args.tags: - tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + tags = sorted(set([t.strip() for t in args.tags if t.strip()])) params["tags"] = tags res = api.ServiceLevelObjective.update(args.slo_id, **params) report_warnings(res) @@ -294,40 +316,7 @@ def _file_update(cls, args): format = args.format slo = json.load(args.file) - params = {"type": slo["type"], "name": slo["name"]} - - if slo.get("thresholds"): - thresholds = [] - for threshold_str in args.thresholds.split(","): - parts = threshold_str.split(":") - timeframe = parts[0] - target = parts[1] - warning = None - if len(parts) > 2: - warning = parts[2] - thresholds.append( - {"timeframe": timeframe, "target": target, "warning": warning} - ) - params["thresholds"] = thresholds - - if slo.get("description"): - params["description"] = slo["description"] - - if slo["type"] == "metric": - params["query"] = { - "numerator": slo["numerator"], - "denominator": slo["denominator"], - } - elif slo.get("monitor_search"): - params["monitor_search"] = slo["monitor_search"] - else: - 
params["monitor_ids"] = slo["monitor_ids"] - - if slo.get("tags"): - tags = sorted(set([t.strip() for t in slo["tags"].split(",") if t.strip()])) - params["tags"] = tags - - res = api.ServiceLevelObjective.update(slo["id"], **params) + res = api.ServiceLevelObjective.update(slo["id"], **slo) report_warnings(res) report_errors(res) if format == "pretty": @@ -401,7 +390,7 @@ def _delete_many(cls, args): def _delete_timeframe(cls, args): api._timeout = args.timeout - ops = {args.slo_id: args.timeframes.split(",")} + ops = {args.slo_id: args.timeframes} res = api.ServiceLevelObjective.bulk_delete(ops) if res is not None: diff --git a/datadog/util/cli.py b/datadog/util/cli.py new file mode 100644 index 000000000..b9e8ffa00 --- /dev/null +++ b/datadog/util/cli.py @@ -0,0 +1,52 @@ +from argparse import ArgumentTypeError +import json + + +def comma_list(list_str, item_func=None): + if not list_str: + raise ArgumentTypeError("Invalid comma list") + item_func = item_func or (lambda i: i) + return [item_func(i.strip()) for i in list_str.split(",") if i.strip()] + + +def comma_set(list_str, item_func=None): + return set(comma_list(list_str, item_func=item_func)) + + +def comma_list_or_empty(list_str): + if not list_str: + return [] + else: + return comma_list(list_str) + + +def list_of_ints(int_csv): + if not int_csv: + raise ArgumentTypeError("Invalid list of ints") + try: + # Try as a [1, 2, 3] list + j = json.loads(int_csv) + if isinstance(j, (list, set)): + j = [int(i) for i in j] + return j + except Exception: + pass + + try: + return [int(i.strip()) for i in int_csv.strip().split(",")] + except Exception: + raise ArgumentTypeError("Invalid list of ints: {0}".format(int_csv)) + + +def list_of_ints_and_strs(csv): + def int_or_str(item): + try: + return int(item) + except ValueError: + return item + + return comma_list(csv, int_or_str) + + +def set_of_ints(int_csv): + return set(list_of_ints(int_csv)) From fee6c661d497877937d775e64c910d57fb7d356a Mon Sep 17 00:00:00 2001 
From: Cody Lee Date: Thu, 10 Oct 2019 09:46:03 -0500 Subject: [PATCH 10/15] add option to return raw api response without throwing an ApiError this makes dogshell UX better --- datadog/api/service_level_objectives.py | 57 ++++++++++++++++++--- datadog/dogshell/service_level_objective.py | 14 ++--- 2 files changed, 57 insertions(+), 14 deletions(-) diff --git a/datadog/api/service_level_objectives.py b/datadog/api/service_level_objectives.py index ab1625a8a..53981f44c 100644 --- a/datadog/api/service_level_objectives.py +++ b/datadog/api/service_level_objectives.py @@ -26,39 +26,61 @@ class ServiceLevelObjective( @classmethod def create( - cls, attach_host_name=False, method="POST", id=None, params=None, **body + cls, + attach_host_name=False, + method="POST", + id=None, + params=None, + return_raw=True, + **body ): """ Create a SLO + :param return_raw: return raw results + :type return_raw: bool + :returns: created SLO details """ results = super(ServiceLevelObjective, cls).create( attach_host_name=False, method="POST", id=None, params=None, **body ) + + if return_raw: + return results + if results["error"]: raise ApiError(results["error"]) else: return results["data"][0] @classmethod - def get(cls, id, **params): + def get(cls, id, return_raw=True, **params): """ Get a specific SLO details. :param id: SLO id to get details for :type id: str + :param return_raw: return raw results + :type return_raw: bool + :returns: SLO details """ results = super(ServiceLevelObjective, cls).get(id, **params) + + if return_raw: + return results + if results["error"]: raise ApiError(results["error"]) else: return results["data"] @classmethod - def get_all(cls, query=None, ids=None, offset=0, limit=100, **params): + def get_all( + cls, query=None, ids=None, offset=0, limit=100, return_raw=True, **params + ): """ Get all SLO details. 
@@ -74,6 +96,9 @@ def get_all(cls, query=None, ids=None, offset=0, limit=100, **params): :param limit: limit of results to return (default: 1000) :type limit: int + :param return_raw: return raw results + :type return_raw: bool + :returns: SLOs matching the query """ search_terms = {} @@ -85,45 +110,63 @@ def get_all(cls, query=None, ids=None, offset=0, limit=100, **params): search_terms["limit"] = limit results = super(ServiceLevelObjective, cls).get_all(**search_terms) + + if return_raw: + return results + if results["error"]: raise ApiError(results["error"]) else: return results["data"] @classmethod - def update(cls, id, params=None, **body): + def update(cls, id, params=None, return_raw=True, **body): """ Update a specific SLO details. :param id: SLO id to update details for :type id: str + :param return_raw: return raw results + :type return_raw: bool + :returns: SLO details """ results = super(ServiceLevelObjective, cls).update(id, params, **body) + + if return_raw: + return results + if results["error"]: raise ApiError(results["error"]) else: return results["data"][0] @classmethod - def delete(cls, id, **params): + def delete(cls, id, return_raw=False, **params): """ Delete a specific SLO. :param id: SLO id to delete :type id: str + :param return_raw: return raw results + :type return_raw: bool + :returns: SLO ids removed """ results = super(ServiceLevelObjective, cls).delete(id, **params) + + if return_raw: + return results + if results["error"]: raise ApiError(results["error"]) else: return results["data"][0] @classmethod - def bulk_delete(cls, ops): + def bulk_delete(cls, ops, **params): """ Bulk Delete Timeframes from multiple SLOs. 
@@ -135,7 +178,7 @@ def bulk_delete(cls, ops): `data` - updates and deletions """ return super(ServiceLevelObjective, cls)._trigger_class_action( - "POST", "bulk_delete", body=ops + "POST", "bulk_delete", body=ops, params=params ) @classmethod diff --git a/datadog/dogshell/service_level_objective.py b/datadog/dogshell/service_level_objective.py index 618900e21..bc8e71066 100644 --- a/datadog/dogshell/service_level_objective.py +++ b/datadog/dogshell/service_level_objective.py @@ -233,7 +233,7 @@ def _create(cls, args): if args.tags: params["tags"] = args.tags - res = api.ServiceLevelObjective.create(**params) + res = api.ServiceLevelObjective.create(return_raw=True, **params) report_warnings(res) report_errors(res) if format == "pretty": @@ -246,7 +246,7 @@ def _file_create(cls, args): api._timeout = args.timeout format = args.format slo = json.load(args.file) - res = api.ServiceLevelObjective.create(**slo) + res = api.ServiceLevelObjective.create(return_raw=True, **slo) report_warnings(res) report_errors(res) if format == "pretty": @@ -302,7 +302,7 @@ def _update(cls, args): if args.tags: tags = sorted(set([t.strip() for t in args.tags if t.strip()])) params["tags"] = tags - res = api.ServiceLevelObjective.update(args.slo_id, **params) + res = api.ServiceLevelObjective.update(args.slo_id, return_raw=True, **params) report_warnings(res) report_errors(res) if format == "pretty": @@ -316,7 +316,7 @@ def _file_update(cls, args): format = args.format slo = json.load(args.file) - res = api.ServiceLevelObjective.update(slo["id"], **slo) + res = api.ServiceLevelObjective.update(slo["id"], return_raw=True, **slo) report_warnings(res) report_errors(res) if format == "pretty": @@ -328,7 +328,7 @@ def _file_update(cls, args): def _show(cls, args): api._timeout = args.timeout format = args.format - res = api.ServiceLevelObjective.get(args.slo_id) + res = api.ServiceLevelObjective.get(args.slo_id, return_raw=True) report_warnings(res) report_errors(res) @@ -351,7 +351,7 @@ def 
_show_all(cls, args): else: params["ids"] = args.slo_ids - res = api.ServiceLevelObjective.get_all(**params) + res = api.ServiceLevelObjective.get_all(return_raw=True, **params) report_warnings(res) report_errors(res) @@ -363,7 +363,7 @@ def _show_all(cls, args): @classmethod def _delete(cls, args): api._timeout = args.timeout - res = api.ServiceLevelObjective.delete(args.slo_id) + res = api.ServiceLevelObjective.delete(args.slo_id, return_raw=True) if res is not None: report_warnings(res) report_errors(res) From fb8244ec3fb71945ec88a0feafa6e67ef61d2870 Mon Sep 17 00:00:00 2001 From: Cody Lee Date: Thu, 10 Oct 2019 10:03:16 -0500 Subject: [PATCH 11/15] oops set return_raw default False --- datadog/api/service_level_objectives.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/datadog/api/service_level_objectives.py b/datadog/api/service_level_objectives.py index 53981f44c..082391430 100644 --- a/datadog/api/service_level_objectives.py +++ b/datadog/api/service_level_objectives.py @@ -31,7 +31,7 @@ def create( method="POST", id=None, params=None, - return_raw=True, + return_raw=False, **body ): """ @@ -55,7 +55,7 @@ def create( return results["data"][0] @classmethod - def get(cls, id, return_raw=True, **params): + def get(cls, id, return_raw=False, **params): """ Get a specific SLO details. @@ -79,7 +79,7 @@ def get(cls, id, return_raw=True, **params): @classmethod def get_all( - cls, query=None, ids=None, offset=0, limit=100, return_raw=True, **params + cls, query=None, ids=None, offset=0, limit=100, return_raw=False, **params ): """ Get all SLO details. @@ -120,7 +120,7 @@ def get_all( return results["data"] @classmethod - def update(cls, id, params=None, return_raw=True, **body): + def update(cls, id, params=None, return_raw=False, **body): """ Update a specific SLO details. 
From 31df1b37572a6c7bc559e36e646d65bfb20e8d31 Mon Sep 17 00:00:00 2001 From: Cody Lee Date: Thu, 10 Oct 2019 11:40:36 -0500 Subject: [PATCH 12/15] add suppress ApiError on base client for status codes to adapt to warnings-style errors for certain status codes update SLO tests --- datadog/api/api_client.py | 13 +++- datadog/api/service_level_objectives.py | 99 ++++++------------------- tests/integration/api/test_api.py | 8 +- 3 files changed, 35 insertions(+), 85 deletions(-) diff --git a/datadog/api/api_client.py b/datadog/api/api_client.py index 9f4f3aa71..c27b1d21d 100644 --- a/datadog/api/api_client.py +++ b/datadog/api/api_client.py @@ -46,7 +46,7 @@ def _get_http_client(cls): @classmethod def submit(cls, method, path, api_version=None, body=None, attach_host_name=False, - response_formatter=None, error_formatter=None, **params): + response_formatter=None, error_formatter=None, suppress_response_errors_on_codes=None, **params): """ Make an HTTP API request @@ -70,6 +70,10 @@ def submit(cls, method, path, api_version=None, body=None, attach_host_name=Fals :param attach_host_name: link the new resource object to the host name :type attach_host_name: bool + :param suppress_response_errors_on_codes: suppress ApiError on `errors` key in the response for the given HTTP + status codes + :type suppress_response_errors_on_codes: None|list(int) + :param params: dictionary to be sent in the query string of the request :type params: dictionary @@ -151,7 +155,10 @@ def submit(cls, method, path, api_version=None, body=None, attach_host_name=Fals raise ValueError('Invalid JSON response: {0}'.format(content)) if response_obj and 'errors' in response_obj: - raise ApiError(response_obj) + # suppress ApiError when specified and just return the response + if not (suppress_response_errors_on_codes and + result.status_code in suppress_response_errors_on_codes): + raise ApiError(response_obj) else: response_obj = None @@ -177,7 +184,7 @@ def submit(cls, method, path, 
api_version=None, body=None, attach_host_name=Fals raise except ApiError as e: if _mute: - for error in e.args[0]['errors']: + for error in (e.args[0].get('errors') or []): log.error(error) if error_formatter is None: return e.args[0] diff --git a/datadog/api/service_level_objectives.py b/datadog/api/service_level_objectives.py index 082391430..d2b8bc6bc 100644 --- a/datadog/api/service_level_objectives.py +++ b/datadog/api/service_level_objectives.py @@ -7,8 +7,6 @@ ActionAPIResource, ) -from datadog.api.exceptions import ApiError - class ServiceLevelObjective( GetableAPIResource, @@ -26,61 +24,31 @@ class ServiceLevelObjective( @classmethod def create( - cls, - attach_host_name=False, - method="POST", - id=None, - params=None, - return_raw=False, - **body + cls, attach_host_name=False, method="POST", id=None, params=None, **body ): """ Create a SLO - :param return_raw: return raw results - :type return_raw: bool - :returns: created SLO details """ - results = super(ServiceLevelObjective, cls).create( - attach_host_name=False, method="POST", id=None, params=None, **body + return super(ServiceLevelObjective, cls).create( + attach_host_name=False, method="POST", id=None, params=params, **body ) - if return_raw: - return results - - if results["error"]: - raise ApiError(results["error"]) - else: - return results["data"][0] - @classmethod - def get(cls, id, return_raw=False, **params): + def get(cls, id, **params): """ Get a specific SLO details. 
:param id: SLO id to get details for :type id: str - :param return_raw: return raw results - :type return_raw: bool - :returns: SLO details """ - results = super(ServiceLevelObjective, cls).get(id, **params) - - if return_raw: - return results - - if results["error"]: - raise ApiError(results["error"]) - else: - return results["data"] + return super(ServiceLevelObjective, cls).get(id, **params) @classmethod - def get_all( - cls, query=None, ids=None, offset=0, limit=100, return_raw=False, **params - ): + def get_all(cls, query=None, ids=None, offset=0, limit=100, **params): """ Get all SLO details. @@ -96,9 +64,6 @@ def get_all( :param limit: limit of results to return (default: 1000) :type limit: int - :param return_raw: return raw results - :type return_raw: bool - :returns: SLOs matching the query """ search_terms = {} @@ -109,61 +74,31 @@ def get_all( search_terms["offset"] = offset search_terms["limit"] = limit - results = super(ServiceLevelObjective, cls).get_all(**search_terms) - - if return_raw: - return results - - if results["error"]: - raise ApiError(results["error"]) - else: - return results["data"] + return super(ServiceLevelObjective, cls).get_all(**search_terms) @classmethod - def update(cls, id, params=None, return_raw=False, **body): + def update(cls, id, params=None, **body): """ Update a specific SLO details. :param id: SLO id to update details for :type id: str - :param return_raw: return raw results - :type return_raw: bool - :returns: SLO details """ - results = super(ServiceLevelObjective, cls).update(id, params, **body) - - if return_raw: - return results - - if results["error"]: - raise ApiError(results["error"]) - else: - return results["data"][0] + return super(ServiceLevelObjective, cls).update(id, params, **body) @classmethod - def delete(cls, id, return_raw=False, **params): + def delete(cls, id, **params): """ Delete a specific SLO. 
:param id: SLO id to delete :type id: str - :param return_raw: return raw results - :type return_raw: bool - :returns: SLO ids removed """ - results = super(ServiceLevelObjective, cls).delete(id, **params) - - if return_raw: - return results - - if results["error"]: - raise ApiError(results["error"]) - else: - return results["data"][0] + return super(ServiceLevelObjective, cls).delete(id, **params) @classmethod def bulk_delete(cls, ops, **params): @@ -178,7 +113,11 @@ def bulk_delete(cls, ops, **params): `data` - updates and deletions """ return super(ServiceLevelObjective, cls)._trigger_class_action( - "POST", "bulk_delete", body=ops, params=params + "POST", + "bulk_delete", + body=ops, + params=params, + suppress_response_errors_on_codes=[200], ) @classmethod @@ -192,5 +131,9 @@ def delete_many(cls, ids, **params): :returns: Dictionary representing the API's JSON response see `data` list(slo ids) && `errors` """ return super(ServiceLevelObjective, cls)._trigger_class_action( - "DELETE", "", params=params, body={"ids": ids} + "DELETE", + "", + params=params, + body={"ids": ids}, + suppress_response_errors_on_codes=[200], ) diff --git a/tests/integration/api/test_api.py b/tests/integration/api/test_api.py index 17ac69285..212842294 100644 --- a/tests/integration/api/test_api.py +++ b/tests/integration/api/test_api.py @@ -404,19 +404,19 @@ def test_service_level_objective_crud(self): thresholds = [{"timeframe": "7d", "target": 90}] name = "test SLO {}".format(time.time()) slo = dog.ServiceLevelObjective.create(type="metric", query=query, thresholds=thresholds, name=name, - tags=["type:test"]) + tags=["type:test"])["data"][0] assert slo["name"] == name numerator2 = "sum:my.custom.metric{type:good,!type:ignored}.as_count()" denominator2 = "sum:my.custom.metric{!type:ignored}.as_count()" query = {"numerator": numerator2, "denominator": denominator2} slo = dog.ServiceLevelObjective.update(id=slo["id"], type="metric", query=query, thresholds=thresholds, - name=name, 
tags=["type:test"]) + name=name, tags=["type:test"])["data"][0] assert slo["name"] == name - slos = [s for s in dog.ServiceLevelObjective.get_all() if s["id"] == slo["id"]] + slos = [s for s in dog.ServiceLevelObjective.get_all()["data"] if s["id"] == slo["id"]] assert len(slos) == 1 - assert dog.ServiceLevelObjective.get(slo["id"])["id"] == slo["id"] + assert dog.ServiceLevelObjective.get(slo["id"])["data"]["id"] == slo["id"] dog.ServiceLevelObjective.delete(slo["id"]) @pytest.mark.admin_needed From b8dc167077d39ca2712fe3aff0578647d69677ed Mon Sep 17 00:00:00 2001 From: Cody Lee Date: Thu, 10 Oct 2019 12:54:15 -0500 Subject: [PATCH 13/15] add test for core client suppression --- tests/unit/api/helper.py | 11 +++++++++++ tests/unit/api/test_api.py | 13 +++++++++++++ 2 files changed, 24 insertions(+) diff --git a/tests/unit/api/helper.py b/tests/unit/api/helper.py index 1ffd95760..9d5dc6c7c 100644 --- a/tests/unit/api/helper.py +++ b/tests/unit/api/helper.py @@ -1,6 +1,7 @@ # stdlib import unittest import json +from StringIO import StringIO # 3p from mock import Mock @@ -134,6 +135,16 @@ def setUp(self): def tearDown(self): RequestClient._session = None + def load_request_response(self, status_code=200, response_body='{}', raise_for_status=False): + """ + Load the repsonse body from the given payload + """ + mock_response = MockResponse(raise_for_status=raise_for_status) + mock_response.raw = StringIO(response_body) + mock_response.status_code = status_code + + self.request_mock.request = Mock(return_value=mock_response) + def arm_requests_to_raise(self): """ Arm the mocked request to raise for status. 
diff --git a/tests/unit/api/test_api.py b/tests/unit/api/test_api.py index 636dcad70..540a333dc 100644 --- a/tests/unit/api/test_api.py +++ b/tests/unit/api/test_api.py @@ -78,6 +78,19 @@ def test_get_hostname(self, mock_config_path): initialize() self.assertEqual(api._host_name, HOST_NAME, api._host_name) + def test_errors_suppressed(self): + """ + API `errors` field ApiError supppressed when specified + """ + # Test API, application keys, API host, and some HTTP client options + initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST) + + # Make a simple API call + self.load_request_response(response_body='{"data": {}, "errors": ["foo error"]}') + resp = MyCreatable.create(params={"suppress_response_errors_on_codes": [200]}) + self.assertNotIsInstance(resp, ApiError) + self.assertDictEqual({"data": {}, "errors": ["foo error"]}, resp) + def test_request_parameters(self): """ API parameters are set with `initialize` method. From c56d60ee70246fc1609b15fda01d4d5f3363f4e4 Mon Sep 17 00:00:00 2001 From: Cody Lee Date: Thu, 10 Oct 2019 13:59:38 -0500 Subject: [PATCH 14/15] compat for py3 --- datadog/util/compat.py | 1 + tests/unit/api/helper.py | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog/util/compat.py b/datadog/util/compat.py index e15890c42..24431accf 100644 --- a/datadog/util/compat.py +++ b/datadog/util/compat.py @@ -42,6 +42,7 @@ def is_higher_py35(): import builtins import configparser import urllib.request as url_lib, urllib.error, urllib.parse + from io import StringIO imap = map text = str diff --git a/tests/unit/api/helper.py b/tests/unit/api/helper.py index 9d5dc6c7c..a89c046ba 100644 --- a/tests/unit/api/helper.py +++ b/tests/unit/api/helper.py @@ -1,7 +1,6 @@ # stdlib import unittest import json -from StringIO import StringIO # 3p from mock import Mock @@ -23,7 +22,7 @@ DeletableAPISubResource, ActionAPIResource ) -from datadog.util.compat import iteritems +from datadog.util.compat import iteritems, StringIO from 
tests.util.contextmanagers import EnvVars From 6a73b8012bbd55d4e402609671e0e4219ee4be7a Mon Sep 17 00:00:00 2001 From: Cody Lee Date: Thu, 10 Oct 2019 14:17:20 -0500 Subject: [PATCH 15/15] helper hack for py3 testing compat --- datadog/util/compat.py | 1 - tests/unit/api/helper.py | 8 ++++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/datadog/util/compat.py b/datadog/util/compat.py index 24431accf..e15890c42 100644 --- a/datadog/util/compat.py +++ b/datadog/util/compat.py @@ -42,7 +42,6 @@ def is_higher_py35(): import builtins import configparser import urllib.request as url_lib, urllib.error, urllib.parse - from io import StringIO imap = map text = str diff --git a/tests/unit/api/helper.py b/tests/unit/api/helper.py index a89c046ba..1af83ed94 100644 --- a/tests/unit/api/helper.py +++ b/tests/unit/api/helper.py @@ -1,4 +1,5 @@ # stdlib +from io import BytesIO import unittest import json @@ -22,7 +23,7 @@ DeletableAPISubResource, ActionAPIResource ) -from datadog.util.compat import iteritems, StringIO +from datadog.util.compat import iteritems, is_p3k from tests.util.contextmanagers import EnvVars @@ -139,7 +140,10 @@ def load_request_response(self, status_code=200, response_body='{}', raise_for_s Load the repsonse body from the given payload """ mock_response = MockResponse(raise_for_status=raise_for_status) - mock_response.raw = StringIO(response_body) + if is_p3k(): + mock_response.raw = BytesIO(bytes(response_body, 'utf-8')) + else: + mock_response.raw = BytesIO(response_body) mock_response.status_code = status_code self.request_mock.request = Mock(return_value=mock_response)