From b4a763380df369465b1b5f3259a231c7ff44e00f Mon Sep 17 00:00:00 2001
From: Fede Barcelona
Date: Fri, 2 May 2025 10:54:27 +0200
Subject: [PATCH 01/13] build: add nix flake for reproducible dev env

---
 .envrc     |  4 ++++
 .gitignore |  2 ++
 flake.lock | 61 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 flake.nix  | 29 ++++++++++++++++++++++++++
 4 files changed, 96 insertions(+)
 create mode 100644 .envrc
 create mode 100644 flake.lock
 create mode 100644 flake.nix

diff --git a/.envrc b/.envrc
new file mode 100644
index 00000000..8d344522
--- /dev/null
+++ b/.envrc
@@ -0,0 +1,4 @@
+has nix && use flake
+watch_file *.nix
+dotenv_if_exists .env # You can create a .env file with your env vars for this project. You can also use .secrets if you are using act. See the line below.
+dotenv_if_exists .secrets # Used by [act](https://nektosact.com/) to load secrets into the pipelines
diff --git a/.gitignore b/.gitignore
index 404fc5aa..2adb3114 100644
--- a/.gitignore
+++ b/.gitignore
@@ -61,6 +61,8 @@ venv/
 
 # Direnv
 .envrc
+.direnv/
+.secrets
 
 # IntelliJ projects
 .idea/
diff --git a/flake.lock b/flake.lock
new file mode 100644
index 00000000..a3a3be87
--- /dev/null
+++ b/flake.lock
@@ -0,0 +1,61 @@
+{
+  "nodes": {
+    "flake-utils": {
+      "inputs": {
+        "systems": "systems"
+      },
+      "locked": {
+        "lastModified": 1731533236,
+        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "type": "github"
+      }
+    },
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1746123514,
+        "narHash": "sha256-UNO+MbVHLl4AkVWYqekk72/gqFNSLYNkBgto7h+7P3U=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "b5dd9efc912ecabeafa4f082d31e19cb1c74266c",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixpkgs-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "flake-utils": "flake-utils",
+        "nixpkgs": "nixpkgs"
+      }
+    },
+    "systems": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}
diff --git a/flake.nix b/flake.nix
new file mode 100644
index 00000000..4cde2cd9
--- /dev/null
+++ b/flake.nix
@@ -0,0 +1,29 @@
+{
+  inputs = {
+    nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
+    flake-utils.url = "github:numtide/flake-utils";
+  };
+  outputs =
+    {
+      self,
+      nixpkgs,
+      flake-utils,
+    }:
+    flake-utils.lib.eachDefaultSystem (
+      system:
+      let
+        pkgs = import nixpkgs { inherit system; };
+      in
+      {
+        devShells.default =
+          with pkgs;
+          mkShell {
+            packages = [
+              python310 # Minimum supported python version in this project
+            ];
+          };
+
+        formatter = pkgs.nixfmt-rfc-style;
+      }
+    );
+}

From 7634e6f6f8eb6b8bb1dda53682ba5c66ead3c3ee Mon Sep 17 00:00:00 2001
From: Fede Barcelona
Date: Fri, 2 May 2025 11:00:05 +0200
Subject: [PATCH 02/13] build: replace poetry with uv

---
 flake.nix      |   1 +
 poetry.lock    | 934 ------------------------------------------------
 pyproject.toml |  60 ++--
 uv.lock        | 605 ++++++++++++++++++++++++++++++++
 4 files changed, 636 insertions(+), 964 deletions(-)
 delete mode 100644 poetry.lock
 create mode 100644 uv.lock

diff --git a/flake.nix b/flake.nix
index 4cde2cd9..67937559 100644
--- a/flake.nix
+++ b/flake.nix
@@ -20,6 +20,7 @@
           mkShell {
             packages = [
               python310 # Minimum supported python version in this project
+              uv
             ];
           };
 
diff --git a/poetry.lock b/poetry.lock
deleted file mode 100644
index 630f0916..00000000
--- a/poetry.lock
+++ /dev/null
@@ -1,934 +0,0 @@
-# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
-
-[[package]]
-name = "alabaster"
-version = "0.7.13"
-description = "A configurable sidebar-enabled Sphinx theme"
-optional = true
-python-versions = ">=3.6"
-groups = ["main"]
-markers = "extra == \"docs\""
-files = [
-    {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"},
-    {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"},
-]
-
-[[package]]
-name = "args"
-version = "0.1.0"
-description = "Command Arguments for Humans."
-optional = false
-python-versions = "*"
-groups = ["dev"]
-files = [
-    {file = "args-0.1.0.tar.gz", hash = "sha256:a785b8d837625e9b61c39108532d95b85274acd679693b71ebb5156848fcf814"},
-]
-
-[[package]]
-name = "babel"
-version = "2.15.0"
-description = "Internationalization utilities"
-optional = true
-python-versions = ">=3.8"
-groups = ["main"]
-markers = "extra == \"docs\""
-files = [
-    {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"},
-    {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"},
-]
-
-[package.dependencies]
-pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""}
-
-[package.extras]
-dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
-
-[[package]]
-name = "certifi"
-version = "2024.2.2"
-description = "Python package for providing Mozilla's CA Bundle."
-optional = false
-python-versions = ">=3.6"
-groups = ["main"]
-files = [
-    {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
-    {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
-]
-
-[[package]]
-name = "charset-normalizer"
-version = "3.3.2"
-description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-optional = false -python-versions = ">=3.7.0" -groups = ["main"] -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "clint" -version = "0.5.1" -description = "Python Command Line Interface Tools" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "clint-0.5.1.tar.gz", hash = "sha256:05224c32b1075563d0b16d0015faaf9da43aa214e4a2140e51f08789e7a4c5aa"}, -] - -[package.dependencies] -args = "*" - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = true -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main"] -markers = "extra == \"docs\" and sys_platform == \"win32\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "coverage" -version = "6.5.0" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = 
"coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] - -[package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] - -[[package]] -name = "docutils" -version = "0.19" -description = "Docutils -- Python Documentation Utilities" -optional = true -python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, - {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, -] - -[[package]] -name = "doublex" -version = "1.9.6.1" -description = "Python test doubles" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "doublex-1.9.6.1.tar.gz", hash = "sha256:48fbc633598eb913a6eb0c4694f8c040ba57bae9653d45643a84243e0c9f3268"}, -] - 
-[package.dependencies] -PyHamcrest = "*" -six = "*" - -[[package]] -name = "doublex-expects" -version = "0.7.1" -description = "Expects matchers for Doublex test doubles assertions" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "doublex-expects-0.7.1.tar.gz", hash = "sha256:8040682d97f0a66f632c5df982f78d09aee36b2c4a1eb275b0c596d115f200aa"}, -] - -[package.dependencies] -doublex = "*" -expects = ">=0.8.0rc1" - -[[package]] -name = "expects" -version = "0.9.0" -description = "Expressive and extensible TDD/BDD assertion library for Python" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "expects-0.9.0.tar.gz", hash = "sha256:419902ccafe81b7e9559eeb6b7a07ef9d5c5604eddb93000f0642b3b2d594f4c"}, -] - -[[package]] -name = "flake8" -version = "3.9.2" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -groups = ["dev"] -files = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, -] - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -groups = ["main"] -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] - -[[package]] -name = "importlib-metadata" -version = "7.1.0" -description = "Read metadata from Python packages" -optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version < \"3.10\" and extra == \"docs\"" -files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy ; platform_python_implementation != \"PyPy\"", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." 
-optional = true -python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "mamba" -version = "0.11.3" -description = "The definitive testing tool for Python. Born under the banner of Behavior Driven Development." -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "mamba-0.11.3.tar.gz", hash = "sha256:4dcf69e9a53e78d4aa5ec3dee0bb2c65f02ea68a6b62c4275653d7170b8f5fe2"}, -] - -[package.dependencies] -clint = "*" -coverage = "*" - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = true -python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = 
"sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -optional = 
false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] - -[[package]] -name = "packaging" -version = "24.0" -description = "Core utilities for Python packages" -optional = true -python-versions = ">=3.7" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, -] - -[[package]] -name = "pyaml" -version = "21.10.1" -description = "PyYAML-based module to produce pretty and readable YAML-serialized data" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "pyaml-21.10.1-py2.py3-none-any.whl", hash = "sha256:19985ed303c3a985de4cf8fd329b6d0a5a5b5c9035ea240eccc709ebacbaf4a0"}, - {file = "pyaml-21.10.1.tar.gz", hash = "sha256:c6519fee13bf06e3bb3f20cacdea8eba9140385a7c2546df5dbae4887f768383"}, -] - -[package.dependencies] -PyYAML = "*" - -[[package]] -name = "pycodestyle" -version = "2.7.0" -description = "Python style guide checker" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["dev"] -files = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, -] - -[[package]] -name = "pyflakes" -version = "2.3.1" -description = "passive checker of Python programs" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["dev"] -files = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, -] - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pyhamcrest" -version = "2.1.0" -description = "Hamcrest framework for matcher objects" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "pyhamcrest-2.1.0-py3-none-any.whl", hash = "sha256:f6913d2f392e30e0375b3ecbd7aee79e5d1faa25d345c8f4ff597665dcac2587"}, - {file = "pyhamcrest-2.1.0.tar.gz", hash = "sha256:c6acbec0923d0cb7e72c22af1926f3e7c97b8e8d69fc7498eabacaf7c975bd9c"}, -] - -[package.extras] -dev = ["black", "doc2dash", "flake8", "pyhamcrest[docs,tests]", "pytest-mypy", "towncrier", "tox", "tox-asdf", "twine"] -docs = ["alabaster (>=0.7,<1.0)", "sphinx (>=4.0,<5.0)"] -tests = ["coverage[toml]", "dataclasses ; python_version < \"3.7\"", "mypy (!=0.940) ; platform_python_implementation != \"PyPy\"", "pytest (>=5.0)", "pytest-mypy-plugins ; platform_python_implementation != \"PyPy\"", "pytest-sugar", "pytest-xdist", "pyyaml", "types-dataclasses ; python_version < \"3.7\"", "types-mock"] -tests-numpy = ["numpy", "pyhamcrest[tests]"] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = true -python-versions = "*" -groups = ["main"] -markers = "extra == \"docs\" and python_version == \"3.8\"" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-toolbelt" -version = "1.0.0" -description = "A utility belt for advanced users of python-requests" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["main"] -files = [ - {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, - {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, -] - -[package.dependencies] -requests = ">=2.0.1,<3.0.0" - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -groups = ["dev"] -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -optional = true -python-versions = "*" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - -[[package]] -name = "sphinx" -version = "5.3.0" -description = "Python documentation generator" -optional = true -python-versions = ">=3.6" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, - {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, -] - -[package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.20" -imagesize = ">=1.3" -importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.12" -requests = ">=2.5.0" -snowballstemmer = ">=2.0" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast ; python_version < \"3.8\""] - -[[package]] -name = "sphinx-rtd-theme" -version = "0.5.1" -description = "Read the 
Docs theme for Sphinx" -optional = true -python-versions = "*" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "sphinx_rtd_theme-0.5.1-py2.py3-none-any.whl", hash = "sha256:fa6bebd5ab9a73da8e102509a86f3fcc36dec04a0b52ea80e5a033b2aba00113"}, - {file = "sphinx_rtd_theme-0.5.1.tar.gz", hash = "sha256:eda689eda0c7301a80cf122dad28b1861e5605cbf455558f3775e1e8200e83a5"}, -] - -[package.dependencies] -sphinx = "*" - -[package.extras] -dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.4" -description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, - {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -optional = true -python-versions = ">=3.5" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.0.1" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, - {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -optional = true -python-versions = ">=3.5" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
-optional = true -python-versions = ">=3.5" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -optional = true -python-versions = ">=3.5" -groups = ["main"] -markers = "extra == \"docs\"" -files = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "tatsu" -version = "5.6.1" -description = "TatSu takes a grammar in a variation of EBNF as input, and outputs a memoizing PEG/Packrat parser in Python." -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version < \"3.10\"" -files = [ - {file = "TatSu-5.6.1-py2.py3-none-any.whl", hash = "sha256:7cf03e15ab170fd91489b855cd8a4942b6cd1ac5e0eabcb852793a716600dbf0"}, - {file = "TatSu-5.6.1.zip", hash = "sha256:6a4f07aa7bfe9dfbee8015824feaf13f0b1a89577e2ee5a4a62c18630c309d4e"}, -] - -[package.extras] -future-regex = ["regex"] - -[[package]] -name = "tatsu" -version = "5.8.3" -description = "TatSu takes a grammar in a variation of EBNF as input, and outputs a memoizing PEG/Packrat parser in Python." -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version >= \"3.10\"" -files = [ - {file = "TatSu-5.8.3-py2.py3-none-any.whl", hash = "sha256:0a836692e67247cad9f251e083b045b13345cc715e69a7fbc16522beaa0f2163"}, - {file = "TatSu-5.8.3.zip", hash = "sha256:571ecbcdf33b7828c05e5cd95a8e8ad06af111c2c83a6a245be4d8f7c43de7bb"}, -] - -[package.extras] -future-regex = ["regex"] - -[[package]] -name = "urllib3" -version = "2.2.1" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
-    {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
-    {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
-]
-
-[package.extras]
-brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
-h2 = ["h2 (>=4,<5)"]
-socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
-zstd = ["zstandard (>=0.18.0)"]
-
-[[package]]
-name = "zipp"
-version = "3.18.1"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-optional = true
-python-versions = ">=3.8"
-groups = ["main"]
-markers = "python_version < \"3.10\" and extra == \"docs\""
-files = [
-    {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"},
-    {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"},
-]
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy ; platform_python_implementation != \"PyPy\"", "pytest-ruff (>=0.2.1)"]
-
-[extras]
-docs = ["sphinx", "sphinx-rtd-theme"]
-
-[metadata]
-lock-version = "2.1"
-python-versions = "^3.8"
-content-hash = "39512ca5e49ff5549bf68392da28592478e048145152266581a774f25d2b0cc9"
diff --git a/pyproject.toml b/pyproject.toml
index 77f7b565..4f6bf5e5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,38 +1,38 @@
-[tool.poetry]
+[project]
 name = "sdcclient"
-version = "0.0.0" # Updated by poetry-dynamic-versioning
+# version = "0.0.0" # Will be configured by the tool.hatch.version.source in this file.
+dynamic = ["version"] # Will be configured by the tool.hatch.version.source in this file.
 description = "Python client for Sysdig Platform"
-authors = ["Sysdig Inc. <info@sysdig.com>"]
+authors = [{ name = "Sysdig Inc.", email = "info@sysdig.com" }]
+requires-python = "~=3.10"
 license = "MIT"
-
-[tool.poetry.dependencies]
-python = "^3.8"
-requests = "^2.31"
-pyaml = ">=20.4,<22.0"
-requests-toolbelt = "^1.0.0"
-urllib3 = "^2.2.1"
-tatsu = [
-    { version = "^4.4.0", python = "<3.8" },
-    { version = ">=5.5.0,<5.7.0", python = ">=3.8, <3.10" },
-    { version = ">=5.7.0,<5.9.0", python = ">=3.10" }
+dependencies = [
+    "requests~=2.31",
+    "pyaml>=20.4,<22.0",
+    "requests-toolbelt>=1.0.0,<2",
+    "urllib3>=2.2.1,<3",
+    "tatsu>=5.7.0,<5.9.0",
 ]
-sphinx = {version = ">=3.3.1,<6.0.0", optional = true}
-sphinx-rtd-theme = { version = ">=0.5,<1.1", optional = true }
-
-[tool.poetry.dev-dependencies]
-mamba = "^0.11.1"
-doublex = "^1.9.2"
-doublex-expects = "^0.7.1"
-expects = "^0.9.0"
-flake8 = "^3.8.4"
-coverage = "^6.2"
 
-[tool.poetry.extras]
-docs = ["sphinx", "sphinx-material", "sphinx-rtd-theme"]
+[project.optional-dependencies]
+docs = [
+    "sphinx>=3.3.1,<6.0.0",
+    "sphinx-rtd-theme>=0.5,<1.1",
+]
 
-[tool.poetry-dynamic-versioning]
-enable = true
+[dependency-groups]
+dev = [
+    "mamba>=0.11.1,<0.12",
+    "doublex>=1.9.2,<2",
+    "doublex-expects>=0.7.1,<0.8",
+    "expects>=0.9.0,<0.10",
+    "flake8>=3.8.4,<4",
+    "coverage~=6.2",
+]
 
 [build-system]
-requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
-build-backend = "poetry.core.masonry.api"
+requires = ["hatchling", "hatch-vcs"]
+build-backend = "hatchling.build"
+
+[tool.hatch.version]
+source = "vcs" # Will get the latest git tag to configure this project's version
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 00000000..aca8354c
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,605 @@
+version = 1
+revision = 2
+requires-python = ">=3.10, <4"
+
+[[package]]
+name = "alabaster"
+version = "0.7.16"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" },
+]
+
+[[package]]
+name = "args"
+version = "0.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e5/1c/b701b3f4bd8d3667df8342f311b3efaeab86078a840fb826bd204118cc6b/args-0.1.0.tar.gz", hash = "sha256:a785b8d837625e9b61c39108532d95b85274acd679693b71ebb5156848fcf814", size = 3048, upload-time = "2012-05-08T07:41:57.541Z" }
+
+[[package]]
+name = "babel"
+version = "2.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" },
+]
+
+[[package]]
+name = 
"certifi" +version = "2025.4.26" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, + { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, + { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, + { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, + { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, + { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, + { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" }, + { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, + { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, + { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, + { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 
147260, upload-time = "2025-05-02T08:32:18.807Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, + { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, + { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, + { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, + { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, + { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, + { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, + { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, + { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, + { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +] + +[[package]] +name = "clint" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "args" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/b4/41ecb1516f1ba728f39ee7062b9dac1352d39823f513bb6f9e8aeb86e26d/clint-0.5.1.tar.gz", hash = "sha256:05224c32b1075563d0b16d0015faaf9da43aa214e4a2140e51f08789e7a4c5aa", size = 29355, upload-time = "2015-08-25T16:11:19.237Z" } + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "6.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/66/38d1870cb7cf62da49add1d6803fdbcdef632b2808b5c80bcac35b7634d8/coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84", size = 775224, upload-time = "2022-09-29T20:05:58.509Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/8d/5ec7d08f4601d2d792563fe31db5e9322c306848fec1e65ec8885927f739/coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53", size = 185264, upload-time = "2022-09-29T20:04:39.481Z" }, + { url = "https://files.pythonhosted.org/packages/89/a2/cbf599e50bb4be416e0408c4cf523c354c51d7da39935461a9687e039481/coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660", size = 185482, upload-time = "2022-09-29T20:04:41.703Z" }, + { url = "https://files.pythonhosted.org/packages/15/b0/3639d84ee8a900da0cf6450ab46e22517e4688b6cec0ba8ab6f8166103a2/coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4", size = 214083, upload-time = "2022-09-29T20:04:43.294Z" }, + { 
url = "https://files.pythonhosted.org/packages/13/f3/c6025ba30f2ce21d20d5332c3819880fe8afdfc008c2e2f9c075c7b67543/coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04", size = 212396, upload-time = "2022-09-29T20:04:44.809Z" }, + { url = "https://files.pythonhosted.org/packages/3c/7d/d5211ea782b193ab8064b06dc0cc042cf1a4ca9c93a530071459172c550f/coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0", size = 213270, upload-time = "2022-09-29T20:04:46.291Z" }, + { url = "https://files.pythonhosted.org/packages/10/9e/68e384940179713640743a010ac7f7c813d1087c8730a9c0bdfa73bdffd7/coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae", size = 219188, upload-time = "2022-09-29T20:04:47.728Z" }, + { url = "https://files.pythonhosted.org/packages/2f/8b/ca3fe3cfbd66d63181f6e6a06b8b494bb327ba8222d2fa628b392b9ad08a/coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466", size = 217430, upload-time = "2022-09-29T20:04:49.098Z" }, + { url = "https://files.pythonhosted.org/packages/c0/18/2a0a9b3c29376ce04ceb7ca2948559dad76409a2c9b3f664756581101e16/coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a", size = 218646, upload-time = "2022-09-29T20:04:50.582Z" }, + { url = "https://files.pythonhosted.org/packages/11/9e/7afba355bdabc550b3b2669e3432e71aec87d79400372d7686c09aab0acf/coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32", size = 187602, upload-time = "2022-09-29T20:04:52.509Z" }, + { url = "https://files.pythonhosted.org/packages/ae/a3/f45cb5d32de0751863945d22083c15eb8854bb53681b2e792f2066c629b9/coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e", size = 188510, upload-time = "2022-09-29T20:04:54.421Z" }, + { url = "https://files.pythonhosted.org/packages/50/cf/455930004231fa87efe8be06d13512f34e070ddfee8b8bf5a050cdc47ab3/coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795", size = 185433, upload-time = "2022-09-29T20:04:55.856Z" }, + { url = "https://files.pythonhosted.org/packages/36/f3/5cbd79cf4cd059c80b59104aca33b8d05af4ad5bf5b1547645ecee716378/coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75", size = 217736, upload-time = "2022-09-29T20:04:57.242Z" }, + { url = "https://files.pythonhosted.org/packages/89/58/5ec19b43a6511288511f64fc4763d95af8403f5926e7e4556e6b29b03a26/coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b", size = 215313, upload-time = "2022-09-29T20:04:58.661Z" }, + { url = "https://files.pythonhosted.org/packages/6a/63/8e82513b7e4a1b8d887b4e85c1c2b6c9b754a581b187c0b084f3330ac479/coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91", size = 217115, upload-time = "2022-09-29T20:05:00.194Z" }, + { url = "https://files.pythonhosted.org/packages/ac/bc/c9d4fd6b3494d2cc1e26f4b98eb19206b92a59094617ad02d5689ac9d3c4/coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4", size = 226072, upload-time = "2022-09-29T20:05:01.635Z" }, + { url = "https://files.pythonhosted.org/packages/78/98/253ce0cfcc3b352d3072940940ed44a035614f2abe781477f77038d21d9f/coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa", size = 224486, upload-time = "2022-09-29T20:05:03.158Z" }, + { url = "https://files.pythonhosted.org/packages/4b/66/6e588f5dfc93ccedd06d6785c8143f17bb92b89247d50128d8789e9588d0/coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b", size = 225537, upload-time = "2022-09-29T20:05:04.646Z" }, + { url = "https://files.pythonhosted.org/packages/ff/27/339089b558672f04e62d0cd2d49b9280270bad3bc95de24e7eb03deb4638/coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578", size = 187586, upload-time = "2022-09-29T20:05:06.22Z" }, + { url = "https://files.pythonhosted.org/packages/e6/24/7fe8ededb4060dd8c3f1d86cb624fcb3452f66fbef5051ed7fab126c5c0c/coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b", size = 188604, upload-time = "2022-09-29T20:05:09.007Z" }, +] + +[[package]] +name = "docutils" +version = "0.17.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/17/559b4d020f4b46e0287a2eddf2d8ebf76318fd3bd495f1625414b052fdc9/docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125", size = 2016138, upload-time = "2021-04-17T14:13:28.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/5e/6003a0d1f37725ec2ebd4046b657abb9372202655f96e76795dca8c0063c/docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61", size = 575533, upload-time = "2021-04-17T14:13:24.796Z" }, +] + +[[package]] +name = "doublex" +version = "1.9.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyhamcrest" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/3d/0edaecbafa90c19530739467a5dd02c8ebf9968201ab1ff15537ea98422f/doublex-1.9.6.1.tar.gz", hash = "sha256:48fbc633598eb913a6eb0c4694f8c040ba57bae9653d45643a84243e0c9f3268", size = 15734, upload-time = "2023-02-14T10:53:38.989Z" } + +[[package]] +name = "doublex-expects" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "doublex" }, + { name = "expects" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ce/e8/677f10ca523396283343a478265fbeb84e12a84406f5e6ec8b3b7d448982/doublex-expects-0.7.1.tar.gz", hash = "sha256:8040682d97f0a66f632c5df982f78d09aee36b2c4a1eb275b0c596d115f200aa", size = 6636, upload-time = "2019-08-19T19:20:59.718Z" } + +[[package]] +name = "expects" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/86/9a/4944ecc222f24d18e8d2819800472ffc2668e52986afd5c7bc41ecaf897b/expects-0.9.0.tar.gz", hash = "sha256:419902ccafe81b7e9559eeb6b7a07ef9d5c5604eddb93000f0642b3b2d594f4c", size = 27901, upload-time = "2018-10-25T16:01:46.973Z" } + +[[package]] +name = "flake8" +version = "3.9.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mccabe" }, + { name = "pycodestyle" }, + { name = "pyflakes" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/47/15b267dfe7e03dca4c4c06e7eadbd55ef4dfd368b13a0bab36d708b14366/flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b", size = 164777, upload-time = "2021-05-08T19:52:34.369Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/80/35a0716e5d5101e643404dabd20f07f5528a21f3ef4032d31a49c913237b/flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907", size = 73147, upload-time = "2021-05-08T19:52:32.476Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "mamba" +version = "0.11.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "clint" }, + { name = "coverage" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/08/58d6330e23edbc2e7c0bb2b716917f4dafc9f6a508a1b297389954a10e4b/mamba-0.11.3.tar.gz", hash = 
"sha256:4dcf69e9a53e78d4aa5ec3dee0bb2c65f02ea68a6b62c4275653d7170b8f5fe2", size = 14156, upload-time = "2023-11-09T15:34:22.196Z" } + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, +] + +[[package]] +name = "mccabe" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/18/fa675aa501e11d6d6ca0ae73a101b2f3571a565e0f7d38e062eec18a91ee/mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f", size = 8612, upload-time = "2017-01-26T22:13:15.699Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/89/479dc97e18549e21354893e4ee4ef36db1d237534982482c3681ee6e7b57/mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", size = 8556, upload-time = "2017-01-26T22:13:14.36Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = 
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pyaml" +version = "21.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/f0/dbb524509ce28f5cfd4e1d9e3ef955f51186cfd1b8297f6e158778c4a8ef/pyaml-21.10.1.tar.gz", hash = "sha256:c6519fee13bf06e3bb3f20cacdea8eba9140385a7c2546df5dbae4887f768383", size = 20466, upload-time = "2021-10-09T23:18:05.385Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/fd/78a3a11c7b9b11878ebbf4461a09cbc758bdfc1b45168972727f7334b09a/pyaml-21.10.1-py2.py3-none-any.whl", hash = "sha256:19985ed303c3a985de4cf8fd329b6d0a5a5b5c9035ea240eccc709ebacbaf4a0", size = 24857, upload-time = "2021-10-09T23:18:03.856Z" }, +] + +[[package]] +name = "pycodestyle" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/b3/c832123f2699892c715fcdfebb1a8fdeffa11bb7b2350e46ecdd76b45a20/pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef", size = 103640, upload-time = "2021-03-14T18:44:04.177Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/cc/227251b1471f129bc35e966bb0fceb005969023926d744139642d847b7ae/pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", size = 41725, upload-time = "2021-03-14T18:44:02.097Z" }, +] + +[[package]] +name = "pyflakes" +version = "2.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/0f/0dc480da9162749bf629dca76570972dd9cce5bedc60196a3c912875c87d/pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db", size = 68567, upload-time = "2021-03-24T16:32:56.157Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/11/2a745612f1d3cbbd9c69ba14b1b43a35a2f5c3c81cd0124508c52c64307f/pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3", size = 68805, upload-time = "2021-03-24T16:32:54.562Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, +] + +[[package]] +name = "pyhamcrest" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/16/3f/f286caba4e64391a8dc9200e6de6ce0d07471e3f718248c3276843b7793b/pyhamcrest-2.1.0.tar.gz", hash = "sha256:c6acbec0923d0cb7e72c22af1926f3e7c97b8e8d69fc7498eabacaf7c975bd9c", size = 60538, upload-time = "2023-10-22T15:47:28.255Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/71/1b25d3797a24add00f6f8c1bb0ac03a38616e2ec6606f598c1d50b0b0ffb/pyhamcrest-2.1.0-py3-none-any.whl", hash = "sha256:f6913d2f392e30e0375b3ecbd7aee79e5d1faa25d345c8f4ff597665dcac2587", size = 54555, upload-time = "2023-10-22T15:47:25.08Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" }, +] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" }, +] + +[[package]] +name = "sdcclient" +source = { editable = "." 
} +dependencies = [ + { name = "pyaml" }, + { name = "requests" }, + { name = "requests-toolbelt" }, + { name = "tatsu" }, + { name = "urllib3" }, +] + +[package.optional-dependencies] +docs = [ + { name = "sphinx" }, + { name = "sphinx-rtd-theme" }, +] + +[package.dev-dependencies] +dev = [ + { name = "coverage" }, + { name = "doublex" }, + { name = "doublex-expects" }, + { name = "expects" }, + { name = "flake8" }, + { name = "mamba" }, +] + +[package.metadata] +requires-dist = [ + { name = "pyaml", specifier = ">=20.4,<22.0" }, + { name = "requests", specifier = "~=2.31" }, + { name = "requests-toolbelt", specifier = ">=1.0.0,<2" }, + { name = "sphinx", marker = "extra == 'docs'", specifier = ">=3.3.1,<6.0.0" }, + { name = "sphinx-rtd-theme", marker = "extra == 'docs'", specifier = ">=0.5,<1.1" }, + { name = "tatsu", specifier = ">=5.7.0,<5.9.0" }, + { name = "urllib3", specifier = ">=2.2.1,<3" }, +] +provides-extras = ["docs"] + +[package.metadata.requires-dev] +dev = [ + { name = "coverage", specifier = "~=6.2" }, + { name = "doublex", specifier = ">=1.9.2,<2" }, + { name = "doublex-expects", specifier = ">=0.7.1,<0.8" }, + { name = "expects", specifier = ">=0.9.0,<0.10" }, + { name = "flake8", specifier = ">=3.8.4,<4" }, + { name = "mamba", specifier = ">=0.11.1,<0.12" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/7b/af302bebf22c749c56c9c3e8ae13190b5b5db37a33d9068652e8f73b7089/snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", size = 86699, upload-time = "2021-11-16T18:38:38.009Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/dc/c02e01294f7265e63a7315fe086dd1df7dacb9f840a804da846b96d01b96/snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a", size = 93002, upload-time = "2021-11-16T18:38:34.792Z" }, +] + +[[package]] +name = "sphinx" +version = "5.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alabaster" }, + { name = "babel" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "docutils" }, + { name = "imagesize" }, + { name = "jinja2" }, + { name = "packaging" }, + { name = "pygments" }, + { name = "requests" }, + { name = "snowballstemmer" }, + { name = "sphinxcontrib-applehelp" }, + { name = "sphinxcontrib-devhelp" }, + { name = "sphinxcontrib-htmlhelp" }, + { name = "sphinxcontrib-jsmath" }, + { name = "sphinxcontrib-qthelp" }, + { name = "sphinxcontrib-serializinghtml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/af/b2/02a43597980903483fe5eb081ee8e0ba2bb62ea43a70499484343795f3bf/Sphinx-5.3.0.tar.gz", hash = 
"sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5", size = 6811365, upload-time = "2022-10-16T09:58:25.963Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/a7/01dd6fd9653c056258d65032aa09a615b5d7b07dd840845a9f41a8860fbc/sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d", size = 3183160, upload-time = "2022-10-16T09:58:21.63Z" }, +] + +[[package]] +name = "sphinx-rtd-theme" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/32/580309c9fd5b1892c6616ce814710c6b14423e98bf1c101bf2c710433cee/sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c", size = 2780623, upload-time = "2021-09-13T19:54:10.078Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/d2/3818e4730e314719e27f639c44164419e40eed826d63753dc480262036e8/sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8", size = 2815240, upload-time = "2021-09-13T19:53:37.37Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + +[[package]] +name = "tatsu" +version = "5.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/e6/f46e17ed341ff7584b23a63508c781e6e5e984a9e6eb0b66b273991a02eb/TatSu-5.8.3.zip", hash = "sha256:571ecbcdf33b7828c05e5cd95a8e8ad06af111c2c83a6a245be4d8f7c43de7bb", size = 205187, upload-time = "2022-07-29T21:44:19.461Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/67/413a03b1048f9f237e3f242e6e829688d8c9cf1fbe6bd8bb5f574ae67ac9/TatSu-5.8.3-py2.py3-none-any.whl", hash = "sha256:0a836692e67247cad9f251e083b045b13345cc715e69a7fbc16522beaa0f2163", size = 101486, upload-time = "2022-07-29T21:44:15.033Z" }, +] + +[[package]] +name = "urllib3" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, +] From 
de198849a12b37fffa5f95e7fb8ea57776769199 Mon Sep 17 00:00:00 2001 From: Fede Barcelona Date: Fri, 2 May 2025 12:00:16 +0200 Subject: [PATCH 03/13] lint: add ruff linter and fix linter issues --- .flake8 | 6 ------ examples/get_agents_config.py | 2 +- flake.nix | 1 + sdcclient/_monitor.py | 1 + .../monitor/dashboard_converters/_dashboard_versions.py | 4 ++-- specs/monitor/captures_v1_spec.py | 2 +- utils/sync_pagerduty_policies.py | 2 +- 7 files changed, 7 insertions(+), 11 deletions(-) delete mode 100644 .flake8 diff --git a/.flake8 b/.flake8 deleted file mode 100644 index a46d303e..00000000 --- a/.flake8 +++ /dev/null @@ -1,6 +0,0 @@ -[flake8] -ignore = E501, F821, W504, W605, E303, E126, E131, E241 -show-source = True -count = True -statistics = True -max-line-length=127 \ No newline at end of file diff --git a/examples/get_agents_config.py b/examples/get_agents_config.py index f13785b0..f301342b 100755 --- a/examples/get_agents_config.py +++ b/examples/get_agents_config.py @@ -34,7 +34,7 @@ # Return the result # if ok: - if not ("files" in res) or len(res["files"]) == 0: + if "files" not in res or len(res["files"]) == 0: print("No current auto configuration") else: print("Current contents of config file:") diff --git a/flake.nix b/flake.nix index 67937559..4ea96628 100644 --- a/flake.nix +++ b/flake.nix @@ -21,6 +21,7 @@ packages = [ python310 # Minimum supported python version in this project uv + ruff ]; }; diff --git a/sdcclient/_monitor.py b/sdcclient/_monitor.py index e3cf6429..d4e1ddc7 100644 --- a/sdcclient/_monitor.py +++ b/sdcclient/_monitor.py @@ -1,5 +1,6 @@ import json from typing import Any, Tuple, Union +import re from sdcclient._common import _SdcCommon from sdcclient.monitor import EventsClientV2, DashboardsClientV3 diff --git a/sdcclient/monitor/dashboard_converters/_dashboard_versions.py b/sdcclient/monitor/dashboard_converters/_dashboard_versions.py index 90f5b428..370479a9 100644 --- a/sdcclient/monitor/dashboard_converters/_dashboard_versions.py +++ b/sdcclient/monitor/dashboard_converters/_dashboard_versions.py @@ -157,12 +157,12 @@ def convert_property_name(prop_name, old_metric, new_metric): sorted_metrics = [] timestamp_key = [m for m in migrated_metrics if m['id'] == 'timestamp' and - not ('timeAggregation' in m) or + 'timeAggregation' not in m or not (m['timeAggregation'] is not None) ] no_timestamp_keys = [m for m in migrated_metrics if m['id'] != 'timestamp' and - not ('timeAggregation' in m) or + 'timeAggregation' not in m or not (m['timeAggregation'] is not None) ] values = [m for m in migrated_metrics diff --git a/specs/monitor/captures_v1_spec.py b/specs/monitor/captures_v1_spec.py index 91ce42d1..c3c05cac 100644 --- a/specs/monitor/captures_v1_spec.py +++ b/specs/monitor/captures_v1_spec.py @@ -7,7 +7,7 @@ from expects import expect, have_key, contain from expects.matchers import _Or from expects.matchers.built_in import have_keys, equal -from mamba import description, it, before, after +from mamba import description, it, before, after, _it from sdcclient import SdMonitorClient from specs import be_successful_api_call diff --git a/utils/sync_pagerduty_policies.py b/utils/sync_pagerduty_policies.py index eb720fd1..c0dc46d2 100644 --- a/utils/sync_pagerduty_policies.py +++ b/utils/sync_pagerduty_policies.py @@ -173,7 +173,7 @@ def get_integration_map(acc, channel): # delete all PagerDuty notification channels in Sysdig that do NOT have an integration in PagerDuty # for channel in pager_duty_channels: - if not channel['options']['serviceKey'] in 
service_integration_keys: + if channel['options']['serviceKey'] not in service_integration_keys: actions.append({ 'info': 'Remove notification channel "{}" not connected to any integration'.format(channel['name']), 'fn': actions_factory.delete_notification_channel(channel) From e56ca3c9ddf2c51ce51ef1b421bc8c273523aa05 Mon Sep 17 00:00:00 2001 From: Fede Barcelona Date: Fri, 2 May 2025 12:00:47 +0200 Subject: [PATCH 04/13] style: format all py code with ruff --- docs/conf.py | 67 +- examples/add_notification_email.py | 6 +- examples/add_policy.py | 8 +- examples/add_policy_v1.py | 8 +- examples/add_users_to_secure.py | 24 +- examples/create_access_keys.py | 28 +- examples/create_alert.py | 32 +- examples/create_dashboard.py | 18 +- examples/create_default_policies.py | 6 +- examples/create_default_policies_v1.py | 6 +- examples/create_sysdig_capture.py | 29 +- examples/dashboard.py | 83 +- examples/dashboard_backup_v1_restore_v2.py | 30 +- examples/dashboard_basic_crud.py | 14 +- examples/dashboard_ibm_cloud.py | 37 +- examples/dashboard_save_load.py | 16 +- examples/dashboard_scope.py | 14 +- examples/delete_alert.py | 10 +- examples/delete_all_policies.py | 10 +- examples/delete_all_policies_v1.py | 12 +- examples/delete_dashboard.py | 12 +- examples/delete_event.py | 10 +- examples/delete_policy.py | 10 +- examples/delete_policy_v1.py | 10 +- examples/download_dashboards.py | 25 +- examples/flip_alerts_enabled.py | 24 +- examples/get_agents_config.py | 6 +- examples/get_anchore_users_account.py | 6 +- examples/get_dashboard.py | 45 +- examples/get_data_promql_advanced.py | 11 +- examples/get_data_promql_instant_advanced.py | 15 +- examples/get_data_promql_instant_simple.py | 15 +- examples/get_data_promql_simple.py | 11 +- examples/get_image_info_by_id.py | 6 +- examples/get_image_scan_result_by_id.py | 8 +- examples/get_label_values.py | 4 +- examples/get_labels.py | 8 +- examples/get_latest_pdf_report_by_digest.py | 8 +- examples/get_metadata.py | 36 +- examples/get_pdf_report.py | 8 +- examples/get_policy.py | 6 +- examples/get_policy_v1.py | 6 +- .../get_secure_default_falco_rules_files.py | 10 +- examples/get_secure_policy_events.py | 18 +- examples/get_secure_policy_events_old.py | 27 +- examples/get_secure_system_falco_rules.py | 6 +- examples/get_secure_user_falco_rules.py | 6 +- examples/get_series.py | 28 +- examples/list_access_keys.py | 14 +- examples/list_admins.py | 24 +- examples/list_alert_notifications.py | 44 +- examples/list_alerts.py | 8 +- examples/list_dashboards.py | 13 +- examples/list_events.py | 24 +- examples/list_notification_channels.py | 6 +- examples/list_policies.py | 6 +- examples/list_policies_v1.py | 16 +- examples/list_profiles.py | 4 +- examples/list_sysdig_captures.py | 20 +- examples/list_users.py | 12 +- examples/notification_channels.py | 13 +- examples/post_event.py | 33 +- examples/post_event_simple.py | 6 +- examples/print_data_retention_info.py | 6 +- examples/print_explore_grouping.py | 4 +- examples/print_user_info.py | 10 +- examples/resolve_alert_notifications.py | 13 +- examples/restore_alerts.py | 46 +- examples/restore_dashboards.py | 21 +- examples/set_agents_config.py | 8 +- examples/set_explore_group_configuration.py | 6 +- examples/set_policy_order_v1.py | 10 +- .../set_secure_default_falco_rules_files.py | 52 +- examples/set_secure_system_falco_rules.py | 8 +- examples/set_secure_user_falco_rules.py | 8 +- examples/update_access_keys.py | 33 +- examples/update_alert.py | 32 +- examples/update_policy.py | 8 +- 
examples/update_policy_v1.py | 8 +- examples/user_team_mgmt.py | 58 +- examples/user_team_mgmt_extended.py | 282 ++++-- sdcclient/__init__.py | 13 +- sdcclient/_common.py | 801 +++++++++++------- sdcclient/_monitor.py | 295 ++++--- sdcclient/_monitor_v1.py | 273 +++--- sdcclient/_scanning.py | 736 +++++++++------- sdcclient/_secure.py | 442 ++++++---- sdcclient/_secure_v1.py | 105 ++- sdcclient/ibm_auth_helper.py | 35 +- sdcclient/monitor/__init__.py | 7 +- sdcclient/monitor/_dashboards_v2.py | 468 +++++----- sdcclient/monitor/_dashboards_v3.py | 368 ++++---- sdcclient/monitor/_events_v1.py | 63 +- sdcclient/monitor/_events_v2.py | 113 ++- .../dashboard_converters/_dashboard_scope.py | 29 +- .../_dashboard_versions.py | 217 ++--- sdcclient/secure/__init__.py | 25 +- sdcclient/secure/_activity_audit_v1.py | 60 +- sdcclient/secure/_falco_rules_files_old.py | 183 ++-- sdcclient/secure/_policy_events_old.py | 202 +++-- sdcclient/secure/_policy_events_v1.py | 73 +- sdcclient/secure/_policy_v2.py | 92 +- sdcclient/secure/scanning/_alerts.py | 234 +++-- specs/_common/agent_spec.py | 46 +- specs/_common/team_spec.py | 22 +- specs/_common/user_provisioning_spec.py | 10 +- specs/monitor/alerts_v1_spec.py | 18 +- specs/monitor/captures_v1_spec.py | 50 +- .../dashboard_scope_spec.py | 381 ++++++--- specs/monitor/dashboards_v2_spec.py | 96 ++- specs/monitor/dashboards_v3_spec.py | 184 ++-- specs/monitor/events_v1_spec.py | 18 +- specs/monitor/events_v2_spec.py | 82 +- specs/secure/activitylog_v1_spec.py | 27 +- specs/secure/custom_rules_spec.py | 28 +- specs/secure/policy_events_v1_spec.py | 63 +- specs/secure/policy_v1_spec.py | 36 +- specs/secure/policy_v2_spec.py | 39 +- specs/secure/scanning/alerts_spec.py | 384 ++++++--- specs/secure/scanning/list_image_tags_spec.py | 21 +- specs/secure/scanning/list_images_spec.py | 25 +- .../secure/scanning/policy_evaluation_spec.py | 52 +- .../scanning/query_image_content_spec.py | 33 +- .../scanning/scanning_cve_report_spec.py | 54 +- .../scanning_vulnerability_exceptions_spec.py | 231 +++-- .../scanning/scanning_vulnerability_spec.py | 17 +- utils/sync_pagerduty_policies.py | 559 ++++++------ 127 files changed, 5637 insertions(+), 3372 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 93f829e9..15999787 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,7 +20,7 @@ import sys from datetime import datetime -sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) # -- General configuration ------------------------------------------------ @@ -31,35 +31,43 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.autosectionlabel', 'sphinx.ext.graphviz', - 'sphinx.ext.inheritance_diagram', 'sphinx.ext.viewcode', 'sphinx.ext.autosummary', - 'sphinx.ext.githubpages', 'sphinx.ext.napoleon', 'sphinx_rtd_theme'] +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosectionlabel", + "sphinx.ext.graphviz", + "sphinx.ext.inheritance_diagram", + "sphinx.ext.viewcode", + "sphinx.ext.autosummary", + "sphinx.ext.githubpages", + "sphinx.ext.napoleon", + "sphinx_rtd_theme", +] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'Sysdig SDK for Python' -copyright = f'2016-{datetime.now().year}, Sysdig Inc.' -author = u'Sysdig Inc.' +project = "Sysdig SDK for Python" +copyright = f"2016-{datetime.now().year}, Sysdig Inc." +author = "Sysdig Inc." # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = u'' +version = "" # The full version, including alpha/beta/rc tags. -release = u'' +release = "" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -71,10 +79,10 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False @@ -84,7 +92,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -100,7 +108,7 @@ # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. -htmlhelp_basename = 'python-sdc-clientdoc' +htmlhelp_basename = "python-sdc-clientdoc" # -- Options for LaTeX output --------------------------------------------- @@ -108,15 +116,12 @@ # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', @@ -126,8 +131,13 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'python-sdc-client.tex', u'python-sdc-client Documentation', - u'Sysdig Inc.', 'manual'), + ( + master_doc, + "python-sdc-client.tex", + "python-sdc-client Documentation", + "Sysdig Inc.", + "manual", + ), ] # -- Options for manual page output --------------------------------------- @@ -135,8 +145,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - (master_doc, 'python-sdc-client', u'python-sdc-client Documentation', - [author], 1) + (master_doc, "python-sdc-client", "python-sdc-client Documentation", [author], 1) ] # -- Options for Texinfo output ------------------------------------------- @@ -145,9 +154,15 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'python-sdc-client', u'python-sdc-client Documentation', - author, 'python-sdc-client', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "python-sdc-client", + "python-sdc-client Documentation", + author, + "python-sdc-client", + "One line description of project.", + "Miscellaneous", + ), ] # Napoleon settings diff --git a/examples/add_notification_email.py b/examples/add_notification_email.py index c81e5861..10d13914 100755 --- a/examples/add_notification_email.py +++ b/examples/add_notification_email.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s email' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s email" % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -32,7 +32,7 @@ # Return the result # if ok: - print('Recipient added successfully') + print("Recipient added successfully") else: print(res) sys.exit(1) diff --git a/examples/add_policy.py b/examples/add_policy.py index 7bbf2db7..f5e2caa0 100755 --- a/examples/add_policy.py +++ b/examples/add_policy.py @@ -10,9 +10,9 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('Reads policy json from standard input') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("Reads policy json from standard input") + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -29,7 +29,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.add_policy_json(policy_json) diff --git a/examples/add_policy_v1.py b/examples/add_policy_v1.py index f9a63098..cac9a58b 100755 --- a/examples/add_policy_v1.py +++ b/examples/add_policy_v1.py @@ -10,9 +10,9 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('Reads policy json from standard input') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("Reads policy json from standard input") + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -29,7 +29,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClientV1(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClientV1(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.add_policy(policy_json) diff --git a/examples/add_users_to_secure.py b/examples/add_users_to_secure.py index 59f4f066..895c17de 100755 --- a/examples/add_users_to_secure.py +++ b/examples/add_users_to_secure.py @@ -20,13 +20,13 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] 
-SECURE_TEAM_NAME = 'Secure Operations' +SECURE_TEAM_NAME = "Secure Operations" # # As of when this script was written, the Secure Operations team does @@ -34,24 +34,24 @@ # Rather, all members of the Secure team have full visibility within # Secure, which is associated with ROLE_TEAM_EDIT. # -SECURE_TEAM_ROLE = 'ROLE_TEAM_EDIT' +SECURE_TEAM_ROLE = "ROLE_TEAM_EDIT" # # Instantiate the SDC client # -sdclient = SdcClient(sdc_token, sdc_url='https://app.sysdigcloud.com') +sdclient = SdcClient(sdc_token, sdc_url="https://app.sysdigcloud.com") ok, res = sdclient.list_memberships(SECURE_TEAM_NAME) if not ok: - print(('Unable to get memberships for ' + SECURE_TEAM_NAME + ' team: ', res)) + print(("Unable to get memberships for " + SECURE_TEAM_NAME + " team: ", res)) sys.exit(1) memberships = res ok, res = sdclient.get_users() if not ok: - print(('Unable to get users: ', res)) + print(("Unable to get users: ", res)) sys.exit(1) all_users = res @@ -60,15 +60,15 @@ # rather than ID, so convert the IDs. # for user in all_users: - if user['username'] in memberships: - print(('Will preserve existing membership for: ' + user['username'])) + if user["username"] in memberships: + print(("Will preserve existing membership for: " + user["username"])) else: - print(('Will add new member: ' + user['username'])) - memberships[user['username']] = SECURE_TEAM_ROLE + print(("Will add new member: " + user["username"])) + memberships[user["username"]] = SECURE_TEAM_ROLE ok, res = sdclient.save_memberships(SECURE_TEAM_NAME, memberships=memberships) if not ok: - print(('Could not edit team:', res, '. Exiting.')) + print(("Could not edit team:", res, ". Exiting.")) sys.exit(1) else: print(('Finished syncing memberships of "' + SECURE_TEAM_NAME + '" team')) diff --git a/examples/create_access_keys.py b/examples/create_access_keys.py index a683e92d..4d396f09 100755 --- a/examples/create_access_keys.py +++ b/examples/create_access_keys.py @@ -12,36 +12,40 @@ # Parse arguments # if len(sys.argv) != 2: - print('usage: %s ' % sys.argv[0]) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') - print('For this script to work, the user for the token must have Admin rights') + print("usage: %s " % sys.argv[0]) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") + print("For this script to work, the user for the token must have Admin rights") sys.exit(1) sdc_token = sys.argv[1] # Maximum number of agents allowed to connect for this access key. Set to '' if not required -agent_limit = '' +agent_limit = "" # Number of agent licenses that are ALWAYS available to this access key. This directly counts against the maximum number of available licenses. Set to '' if not required. -agent_reserved = '' +agent_reserved = "" # Team ID to which to assign the access key. Team ID must be valid. Set to '' if not required. 
-team_id = '' +team_id = "" # # Instantiate the SDC client # -sdclient = SdcClient(sdc_token, 'https://app.sysdigcloud.com') +sdclient = SdcClient(sdc_token, "https://app.sysdigcloud.com") # # Get the configuration # -ok, res = sdclient.create_access_key( - agent_limit, - agent_reserved, - team_id) +ok, res = sdclient.create_access_key(agent_limit, agent_reserved, team_id) if ok: - print('Access Key: {}\nTeam ID: {}\nAgent Limit: {}\nAgent Reserved: {}\n==========='.format(res['customerAccessKey']['accessKey'], res['customerAccessKey']['teamId'], res['customerAccessKey']['limit'], res['customerAccessKey']['reservation'])) + print( + "Access Key: {}\nTeam ID: {}\nAgent Limit: {}\nAgent Reserved: {}\n===========".format( + res["customerAccessKey"]["accessKey"], + res["customerAccessKey"]["teamId"], + res["customerAccessKey"]["limit"], + res["customerAccessKey"]["reservation"], + ) + ) else: print(res) sys.exit(1) diff --git a/examples/create_alert.py b/examples/create_alert.py index 04aeb62a..f68c2c5e 100755 --- a/examples/create_alert.py +++ b/examples/create_alert.py @@ -16,9 +16,9 @@ # Parse arguments # def usage(): - print(('usage: %s [-a|--alert ] ' % sys.argv[0])) - print('-a|--alert: Set name of alert to create') - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s [-a|--alert ] " % sys.argv[0])) + print("-a|--alert: Set name of alert to create") + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) @@ -45,10 +45,14 @@ def usage(): # # Find notification channels (you need IDs to create an alert). # -notify_channels = [{'type': 'SLACK', 'channel': '#python-sdc-test-alert'}, - {'type': 'EMAIL', 'emailRecipients': ['python-sdc-testing@draios.com', 'test@sysdig.com']}, - {'type': 'SNS', 'snsTopicARNs': ['arn:aws:sns:us-east-1:273107874544:alarms-stg']} - ] +notify_channels = [ + {"type": "SLACK", "channel": "#python-sdc-test-alert"}, + { + "type": "EMAIL", + "emailRecipients": ["python-sdc-testing@draios.com", "test@sysdig.com"], + }, + {"type": "SNS", "snsTopicARNs": ["arn:aws:sns:us-east-1:273107874544:alarms-stg"]}, +] ok, res = sdclient.get_notification_ids(notify_channels) if not ok: @@ -62,17 +66,21 @@ def usage(): # ok, res = sdclient.create_alert( alert_name, # Alert name. - 'this alert was automatically created using the python Sysdig Cloud library', # Alert description. + "this alert was automatically created using the python Sysdig Cloud library", # Alert description. 6, # Syslog-encoded severity. 6 means 'info'. 60, # The alert will fire if the condition is met for at least 60 seconds. - 'avg(cpu.used.percent) > 80', # The condition. - ['host.mac', 'proc.name'], # Segmentation. We want to check this metric for every process on every machine. - 'ANY', + "avg(cpu.used.percent) > 80", # The condition. + [ + "host.mac", + "proc.name", + ], # Segmentation. We want to check this metric for every process on every machine. + "ANY", # in case there is more than one tomcat process, this alert will fire when a single one of them crosses the 80% threshold. 'proc.name = "tomcat"', # Filter. We want to receive a notification only if the name of the process meeting the condition is 'tomcat'. notification_channel_ids, - False) # This alert will be disabled when it's created. + False, +) # This alert will be disabled when it's created. # # Validate a print the results. 
diff --git a/examples/create_dashboard.py b/examples/create_dashboard.py index 9bee7b97..17bd88fe 100755 --- a/examples/create_dashboard.py +++ b/examples/create_dashboard.py @@ -16,9 +16,9 @@ # Parse arguments # def usage(): - print(('usage: %s [-d|--dashboard ] ' % sys.argv[0])) - print('-d|--dashboard: Set name of dashboard to create') - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s [-d|--dashboard ] " % sys.argv[0])) + print("-d|--dashboard: Set name of dashboard to create") + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) @@ -55,13 +55,13 @@ def usage(): # You can also refer to AWS tags by using "cloudProvider.tag.*" metadata or # agent tags by using "agent.tag.*" metadata dashboardFilter = 'proc.name = "cassandra"' -print('Creating dashboard from view') +print("Creating dashboard from view") ok, res = sdclient.create_dashboard_from_view(dashboardName, viewName, dashboardFilter) # # Check the result # if ok: - print('Dashboard created successfully') + print("Dashboard created successfully") else: print(res) sys.exit(1) @@ -76,14 +76,16 @@ def usage(): # Filter to apply to the new dashboard. Same as above. dashboardFilter = 'proc.name != "cassandra"' -print('Creating dashboard from dashboard') -ok, res = sdclient.create_dashboard_from_dashboard(dashboardCopy, dashboardName, dashboardFilter) +print("Creating dashboard from dashboard") +ok, res = sdclient.create_dashboard_from_dashboard( + dashboardCopy, dashboardName, dashboardFilter +) # # Check the result # if ok: - print('Dashboard copied successfully') + print("Dashboard copied successfully") else: print(res) sys.exit(1) diff --git a/examples/create_default_policies.py b/examples/create_default_policies.py index d7c904fd..990c360c 100755 --- a/examples/create_default_policies.py +++ b/examples/create_default_policies.py @@ -13,8 +13,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -29,7 +29,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.create_default_policies() diff --git a/examples/create_default_policies_v1.py b/examples/create_default_policies_v1.py index 620ab063..97111e66 100755 --- a/examples/create_default_policies_v1.py +++ b/examples/create_default_policies_v1.py @@ -13,8 +13,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -29,7 +29,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClientV1(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClientV1(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.create_default_policies() diff --git a/examples/create_sysdig_capture.py b/examples/create_sysdig_capture.py index bbf5d31f..f5f86332 100755 --- a/examples/create_sysdig_capture.py +++ b/examples/create_sysdig_capture.py @@ -12,15 +12,20 @@ # Parse arguments # if len(sys.argv) not in (5, 6): - print(('usage: %s hostname capture_name duration [filter]' % sys.argv[0])) - print('You can find your token at 
https://app.sysdigcloud.com/#/settings/user') + print( + ( + "usage: %s hostname capture_name duration [filter]" + % sys.argv[0] + ) + ) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] hostname = sys.argv[2] capture_name = sys.argv[3] duration = sys.argv[4] -capture_filter = '' +capture_filter = "" if len(sys.argv) == 6: capture_filter = sys.argv[5] @@ -30,13 +35,15 @@ # sdclient = SdcClient(sdc_token) -ok, res = sdclient.create_sysdig_capture(hostname, capture_name, int(duration), capture_filter) +ok, res = sdclient.create_sysdig_capture( + hostname, capture_name, int(duration), capture_filter +) # # Show the list of metrics # if ok: - capture = res['dump'] + capture = res["dump"] else: print(res) sys.exit(1) @@ -44,18 +51,18 @@ while True: ok, res = sdclient.poll_sysdig_capture(capture) if ok: - capture = res['dump'] + capture = res["dump"] else: print(res) sys.exit(1) - print(('Capture is in state ' + capture['status'])) - if capture['status'] in ('requested', 'capturing', 'uploading'): + print(("Capture is in state " + capture["status"])) + if capture["status"] in ("requested", "capturing", "uploading"): pass - elif capture['status'] in ('error', 'uploadingError'): + elif capture["status"] in ("error", "uploadingError"): sys.exit(1) - elif capture['status'] in ('done', 'uploaded'): - print(('Download at: ' + sdclient.url + capture['downloadURL'])) + elif capture["status"] in ("done", "uploaded"): + print(("Download at: " + sdclient.url + capture["downloadURL"])) sys.exit(0) time.sleep(1) diff --git a/examples/dashboard.py b/examples/dashboard.py index 2441a4e9..3e381f4c 100755 --- a/examples/dashboard.py +++ b/examples/dashboard.py @@ -14,9 +14,9 @@ # Parse arguments # def usage(): - print(('usage: %s [-d|--dashboard ] ' % sys.argv[0])) - print('-d|--dashboard: Set name of dashboard to create') - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s [-d|--dashboard ] " % sys.argv[0])) + print("-d|--dashboard: Set name of dashboard to create") + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) @@ -48,8 +48,8 @@ def usage(): # Check the result if ok: - print(('Dashboard %d created successfully' % res['dashboard']['id'])) - dashboard_configuration = res['dashboard'] + print(("Dashboard %d created successfully" % res["dashboard"]["id"])) + dashboard_configuration = res["dashboard"] else: print(res) sys.exit(1) @@ -61,8 +61,8 @@ def usage(): # Check the result if ok and len(res) > 0: - print('Dashboard found') - dashboard_configuration = res[0]['dashboard'] + print("Dashboard found") + dashboard_configuration = res[0]["dashboard"] else: print(res) sys.exit(1) @@ -70,19 +70,21 @@ def usage(): # # Add a time series # -panel_name = 'CPU Over Time' -panel_type = 'timeSeries' +panel_name = "CPU Over Time" +panel_type = "timeSeries" metrics = [ - {'id': 'proc.name'}, - {'id': 'cpu.used.percent', 'aggregations': {'time': 'avg', 'group': 'avg'}} + {"id": "proc.name"}, + {"id": "cpu.used.percent", "aggregations": {"time": "avg", "group": "avg"}}, ] scope = 'proc.name = "cassandra"' -ok, res = sdclient.add_dashboard_panel(dashboard_configuration, panel_name, panel_type, metrics, scope=scope) +ok, res = sdclient.add_dashboard_panel( + dashboard_configuration, panel_name, panel_type, metrics, scope=scope +) # Check the result if ok: - print('Panel added successfully') - dashboard_configuration = res['dashboard'] + print("Panel added 
successfully") + dashboard_configuration = res["dashboard"] else: print(res) sys.exit(1) @@ -90,22 +92,29 @@ def usage(): # # Add a top bar chart # -panel_name = 'CPU by host' -panel_type = 'top' +panel_name = "CPU by host" +panel_type = "top" metrics = [ - {'id': 'host.hostName'}, - {'id': 'cpu.used.percent', 'aggregations': {'time': 'avg', 'group': 'avg'}} + {"id": "host.hostName"}, + {"id": "cpu.used.percent", "aggregations": {"time": "avg", "group": "avg"}}, ] -sort_direction = 'desc' +sort_direction = "desc" limit = 10 -layout = {'col': 1, 'row': 7, 'size_x': 12, 'size_y': 6} -ok, res = sdclient.add_dashboard_panel(dashboard_configuration, panel_name, panel_type, metrics, - sort_direction=sort_direction, limit=limit, layout=layout) +layout = {"col": 1, "row": 7, "size_x": 12, "size_y": 6} +ok, res = sdclient.add_dashboard_panel( + dashboard_configuration, + panel_name, + panel_type, + metrics, + sort_direction=sort_direction, + limit=limit, + layout=layout, +) # Check the result if ok: - print('Panel added successfully') - dashboard_configuration = res['dashboard'] + print("Panel added successfully") + dashboard_configuration = res["dashboard"] else: print(res) sys.exit(1) @@ -113,18 +122,18 @@ def usage(): # # Add a number panel # -panel_name = 'CPU' -panel_type = 'number' -metrics = [ - {'id': 'cpu.used.percent', 'aggregations': {'time': 'avg', 'group': 'avg'}} -] -layout = {'col': 1, 'row': 13, 'size_x': 12, 'size_y': 6} -ok, res = sdclient.add_dashboard_panel(dashboard_configuration, panel_name, panel_type, metrics, layout=layout) +panel_name = "CPU" +panel_type = "number" +metrics = [{"id": "cpu.used.percent", "aggregations": {"time": "avg", "group": "avg"}}] +layout = {"col": 1, "row": 13, "size_x": 12, "size_y": 6} +ok, res = sdclient.add_dashboard_panel( + dashboard_configuration, panel_name, panel_type, metrics, layout=layout +) # Check the result if ok: - print('Panel added successfully') - dashboard_configuration = res['dashboard'] + print("Panel added successfully") + dashboard_configuration = res["dashboard"] else: print(res) sys.exit(1) @@ -132,12 +141,12 @@ def usage(): # # Remove a panel # -ok, res = sdclient.remove_dashboard_panel(dashboard_configuration, 'CPU Over Time') +ok, res = sdclient.remove_dashboard_panel(dashboard_configuration, "CPU Over Time") # Check the result if ok: - print('Panel removed successfully') - dashboard_configuration = res['dashboard'] + print("Panel removed successfully") + dashboard_configuration = res["dashboard"] else: print(res) sys.exit(1) @@ -149,7 +158,7 @@ def usage(): # Check the result if ok: - print('Dashboard deleted successfully') + print("Dashboard deleted successfully") else: print(res) sys.exit(1) diff --git a/examples/dashboard_backup_v1_restore_v2.py b/examples/dashboard_backup_v1_restore_v2.py index eb9f7ef5..7b31a538 100755 --- a/examples/dashboard_backup_v1_restore_v2.py +++ b/examples/dashboard_backup_v1_restore_v2.py @@ -12,8 +12,10 @@ # Parse arguments # if len(sys.argv) != 5: - print(f'usage: {sys.argv[0]} ') - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print( + f"usage: {sys.argv[0]} " + ) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_v1_url = sys.argv[1] @@ -36,25 +38,25 @@ print(res) sys.exit(1) -for dashboard in res['dashboards']: - file_name = '{}.json'.format(dashboard['id']) - print(('Saving v1 dashboard {} to file {}...'.format( - dashboard['name'], file_name))) +for dashboard in res["dashboards"]: + file_name = 
"{}.json".format(dashboard["id"]) + print(("Saving v1 dashboard {} to file {}...".format(dashboard["name"], file_name))) sdclient_v1.save_dashboard_to_file(dashboard, file_name) - print('Importing dashboard to v2...') + print("Importing dashboard to v2...") ok, res = sdclient_v2.create_dashboard_from_file( - 'import of {}'.format(dashboard['name']), + "import of {}".format(dashboard["name"]), file_name, None, - shared=dashboard['isShared'], - public=dashboard['isPublic']) + shared=dashboard["isShared"], + public=dashboard["isPublic"], + ) if ok: - print(('Dashboard {} imported!'.format(dashboard['name']))) - sdclient_v2.delete_dashboard(res['dashboard']) + print(("Dashboard {} imported!".format(dashboard["name"]))) + sdclient_v2.delete_dashboard(res["dashboard"]) else: - print(('Dashboard {} import failed:'.format(dashboard['name']))) + print(("Dashboard {} import failed:".format(dashboard["name"]))) print(res) - print('\n') + print("\n") diff --git a/examples/dashboard_basic_crud.py b/examples/dashboard_basic_crud.py index 89945e29..aa1c64b1 100755 --- a/examples/dashboard_basic_crud.py +++ b/examples/dashboard_basic_crud.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -34,12 +34,12 @@ print(res) sys.exit(1) -dashboard = res['dashboard'] +dashboard = res["dashboard"] # # Get Dashboard. # -ok, res = sdclient.get_dashboard(dashboard['id']) +ok, res = sdclient.get_dashboard(dashboard["id"]) # # Check for successful retrieval @@ -48,12 +48,12 @@ print(res) sys.exit(1) -dashboard = res['dashboard'] +dashboard = res["dashboard"] # # Update Dashboard. # -dashboard['name'] = "Let's change the dashboard name. " + uuid.uuid4().hex +dashboard["name"] = "Let's change the dashboard name. " + uuid.uuid4().hex ok, res = sdclient.update_dashboard(dashboard) # @@ -63,7 +63,7 @@ print(res) sys.exit(1) -dashboard = res['dashboard'] +dashboard = res["dashboard"] # # Delete Dashboard. diff --git a/examples/dashboard_ibm_cloud.py b/examples/dashboard_ibm_cloud.py index ad9745e1..83e44593 100755 --- a/examples/dashboard_ibm_cloud.py +++ b/examples/dashboard_ibm_cloud.py @@ -10,10 +10,10 @@ # Parse arguments. 
def usage(): - print(('usage: %s ' % sys.argv[0])) - print('endpoint-url: The endpoint URL that should point to IBM Cloud') - print('apikey: IBM Cloud IAM apikey that will be used to retrieve an access token') - print('instance-guid: GUID of an IBM Cloud Monitoring with Sysdig instance') + print(("usage: %s " % sys.argv[0])) + print("endpoint-url: The endpoint URL that should point to IBM Cloud") + print("apikey: IBM Cloud IAM apikey that will be used to retrieve an access token") + print("instance-guid: GUID of an IBM Cloud Monitoring with Sysdig instance") sys.exit(1) @@ -23,8 +23,8 @@ def usage(): URL = sys.argv[1] APIKEY = sys.argv[2] GUID = sys.argv[3] -DASHBOARD_NAME = 'IBM Cloud IAM with Python Client Example' -PANEL_NAME = 'CPU Over Time' +DASHBOARD_NAME = "IBM Cloud IAM with Python Client Example" +PANEL_NAME = "CPU Over Time" # Instantiate the client with an IBM Cloud auth object ibm_headers = IbmAuthHelper.get_headers(URL, APIKEY, GUID) @@ -36,25 +36,26 @@ def usage(): # Check the result dashboard_configuration = None if ok: - print(('Dashboard %d created successfully' % res['dashboard']['id'])) - dashboard_configuration = res['dashboard'] + print(("Dashboard %d created successfully" % res["dashboard"]["id"])) + dashboard_configuration = res["dashboard"] else: print(res) sys.exit(1) # Add a time series panel -panel_type = 'timeSeries' +panel_type = "timeSeries" metrics = [ - {'id': 'proc.name'}, - {'id': 'cpu.used.percent', 'aggregations': {'time': 'avg', 'group': 'avg'}} + {"id": "proc.name"}, + {"id": "cpu.used.percent", "aggregations": {"time": "avg", "group": "avg"}}, ] ok, res = sdclient.add_dashboard_panel( - dashboard_configuration, PANEL_NAME, panel_type, metrics) + dashboard_configuration, PANEL_NAME, panel_type, metrics +) # Check the result if ok: - print('Panel added successfully') - dashboard_configuration = res['dashboard'] + print("Panel added successfully") + dashboard_configuration = res["dashboard"] else: print(res) sys.exit(1) @@ -64,8 +65,8 @@ def usage(): # Check the result if ok: - print('Panel removed successfully') - dashboard_configuration = res['dashboard'] + print("Panel removed successfully") + dashboard_configuration = res["dashboard"] else: print(res) sys.exit(1) @@ -75,9 +76,9 @@ def usage(): # Check the result if ok: - print('Dashboard deleted successfully') + print("Dashboard deleted successfully") else: print(res) sys.exit(1) -print('IBM Cloud IAM auth worked successfully!') +print("IBM Cloud IAM auth worked successfully!") diff --git a/examples/dashboard_save_load.py b/examples/dashboard_save_load.py index 8cb5924c..0afde4d6 100755 --- a/examples/dashboard_save_load.py +++ b/examples/dashboard_save_load.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -31,10 +31,10 @@ print(res) sys.exit(1) -if len(res['dashboards']) > 0: - sdclient.save_dashboard_to_file(res['dashboards'][0], 'dashboard.json') +if len(res["dashboards"]) > 0: + sdclient.save_dashboard_to_file(res["dashboards"][0], "dashboard.json") else: - print('the user has no dashboards. Exiting.') + print("the user has no dashboards. 
Exiting.") sys.exit(0) # @@ -43,10 +43,12 @@ # dashboardFilter = 'proc.name = "cassandra"' -ok, res = sdclient.create_dashboard_from_file('test dasboard from file', 'dashboard.json', dashboardFilter) +ok, res = sdclient.create_dashboard_from_file( + "test dasboard from file", "dashboard.json", dashboardFilter +) if ok: - print('Dashboard created successfully') + print("Dashboard created successfully") else: print(res) sys.exit(1) diff --git a/examples/dashboard_scope.py b/examples/dashboard_scope.py index f1c95698..4fd20df6 100755 --- a/examples/dashboard_scope.py +++ b/examples/dashboard_scope.py @@ -16,10 +16,10 @@ # def evaluate(scope, expected): parsed_scope = SdcClient.convert_scope_string_to_expression(scope) - print('{} is valid: {}'.format(scope, parsed_scope[0] is True)) + print("{} is valid: {}".format(scope, parsed_scope[0] is True)) if parsed_scope[0] != expected: - print('Unexpected parsing result!') + print("Unexpected parsing result!") sys.exit(1) @@ -28,7 +28,7 @@ def evaluate(scope, expected): # NOTE: For now you can still leave values without quotes. # The API will be more strict, so please make sure you adopt the new format! -evaluate('proc.name = cassandra', True) +evaluate("proc.name = cassandra", True) # other operators evaluate('proc.name != "cassandra"', True) @@ -47,7 +47,7 @@ def evaluate(scope, expected): evaluate('kubernetes.service.name = "database" and proc.name = "cassandra"', True) # the scope can obviously be omitted in the dashboard configuration -evaluate('', True) +evaluate("", True) evaluate(None, True) # invalid scopes will cause errors @@ -62,6 +62,8 @@ def evaluate(scope, expected): # # Here some errors that will not be detected by the Python library, but the API will # -evaluate('proc.name = "cassandra" or proc.name = "mysql"', True) # not AND'd expressions are supported -evaluate('proc.name in ("cassandra\', \'mysql")', True) # mismatching quotes +evaluate( + 'proc.name = "cassandra" or proc.name = "mysql"', True +) # not AND'd expressions are supported +evaluate("proc.name in (\"cassandra', 'mysql\")", True) # mismatching quotes evaluate('proc.name in ("cassandra", "mysql"', True) # missing parenthesis diff --git a/examples/delete_alert.py b/examples/delete_alert.py index dd3cfe84..f663515a 100755 --- a/examples/delete_alert.py +++ b/examples/delete_alert.py @@ -13,9 +13,9 @@ # Parse arguments # def usage(): - print(('usage: %s [-a|--alert ] ' % sys.argv[0])) - print('-a|--alert: Set name of alert to delete') - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s [-a|--alert ] " % sys.argv[0])) + print("-a|--alert: Set name of alert to delete") + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) @@ -44,8 +44,8 @@ def usage(): print(res) sys.exit(1) -for alert in res['alerts']: - if alert['name'] == alert_name: +for alert in res["alerts"]: + if alert["name"] == alert_name: print("Deleting alert") ok, res = sdclient.delete_alert(alert) if not ok: diff --git a/examples/delete_all_policies.py b/examples/delete_all_policies.py index 0cb3a1c9..3742925d 100755 --- a/examples/delete_all_policies.py +++ b/examples/delete_all_policies.py @@ -9,8 +9,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -25,7 +25,7 @@ def usage(): # # Instantiate the SDC client 
# -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") # Get a list of policyIds ok, res = sdclient.list_policies() @@ -38,8 +38,8 @@ def usage(): policies = res for policy in policies: - print(("deleting policy: " + str(policy['id']))) - ok, res = sdclient.delete_policy_id(policy['id']) + print(("deleting policy: " + str(policy["id"]))) + ok, res = sdclient.delete_policy_id(policy["id"]) if not ok: print(res) sys.exit(1) diff --git a/examples/delete_all_policies_v1.py b/examples/delete_all_policies_v1.py index b13a3d27..c7b24d10 100755 --- a/examples/delete_all_policies_v1.py +++ b/examples/delete_all_policies_v1.py @@ -9,8 +9,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -25,7 +25,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClientV1(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClientV1(sdc_token, "https://secure.sysdig.com") # Get a list of policyIds ok, res = sdclient.list_policies() @@ -35,11 +35,11 @@ def usage(): print(res) sys.exit(1) else: - policies = res['policies'] + policies = res["policies"] for policy in policies: - print(("deleting policy: " + str(policy['id']))) - ok, res = sdclient.delete_policy_id(policy['id']) + print(("deleting policy: " + str(policy["id"]))) + ok, res = sdclient.delete_policy_id(policy["id"]) if not ok: print(res) sys.exit(1) diff --git a/examples/delete_dashboard.py b/examples/delete_dashboard.py index 220a4122..4dbb2e61 100755 --- a/examples/delete_dashboard.py +++ b/examples/delete_dashboard.py @@ -13,9 +13,9 @@ # Parse arguments # def usage(): - print(('usage: %s [-p|--pattern ] ' % sys.argv[0])) - print('-p|--pattern: Delete all dashboards containing the provided pattern') - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s [-p|--pattern ] " % sys.argv[0])) + print("-p|--pattern: Delete all dashboards containing the provided pattern") + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) @@ -50,9 +50,9 @@ def usage(): # # Delete all the dashboards containing pattern # -for dashboard in res['dashboards']: - if pattern in dashboard['name']: - print(("Deleting " + dashboard['name'])) +for dashboard in res["dashboards"]: + if pattern in dashboard["name"]: + print(("Deleting " + dashboard["name"])) ok, res = sdclient.delete_dashboard(dashboard) if not ok: print(res) diff --git a/examples/delete_event.py b/examples/delete_event.py index 898315f5..a97eb246 100755 --- a/examples/delete_event.py +++ b/examples/delete_event.py @@ -15,9 +15,9 @@ def usage(): - print(('usage: %s [-e|--event ] ' % sys.argv[0])) - print('-e|--event: Name of event to delete') - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s [-e|--event ] " % sys.argv[0])) + print("-e|--event: Name of event to delete") + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) @@ -53,8 +53,8 @@ def usage(): # # Delete the first event among the returned ones # -for event in res['events']: - print(("Deleting event " + event['name'])) +for event in res["events"]: + print(("Deleting event " + event["name"])) ok, res = sdclient.delete_event(event) if not ok: diff --git 
a/examples/delete_policy.py b/examples/delete_policy.py index c644da2d..967ab487 100755 --- a/examples/delete_policy.py +++ b/examples/delete_policy.py @@ -11,10 +11,10 @@ def usage(): - print(('usage: %s [-i|--id ] [-n|--name ] ' % sys.argv[0])) - print('-i|--id: the id of the policy to delete') - print('-n|--name: the name of the policy to delete') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s [-i|--id ] [-n|--name ] " % sys.argv[0])) + print("-i|--id: the id of the policy to delete") + print("-n|--name: the name of the policy to delete") + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -45,7 +45,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") if len(id) > 0: ok, res = sdclient.delete_policy_id(id) diff --git a/examples/delete_policy_v1.py b/examples/delete_policy_v1.py index bd8f7f06..cbb8d268 100755 --- a/examples/delete_policy_v1.py +++ b/examples/delete_policy_v1.py @@ -11,10 +11,10 @@ def usage(): - print(('usage: %s [-i|--id ] [-n|--name ] ' % sys.argv[0])) - print('-i|--id: the id of the policy to delete') - print('-n|--name: the name of the policy to delete') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s [-i|--id ] [-n|--name ] " % sys.argv[0])) + print("-i|--id: the id of the policy to delete") + print("-n|--name: the name of the policy to delete") + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -45,7 +45,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClientV1(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClientV1(sdc_token, "https://secure.sysdig.com") if len(id) > 0: ok, res = sdclient.delete_policy_id(id) diff --git a/examples/download_dashboards.py b/examples/download_dashboards.py index ccb76dd4..6001a833 100755 --- a/examples/download_dashboards.py +++ b/examples/download_dashboards.py @@ -21,7 +21,7 @@ def cleanup_dir(path): if not os.path.exists(path): return if not os.path.isdir(path): - print('Provided path is not a directory') + print("Provided path is not a directory") sys.exit(-1) for file in os.listdir(path): @@ -30,7 +30,12 @@ def cleanup_dir(path): if os.path.isfile(file_path): os.unlink(file_path) else: - print(('Cannot clean the provided directory due to delete failure on %s' % file_path)) + print( + ( + "Cannot clean the provided directory due to delete failure on %s" + % file_path + ) + ) except Exception as e: print(e) os.rmdir(path) @@ -40,13 +45,13 @@ def cleanup_dir(path): # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] dashboard_state_file = sys.argv[2] -sysdig_dashboard_dir = 'sysdig-dashboard-dir' +sysdig_dashboard_dir = "sysdig-dashboard-dir" # # Instantiate the SDC client @@ -72,12 +77,14 @@ def cleanup_dir(path): if not os.path.exists(sysdig_dashboard_dir): os.makedirs(sysdig_dashboard_dir) -for db in res['dashboards']: - sdclient.save_dashboard_to_file(db, os.path.join(sysdig_dashboard_dir, str(db['id']))) +for db in res["dashboards"]: + sdclient.save_dashboard_to_file( + db, os.path.join(sysdig_dashboard_dir, 
str(db["id"])) + ) - print(("Name: %s" % (db['name']))) + print(("Name: %s" % (db["name"]))) -zipf = zipfile.ZipFile(dashboard_state_file, 'w', zipfile.ZIP_DEFLATED) +zipf = zipfile.ZipFile(dashboard_state_file, "w", zipfile.ZIP_DEFLATED) zipdir(sysdig_dashboard_dir, zipf) zipf.close() diff --git a/examples/flip_alerts_enabled.py b/examples/flip_alerts_enabled.py index 62075854..72c4a3b4 100755 --- a/examples/flip_alerts_enabled.py +++ b/examples/flip_alerts_enabled.py @@ -15,9 +15,9 @@ # Parse arguments # def usage(): - print(('usage: %s [-a|--alert ] ' % sys.argv[0])) - print('-a|--alert: Comma seperated list of alerts') - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s [-a|--alert ] " % sys.argv[0])) + print("-a|--alert: Comma seperated list of alerts") + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) @@ -47,12 +47,12 @@ def usage(): sys.exit(1) alert_found = False -for alert in res['alerts']: - if alert['name'] in alert_list: +for alert in res["alerts"]: + if alert["name"] in alert_list: alert_found = True - print(("Updating \'" + alert['name'] + "\'. Enabled status before change:")) - print((alert['enabled'])) - alert['enabled'] = not alert['enabled'] + print(("Updating '" + alert["name"] + "'. Enabled status before change:")) + print((alert["enabled"])) + alert["enabled"] = not alert["enabled"] ok, res_update = sdclient.update_alert(alert) if not ok: @@ -60,10 +60,10 @@ def usage(): sys.exit(1) # Validate and print the results - print('Alert status after modification:') - print((alert['enabled'])) - print(' ') + print("Alert status after modification:") + print((alert["enabled"])) + print(" ") if not alert_found: - print('Alert to be updated not found') + print("Alert to be updated not found") sys.exit(1) diff --git a/examples/get_agents_config.py b/examples/get_agents_config.py index f301342b..0ec4ef6b 100755 --- a/examples/get_agents_config.py +++ b/examples/get_agents_config.py @@ -14,8 +14,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -23,7 +23,7 @@ # # Instantiate the SDC client # -sdclient = SdcClient(sdc_token, 'https://app.sysdigcloud.com') +sdclient = SdcClient(sdc_token, "https://app.sysdigcloud.com") # # Get the configuration diff --git a/examples/get_anchore_users_account.py b/examples/get_anchore_users_account.py index e7703654..81c9993f 100644 --- a/examples/get_anchore_users_account.py +++ b/examples/get_anchore_users_account.py @@ -9,8 +9,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -25,7 +25,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdScanningClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdScanningClient(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.get_anchore_users_account() diff --git a/examples/get_dashboard.py b/examples/get_dashboard.py index ec4f95e0..dd32b3b9 100755 --- a/examples/get_dashboard.py +++ b/examples/get_dashboard.py @@ -22,7 +22,7 @@ def cleanup_dir(path): if not os.path.exists(path): return if not 
os.path.isdir(path): - print('Provided path is not a directory') + print("Provided path is not a directory") sys.exit(-1) for file in os.listdir(path): @@ -31,7 +31,12 @@ def cleanup_dir(path): if os.path.isfile(file_path): os.unlink(file_path) else: - print(('Cannot clean the provided directory due to delete failure on %s' % file_path)) + print( + ( + "Cannot clean the provided directory due to delete failure on %s" + % file_path + ) + ) except Exception as e: print(e) os.rmdir(path) @@ -41,8 +46,10 @@ def cleanup_dir(path): # Parse arguments # if len(sys.argv) != 4: - print(('usage: %s "" ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print( + ('usage: %s "" ' % sys.argv[0]) + ) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) @@ -50,7 +57,7 @@ def cleanup_dir(path): sdc_token = sys.argv[1] dashboard_name = sys.argv[2] dashboard_state_file = sys.argv[3] -sysdig_dashboard_dir = 'sysdig-dashboard-dir' +sysdig_dashboard_dir = "sysdig-dashboard-dir" # # Instantiate the SDC client @@ -65,9 +72,9 @@ def cleanup_dir(path): # Check the result if ok and len(res) > 0: - print('Dashboard found, ID: ', res[0]['dashboard']['id']) - dashboard_id = res[0]['dashboard']['id'] - dashboard_configuration = res[0]['dashboard'] + print("Dashboard found, ID: ", res[0]["dashboard"]["id"]) + dashboard_id = res[0]["dashboard"]["id"] + dashboard_configuration = res[0]["dashboard"] else: print(res) sys.exit(1) @@ -90,13 +97,21 @@ def cleanup_dir(path): # # Check for successful retrieval and save it # -if len(res['dashboard']) > 0: - print('Downloading Dashboard ID: ', dashboard_id) - sdclient.save_dashboard_to_file(res['dashboard'], os.path.join(sysdig_dashboard_dir, str(res['dashboard']['id']))) - print('Dashboard Name: "%s"' % (res['dashboard']['name']), 'ID:', dashboard_id, 'downloaded') - - -zipf = zipfile.ZipFile(dashboard_state_file, 'w', zipfile.ZIP_DEFLATED) +if len(res["dashboard"]) > 0: + print("Downloading Dashboard ID: ", dashboard_id) + sdclient.save_dashboard_to_file( + res["dashboard"], + os.path.join(sysdig_dashboard_dir, str(res["dashboard"]["id"])), + ) + print( + 'Dashboard Name: "%s"' % (res["dashboard"]["name"]), + "ID:", + dashboard_id, + "downloaded", + ) + + +zipf = zipfile.ZipFile(dashboard_state_file, "w", zipfile.ZIP_DEFLATED) zipdir(sysdig_dashboard_dir, zipf) zipf.close() diff --git a/examples/get_data_promql_advanced.py b/examples/get_data_promql_advanced.py index 0a818c6b..05d59509 100755 --- a/examples/get_data_promql_advanced.py +++ b/examples/get_data_promql_advanced.py @@ -24,7 +24,7 @@ def print_prometheus_results_as_table(results): for series in results: metric = series.get("metric", {}) - label = ','.join(f'{k}={v}' for k, v in sorted(metric.items())) + label = ",".join(f"{k}={v}" for k, v in sorted(metric.items())) label_keys.append(label) time_series_by_label[label] = {} @@ -54,8 +54,8 @@ def print_prometheus_results_as_table(results): # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -69,7 +69,7 @@ def print_prometheus_results_as_table(results): # this by comparing the actual CPU usage of each workload to the CPU limits set for them and # then ranks the results to show the top 5. 
# -query = ''' +query = """ topk (5, sum by (kube_cluster_name, kube_namespace_name, kube_workload_name) ( rate( @@ -86,7 +86,7 @@ def print_prometheus_results_as_table(results): } ) ) -''' +""" # # Time window: @@ -153,7 +153,6 @@ def print_prometheus_results_as_table(results): # } # - # # Print summary (what, when) # diff --git a/examples/get_data_promql_instant_advanced.py b/examples/get_data_promql_instant_advanced.py index 3e23a516..64fd92f3 100644 --- a/examples/get_data_promql_instant_advanced.py +++ b/examples/get_data_promql_instant_advanced.py @@ -29,11 +29,13 @@ def print_prometheus_instant_result(result): for entry in result: timestamp, value = entry.get("value", [None, None]) - dt = datetime.fromtimestamp(float(timestamp)).isoformat() if timestamp else "N/A" + dt = ( + datetime.fromtimestamp(float(timestamp)).isoformat() if timestamp else "N/A" + ) metric = entry.get("metric", {}) if has_labels: - label_str = ', '.join(f'{k}="{v}"' for k, v in sorted(metric.items())) + label_str = ", ".join(f'{k}="{v}"' for k, v in sorted(metric.items())) print(f"{dt:<25} | {label_str:<40} | {value:>10}") else: print(f"{dt:<25} | {value:>10}") @@ -43,8 +45,8 @@ def print_prometheus_instant_result(result): # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -58,7 +60,7 @@ def print_prometheus_instant_result(result): # this by comparing the actual CPU usage of each workload to the CPU limits set for them and # then ranks the results to show the top 5. # -query = ''' +query = """ topk(5, sum by (kube_cluster_name, kube_namespace_name, kube_workload_name) ( rate( @@ -75,7 +77,7 @@ def print_prometheus_instant_result(result): } ) ) -''' +""" # # Time: @@ -109,7 +111,6 @@ def print_prometheus_instant_result(result): # } # - # # Print summary (what, when) # diff --git a/examples/get_data_promql_instant_simple.py b/examples/get_data_promql_instant_simple.py index bdb8e726..607398bc 100755 --- a/examples/get_data_promql_instant_simple.py +++ b/examples/get_data_promql_instant_simple.py @@ -29,11 +29,13 @@ def print_prometheus_instant_result(result): for entry in result: timestamp, value = entry.get("value", [None, None]) - dt = datetime.fromtimestamp(float(timestamp)).isoformat() if timestamp else "N/A" + dt = ( + datetime.fromtimestamp(float(timestamp)).isoformat() if timestamp else "N/A" + ) metric = entry.get("metric", {}) if has_labels: - label_str = ', '.join(f'{k}="{v}"' for k, v in sorted(metric.items())) + label_str = ", ".join(f'{k}="{v}"' for k, v in sorted(metric.items())) print(f"{dt:<25} | {label_str:<40} | {value:>10}") else: print(f"{dt:<25} | {value:>10}") @@ -43,8 +45,8 @@ def print_prometheus_instant_result(result): # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -56,11 +58,11 @@ def print_prometheus_instant_result(result): # A PromQL query to execute. In this example, we are querying for the total CPU usage # of all containers in all pods in the last 10 minutes. 
# -query = ''' +query = """ sum ( avg_over_time(kube_pod_container_resource_requests{resource="cpu"}[10m]) ) -''' +""" # # Time: @@ -94,7 +96,6 @@ def print_prometheus_instant_result(result): # } # - # # Print summary (what, when) # diff --git a/examples/get_data_promql_simple.py b/examples/get_data_promql_simple.py index 8aa4fd8c..c91cd9b5 100755 --- a/examples/get_data_promql_simple.py +++ b/examples/get_data_promql_simple.py @@ -24,7 +24,7 @@ def print_prometheus_results_as_table(results): for series in results: metric = series.get("metric", {}) - label = ','.join(f'{k}={v}' for k, v in sorted(metric.items())) + label = ",".join(f"{k}={v}" for k, v in sorted(metric.items())) label_keys.append(label) time_series_by_label[label] = {} @@ -54,8 +54,8 @@ def print_prometheus_results_as_table(results): # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -67,11 +67,11 @@ def print_prometheus_results_as_table(results): # A PromQL query to execute. In this example, we are querying for the total CPU usage # of all containers in all pods in the last 10 minutes. # -query = ''' +query = """ sum ( avg_over_time(kube_pod_container_resource_requests{resource="cpu"}[10m]) ) -''' +""" # # Time window: @@ -138,7 +138,6 @@ def print_prometheus_results_as_table(results): # } # - # # Print summary (what, when) # diff --git a/examples/get_image_info_by_id.py b/examples/get_image_info_by_id.py index 00857832..631506b8 100644 --- a/examples/get_image_info_by_id.py +++ b/examples/get_image_info_by_id.py @@ -9,8 +9,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -26,7 +26,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdScanningClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdScanningClient(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.get_image_info_by_id(image_id_sha) diff --git a/examples/get_image_scan_result_by_id.py b/examples/get_image_scan_result_by_id.py index de8b2a87..ac95f06d 100644 --- a/examples/get_image_scan_result_by_id.py +++ b/examples/get_image_scan_result_by_id.py @@ -9,8 +9,10 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print( + ("usage: %s " % sys.argv[0]) + ) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -28,7 +30,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdScanningClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdScanningClient(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.get_image_scan_result_by_id(image_id, full_tag_name, detail) diff --git a/examples/get_label_values.py b/examples/get_label_values.py index 1b43c181..f018492a 100644 --- a/examples/get_label_values.py +++ b/examples/get_label_values.py @@ -33,8 +33,8 @@ def render_label_values_as_table(label_values): # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can 
find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] diff --git a/examples/get_labels.py b/examples/get_labels.py index 6d6e8f50..55c32a2d 100644 --- a/examples/get_labels.py +++ b/examples/get_labels.py @@ -33,8 +33,8 @@ def render_labels_as_table(labels): # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -45,9 +45,7 @@ def render_labels_as_table(labels): # # Optional matchers to filter the labels # -match = [ - 'up' -] # Replace with a list of matchers if needed +match = ["up"] # Replace with a list of matchers if needed # # Optional limit diff --git a/examples/get_latest_pdf_report_by_digest.py b/examples/get_latest_pdf_report_by_digest.py index 78495c14..5d0712fc 100644 --- a/examples/get_latest_pdf_report_by_digest.py +++ b/examples/get_latest_pdf_report_by_digest.py @@ -9,8 +9,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -28,7 +28,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdScanningClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdScanningClient(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.get_latest_pdf_report_by_digest(image_digest, full_tag) @@ -36,7 +36,7 @@ def usage(): # Return the result # if ok: - with open(pdf_path, 'wb') as f: + with open(pdf_path, "wb") as f: f.write(res) print(("PDF %s saved" % pdf_path)) else: diff --git a/examples/get_metadata.py b/examples/get_metadata.py index daec2636..5f7183d4 100644 --- a/examples/get_metadata.py +++ b/examples/get_metadata.py @@ -17,41 +17,53 @@ def render_metadata_as_table(metadata): rows = [] for metric, details in metadata.items(): for detail in details: - rows.append({ - "Metric": metric, - "Type": detail.get("type", ""), - "Unit": detail.get("unit", ""), - "Help": detail.get("help", "") - }) + rows.append( + { + "Metric": metric, + "Type": detail.get("type", ""), + "Unit": detail.get("unit", ""), + "Help": detail.get("help", ""), + } + ) # Extract column names columns = ["Metric", "Type", "Unit", "Help"] # Calculate the maximum width for each column - column_widths = {col: max(len(col), max(len(str(row[col])) for row in rows)) for col in columns} + column_widths = { + col: max(len(col), max(len(str(row[col])) for row in rows)) for col in columns + } # Create a horizontal separator separator = "+" + "+".join("-" * (column_widths[col] + 2) for col in columns) + "+" # Create the header row - header = "|" + "|".join(f" {col.ljust(column_widths[col])} " for col in columns) + "|" + header = ( + "|" + "|".join(f" {col.ljust(column_widths[col])} " for col in columns) + "|" + ) # Create the rows for each metadata entry table_rows = [ - "|" + "|".join(f" {str(row[col]).ljust(column_widths[col])} " for col in columns) + "|" + "|" + + "|".join(f" {str(row[col]).ljust(column_widths[col])} " for col in columns) + + "|" for row in rows ] # Combine everything into a table - print(f"{separator}\n{header}\n{separator}\n" + "\n".join(table_rows) + f"\n{separator}") + print( + f"{separator}\n{header}\n{separator}\n" + + "\n".join(table_rows) + + f"\n{separator}" + ) 
# # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] diff --git a/examples/get_pdf_report.py b/examples/get_pdf_report.py index 33136d46..98c546a6 100755 --- a/examples/get_pdf_report.py +++ b/examples/get_pdf_report.py @@ -9,8 +9,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -27,7 +27,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdScanningClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdScanningClient(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.get_pdf_report(image) @@ -35,7 +35,7 @@ def usage(): # Return the result # if ok: - with open(pdf_path, 'wb') as f: + with open(pdf_path, "wb") as f: f.write(res) print(("PDF %s saved" % pdf_path)) else: diff --git a/examples/get_policy.py b/examples/get_policy.py index 99db48c8..9962c18a 100755 --- a/examples/get_policy.py +++ b/examples/get_policy.py @@ -10,8 +10,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -27,7 +27,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.get_policy(name) diff --git a/examples/get_policy_v1.py b/examples/get_policy_v1.py index b94d1e7d..55282e6a 100755 --- a/examples/get_policy_v1.py +++ b/examples/get_policy_v1.py @@ -10,8 +10,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -27,7 +27,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClientV1(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClientV1(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.get_policy(name) diff --git a/examples/get_secure_default_falco_rules_files.py b/examples/get_secure_default_falco_rules_files.py index be526718..52d3f57f 100755 --- a/examples/get_secure_default_falco_rules_files.py +++ b/examples/get_secure_default_falco_rules_files.py @@ -19,9 +19,11 @@ # Parse arguments # def usage(): - print(('usage: %s [-s|--save ] ' % sys.argv[0])) - print('-s|--save: save the retrieved files to a set of files below using save_default_rules_files().') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s [-s|--save ] " % sys.argv[0])) + print( + "-s|--save: save the retrieved files to a set of files below using save_default_rules_files()." 
+ ) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -46,7 +48,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") # # Get the configuration diff --git a/examples/get_secure_policy_events.py b/examples/get_secure_policy_events.py index c804b141..3ee74ba2 100755 --- a/examples/get_secure_policy_events.py +++ b/examples/get_secure_policy_events.py @@ -18,11 +18,17 @@ def usage(): - print(('usage: %s [-s|--summarize] [-l|--limit ] [| ]' % - sys.argv[0])) - print('-s|--summarize: group policy events by sanitized output and print by frequency') - print('-l|--limit: with -s, only print the first outputs') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print( + ( + "usage: %s [-s|--summarize] [-l|--limit ] [| ]" + % sys.argv[0] + ) + ) + print( + "-s|--summarize: group policy events by sanitized output and print by frequency" + ) + print("-l|--limit: with -s, only print the first outputs") + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -61,7 +67,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") if duration is not None: ok, res = sdclient.get_policy_events_duration(duration) diff --git a/examples/get_secure_policy_events_old.py b/examples/get_secure_policy_events_old.py index 5799256f..1505eb2b 100755 --- a/examples/get_secure_policy_events_old.py +++ b/examples/get_secure_policy_events_old.py @@ -24,11 +24,15 @@ def usage(): - print('usage: %s [-s|--summarize] [-l|--limit ] [| ]' % - sys.argv[0]) - print('-s|--summarize: group policy events by sanitized output and print by frequency') - print('-l|--limit: with -s, only print the first outputs') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print( + "usage: %s [-s|--summarize] [-l|--limit ] [| ]" + % sys.argv[0] + ) + print( + "-s|--summarize: group policy events by sanitized output and print by frequency" + ) + print("-l|--limit: with -s, only print the first outputs") + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -67,7 +71,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = PolicyEventsClientOld(sdc_token, 'https://secure.sysdig.com') +sdclient = PolicyEventsClientOld(sdc_token, "https://secure.sysdig.com") if duration is not None: ok, res = sdclient.get_policy_events_duration(duration) @@ -77,7 +81,6 @@ def usage(): all_outputs = dict() while True: - # # Return the result # @@ -85,19 +88,19 @@ def usage(): print(res) sys.exit(1) - if len(res['data']['policyEvents']) == 0: + if len(res["data"]["policyEvents"]) == 0: break - sys.stderr.write("offset={}\n".format(res['ctx']['offset'])) + sys.stderr.write("offset={}\n".format(res["ctx"]["offset"])) - for event in res['data']['policyEvents']: + for event in res["data"]["policyEvents"]: if summarize: - sanitize_output = re.sub(r'\S+\s\(id=\S+\)', '', event['output']) + sanitize_output = re.sub(r"\S+\s\(id=\S+\)", "", event["output"]) all_outputs[sanitize_output] = all_outputs.get(sanitize_output, 0) + 1 else: sys.stdout.write(json.dumps(event) + "\n") - ok, res = sdclient.get_more_policy_events(res['ctx']) + ok, res = sdclient.get_more_policy_events(res["ctx"]) if summarize: sorted = sorted(all_outputs.items(), 
key=operator.itemgetter(1), reverse=True) diff --git a/examples/get_secure_system_falco_rules.py b/examples/get_secure_system_falco_rules.py index e2672279..7bace408 100755 --- a/examples/get_secure_system_falco_rules.py +++ b/examples/get_secure_system_falco_rules.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -20,7 +20,7 @@ # # Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") # # Get the configuration diff --git a/examples/get_secure_user_falco_rules.py b/examples/get_secure_user_falco_rules.py index cf04f439..991dc66b 100755 --- a/examples/get_secure_user_falco_rules.py +++ b/examples/get_secure_user_falco_rules.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -20,7 +20,7 @@ # # Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") # # Get the configuration diff --git a/examples/get_series.py b/examples/get_series.py index c7f6d79f..5c1f5fa0 100644 --- a/examples/get_series.py +++ b/examples/get_series.py @@ -17,7 +17,10 @@ def render_json_as_table(results): keys = list(results[0].keys()) # Calculate the maximum width for each column - column_widths = {key: max(len(key), max(len(str(row.get(key, ""))) for row in results)) for key in keys} + column_widths = { + key: max(len(key), max(len(str(row.get(key, ""))) for row in results)) + for key in keys + } # Create a horizontal separator separator = "+".join("-" * (column_widths[key] + 2) for key in keys) @@ -28,18 +31,26 @@ def render_json_as_table(results): # Create the rows for each JSON object rows = [] for row in results: - rows.append("|".join(f" {str(row.get(key, '')).ljust(column_widths[key])} " for key in keys)) + rows.append( + "|".join( + f" {str(row.get(key, '')).ljust(column_widths[key])} " for key in keys + ) + ) # Combine everything into a table - print(f"+{separator}+\n|{header}|\n+{separator}+\n" + "\n".join(f"|{row}|" for row in rows) + f"\n+{separator}+") + print( + f"+{separator}+\n|{header}|\n+{separator}+\n" + + "\n".join(f"|{row}|" for row in rows) + + f"\n+{separator}+" + ) # # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -50,16 +61,13 @@ def render_json_as_table(results): # # Matchers to filter the series. 
Example: `up` and `process_start_time_seconds{job="prometheus"}` # -match = [ - 'up', - 'process_start_time_seconds{job="prometheus"}' -] +match = ["up", 'process_start_time_seconds{job="prometheus"}'] # # Optional time range # start = None # Replace with a timestamp if needed -end = None # Replace with a timestamp if needed +end = None # Replace with a timestamp if needed # # Optional limit diff --git a/examples/list_access_keys.py b/examples/list_access_keys.py index 9af0cdfa..3707f2d3 100755 --- a/examples/list_access_keys.py +++ b/examples/list_access_keys.py @@ -12,9 +12,9 @@ # Parse arguments # if len(sys.argv) != 2: - print('usage: %s ' % sys.argv[0]) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') - print('For this script to work, the user for the token must have Admin rights') + print("usage: %s " % sys.argv[0]) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") + print("For this script to work, the user for the token must have Admin rights") sys.exit(1) sdc_token = sys.argv[1] @@ -22,16 +22,16 @@ # # Instantiate the SDC client # -sdclient = SdcClient(sdc_token, 'https://app.sysdigcloud.com') +sdclient = SdcClient(sdc_token, "https://app.sysdigcloud.com") # # Get the configuration # ok, res = sdclient.list_access_keys() if ok: - print('Access Keys\n===========') - for access_key in res['customerAccessKeys']: - print(access_key['accessKey']) + print("Access Keys\n===========") + for access_key in res["customerAccessKeys"]: + print(access_key["accessKey"]) else: print(res) sys.exit(1) diff --git a/examples/list_admins.py b/examples/list_admins.py index 048a45cf..9cadb959 100755 --- a/examples/list_admins.py +++ b/examples/list_admins.py @@ -15,9 +15,9 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') - print('For this script to work, the user for the token must have Admin rights') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") + print("For this script to work, the user for the token must have Admin rights") sys.exit(1) sdc_token = sys.argv[1] @@ -25,7 +25,7 @@ # # Instantiate the SDC client # -sdclient = SdcClient(sdc_token, 'https://app.sysdigcloud.com') +sdclient = SdcClient(sdc_token, "https://app.sysdigcloud.com") # # Get the configuration @@ -35,16 +35,16 @@ admins = [] superadmins = [] for user in res: - if 'ROLE_CUSTOMER' in user['roles']: - admins.append(user['username']) - if 'ROLE_ADMIN' in user['roles']: - superadmins.append(user['username']) - print('Admin users') - print('-----------') + if "ROLE_CUSTOMER" in user["roles"]: + admins.append(user["username"]) + if "ROLE_ADMIN" in user["roles"]: + superadmins.append(user["username"]) + print("Admin users") + print("-----------") for username in admins: print(username) - print('\nSuper Admins') - print('------------') + print("\nSuper Admins") + print("------------") for username in superadmins: print(username) else: diff --git a/examples/list_alert_notifications.py b/examples/list_alert_notifications.py index b64ff355..d5e5acc9 100755 --- a/examples/list_alert_notifications.py +++ b/examples/list_alert_notifications.py @@ -12,22 +12,23 @@ def print_notifications(notifications): for notification in notifications: values = [] - for entity in notification['entities']: - for value in entity['metricValues']: - values.append(str(value['value'])) - notification.update({'values': 
','.join(values)}) + for entity in notification["entities"]: + for value in entity["metricValues"]: + values.append(str(value["value"])) + notification.update({"values": ",".join(values)}) notification["filter"] = notification.get("filter", "") - print("#%(id)s, State: %(state)s, Severity: %(severity)s, Scope: %(filter)s, Condition: %(condition)s, " - "Value: %(values)s, Resolved: %(resolved)s" % - notification) + print( + "#%(id)s, State: %(state)s, Severity: %(severity)s, Scope: %(filter)s, Condition: %(condition)s, " + "Value: %(values)s, Resolved: %(resolved)s" % notification + ) # # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -41,10 +42,10 @@ def print_notifications(notifications): # Get the notifications in the last day # ok, res = sdclient.get_notifications( - from_ts=int(time.time() - 86400), - to_ts=int(time.time())) + from_ts=int(time.time() - 86400), to_ts=int(time.time()) +) -print_notifications(res['notifications']) +print_notifications(res["notifications"]) if not ok: sys.exit(1) @@ -52,10 +53,10 @@ def print_notifications(notifications): # Get the notifications in the last day and active state # ok, res = sdclient.get_notifications( - from_ts=int(time.time() - 86400), - to_ts=int(time.time()), state='ACTIVE') + from_ts=int(time.time() - 86400), to_ts=int(time.time()), state="ACTIVE" +) -print_notifications(res['notifications']) +print_notifications(res["notifications"]) if not ok: sys.exit(1) @@ -63,10 +64,10 @@ def print_notifications(notifications): # Get the notifications in the last day and active state # ok, res = sdclient.get_notifications( - from_ts=int(time.time() - 86400), - to_ts=int(time.time()), state='OK') + from_ts=int(time.time() - 86400), to_ts=int(time.time()), state="OK" +) -print_notifications(res['notifications']) +print_notifications(res["notifications"]) if not ok: sys.exit(1) @@ -74,10 +75,9 @@ def print_notifications(notifications): # Get the notifications in the last day and resolved state # ok, res = sdclient.get_notifications( - from_ts=int(time.time() - 86400), - to_ts=int(time.time()), - resolved=True) + from_ts=int(time.time() - 86400), to_ts=int(time.time()), resolved=True +) -print_notifications(res['notifications']) +print_notifications(res["notifications"]) if not ok: sys.exit(1) diff --git a/examples/list_alerts.py b/examples/list_alerts.py index 95ff1d68..f9981fec 100755 --- a/examples/list_alerts.py +++ b/examples/list_alerts.py @@ -14,8 +14,8 @@ # json_dumpfilename = None if len(sys.argv) < 2 or len(sys.argv) > 3: - print(('usage: %s [json-dumpfile]' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s [json-dumpfile]" % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) elif len(sys.argv) == 3: json_dumpfilename = sys.argv[2] @@ -39,8 +39,8 @@ print(res) sys.exit(1) -for alert in res['alerts']: - print(('enabled: %s, name: %s' % (str(alert['enabled']), alert['name']))) +for alert in res["alerts"]: + print(("enabled: %s, name: %s" % (str(alert["enabled"]), alert["name"]))) if json_dumpfilename: with open(json_dumpfilename, "w") as f: diff --git a/examples/list_dashboards.py b/examples/list_dashboards.py index 0023bdce..8c74420e 100755 --- 
a/examples/list_dashboards.py +++ b/examples/list_dashboards.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -34,5 +34,10 @@ print(res) sys.exit(1) -for db in res['dashboards']: - print(("Name: %s, # Charts: %d" % (db['name'], len(db['widgets'] if 'widgets' in db else [])))) +for db in res["dashboards"]: + print( + ( + "Name: %s, # Charts: %d" + % (db["name"], len(db["widgets"] if "widgets" in db else [])) + ) + ) diff --git a/examples/list_events.py b/examples/list_events.py index 41517c8f..92e90ab6 100755 --- a/examples/list_events.py +++ b/examples/list_events.py @@ -9,19 +9,23 @@ def print_events(data): - for event in data['events']: - event['sev'] = event.get('severity', 'not set') - event['description'] = event.get('description', 'not set') - print(('id: %(id)s, time: %(timestamp)d, name: %(name)s, description: %(description)s, severity: %(sev)s' - % event)) + for event in data["events"]: + event["sev"] = event.get("severity", "not set") + event["description"] = event.get("description", "not set") + print( + ( + "id: %(id)s, time: %(timestamp)d, name: %(name)s, description: %(description)s, severity: %(sev)s" + % event + ) + ) # # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -45,8 +49,8 @@ def print_events(data): # # Get the events before other event # -if len(res['events']) > 0: - ok, res = sdclient.get_events(pivot=res['events'][-1]["id"]) +if len(res["events"]) > 0: + ok, res = sdclient.get_events(pivot=res["events"][-1]["id"]) else: ok, res = True, {"events": []} @@ -70,7 +74,7 @@ def print_events(data): # # Get the events that match a status # -ok, res = sdclient.get_events(status=['triggered', 'unacknowledged']) +ok, res = sdclient.get_events(status=["triggered", "unacknowledged"]) if ok: print_events(res) diff --git a/examples/list_notification_channels.py b/examples/list_notification_channels.py index 3025c230..9d5a9c45 100755 --- a/examples/list_notification_channels.py +++ b/examples/list_notification_channels.py @@ -12,8 +12,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -32,7 +32,7 @@ # Return the result # if ok: - print((json.dumps(res['notificationChannels'], indent=4))) + print((json.dumps(res["notificationChannels"], indent=4))) else: print(res) sys.exit(1) diff --git a/examples/list_policies.py b/examples/list_policies.py index 2146c93d..268efde2 100755 --- a/examples/list_policies.py +++ b/examples/list_policies.py @@ -10,8 +10,8 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -26,7 +26,7 @@ def usage(): # # 
Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.list_policies() diff --git a/examples/list_policies_v1.py b/examples/list_policies_v1.py index 30758a7f..0795be0f 100755 --- a/examples/list_policies_v1.py +++ b/examples/list_policies_v1.py @@ -11,10 +11,12 @@ def usage(): - print(('usage: %s [-o|--order-only] ' % sys.argv[0])) - print('-o|--order-only: Only display the list of policy ids in evaluation order. ' - 'Suitable for use by set_policy_order.py') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s [-o|--order-only] " % sys.argv[0])) + print( + "-o|--order-only: Only display the list of policy ids in evaluation order. " + "Suitable for use by set_policy_order.py" + ) + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -39,7 +41,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClientV1(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClientV1(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.get_policy_priorities() @@ -48,13 +50,13 @@ def usage(): sys.exit(1) # Strip the surrounding json to only keep the list of policy ids -res = res['priorities']['policyIds'] +res = res["priorities"]["policyIds"] if not order_only: priorities = res ok, res = sdclient.list_policies() if ok: - res['policies'].sort(key=lambda p: priorities.index(p['id'])) + res["policies"].sort(key=lambda p: priorities.index(p["id"])) # # Return the result diff --git a/examples/list_profiles.py b/examples/list_profiles.py index 963e4db8..00be90e8 100755 --- a/examples/list_profiles.py +++ b/examples/list_profiles.py @@ -9,7 +9,7 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) + print(("usage: %s " % sys.argv[0])) sys.exit(1) @@ -37,7 +37,7 @@ def usage(): sys.exit(1) # Strip the surrounding json to only keep the list of profiles -res = res['profiles'] +res = res["profiles"] for profile in res: print(("ID: {}, Name: {}".format(profile["profileId"], profile["imageName"]))) diff --git a/examples/list_sysdig_captures.py b/examples/list_sysdig_captures.py index fb72b4b0..7fbf5559 100755 --- a/examples/list_sysdig_captures.py +++ b/examples/list_sysdig_captures.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -31,11 +31,21 @@ # Show the list of metrics # if ok: - captures = res['dumps'] + captures = res["dumps"] else: print(res) sys.exit(1) for capture in captures: - print(("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" % - (capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))) + print( + ( + "Folder %s, Name %s, Host: %s, Size: %d, Status: %s" + % ( + capture["folder"], + capture["name"], + capture["agent"]["hostName"], + capture["size"], + capture["status"], + ) + ) + ) diff --git a/examples/list_users.py b/examples/list_users.py index 642d7904..0876f43c 100755 --- a/examples/list_users.py +++ b/examples/list_users.py @@ -12,9 +12,9 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') - print('For this 
script to work, the user for the token must have Admin rights') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") + print("For this script to work, the user for the token must have Admin rights") sys.exit(1) sdc_token = sys.argv[1] @@ -22,16 +22,16 @@ # # Instantiate the SDC client # -sdclient = SdcClient(sdc_token, 'https://app.sysdigcloud.com') +sdclient = SdcClient(sdc_token, "https://app.sysdigcloud.com") # # Get the configuration # ok, res = sdclient.get_users() if ok: - print('Users\n=====') + print("Users\n=====") for user in res: - print((user['username'])) + print((user["username"])) else: print(res) sys.exit(1) diff --git a/examples/notification_channels.py b/examples/notification_channels.py index bf93eed8..0fbc90a0 100755 --- a/examples/notification_channels.py +++ b/examples/notification_channels.py @@ -13,9 +13,9 @@ # Parse arguments # def usage(): - print(('usage: %s [-c|--channel ] ' % sys.argv[0])) - print('-c|--channel: Set name of channel to create') - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s [-c|--channel ] " % sys.argv[0])) + print("-c|--channel: Set name of channel to create") + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) @@ -43,8 +43,9 @@ def usage(): # # Create an email notification channel # -ok, res = sdclient.create_email_notification_channel(channel_name, ['gianluca.borello@sysdig.com', 'foo@sysdig.com', - 'bar@sysdig.com']) +ok, res = sdclient.create_email_notification_channel( + channel_name, ["gianluca.borello@sysdig.com", "foo@sysdig.com", "bar@sysdig.com"] +) if not ok: print(res) sys.exit(1) @@ -52,7 +53,7 @@ def usage(): # # The notification channel will contain the id, that can be used when creating alerts # -channel = res['notificationChannel'] +channel = res["notificationChannel"] print(channel) # diff --git a/examples/post_event.py b/examples/post_event.py index e45d2e34..e5a71e13 100755 --- a/examples/post_event.py +++ b/examples/post_event.py @@ -15,15 +15,24 @@ # Usage: post_event.py [-h] [-d DESCRIPTION] [-s SEVERITY] [-c SCOPE] [-t TAGS] sysdig_token name # parser = argparse.ArgumentParser() -parser.add_argument('-d', '--description') -parser.add_argument('-s', '--severity', help='syslog style from 0 (high) to 7 (low)') -parser.add_argument('-c', '--scope', - help='metadata, in Sysdig Cloud format, of nodes to associate with the event, ' - 'eg: \'host.hostName = "ip-10-1-1-1" and container.name = "foo"\'') -parser.add_argument('-t', '--tags', - help='dictionary of arbitrary key-value pairs, eg: \'{"app":"my_app", "file":"text.py"}\'') -parser.add_argument('sysdig_token', help='You can find your token at https://app.sysdigcloud.com/#/settings/user') -parser.add_argument('name') +parser.add_argument("-d", "--description") +parser.add_argument("-s", "--severity", help="syslog style from 0 (high) to 7 (low)") +parser.add_argument( + "-c", + "--scope", + help="metadata, in Sysdig Cloud format, of nodes to associate with the event, " + 'eg: \'host.hostName = "ip-10-1-1-1" and container.name = "foo"\'', +) +parser.add_argument( + "-t", + "--tags", + help='dictionary of arbitrary key-value pairs, eg: \'{"app":"my_app", "file":"text.py"}\'', +) +parser.add_argument( + "sysdig_token", + help="You can find your token at https://app.sysdigcloud.com/#/settings/user", +) +parser.add_argument("name") args = parser.parse_args() tags = None @@ -38,13 +47,15 @@ # # Post the event using 
post_event(self, name, description=None, severity=None, event_filter=None, tags=None) # -ok, res = sdclient.post_event(args.name, args.description, args.severity, args.scope, tags) +ok, res = sdclient.post_event( + args.name, args.description, args.severity, args.scope, tags +) # # Return the result # if ok: - print('Event Posted Successfully') + print("Event Posted Successfully") else: print(res) sys.exit(1) diff --git a/examples/post_event_simple.py b/examples/post_event_simple.py index feeadd96..34328921 100755 --- a/examples/post_event_simple.py +++ b/examples/post_event_simple.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) < 4: - print(('usage: %s name description [severity]' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s name description [severity]" % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -40,7 +40,7 @@ # Return the result # if ok: - print('Event Posted Successfully') + print("Event Posted Successfully") else: print(res) sys.exit(1) diff --git a/examples/print_data_retention_info.py b/examples/print_data_retention_info.py index b0e97ff6..526e77c2 100755 --- a/examples/print_data_retention_info.py +++ b/examples/print_data_retention_info.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -34,4 +34,4 @@ print(res) sys.exit(1) -print(res['agents']) +print(res["agents"]) diff --git a/examples/print_explore_grouping.py b/examples/print_explore_grouping.py index 4ee28154..37d9fa7f 100755 --- a/examples/print_explore_grouping.py +++ b/examples/print_explore_grouping.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] diff --git a/examples/print_user_info.py b/examples/print_user_info.py index e7cbb302..ee709514 100755 --- a/examples/print_user_info.py +++ b/examples/print_user_info.py @@ -12,8 +12,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -45,6 +45,6 @@ print(res) sys.exit(1) -print(('User Email: ' + uinfo['user']['username'])) -print(('Current Agents: %d' % nagents)) -print(('Max Agents: %s' % uinfo['user']['customerSettings']['plan']['maxAgents'])) +print(("User Email: " + uinfo["user"]["username"])) +print(("Current Agents: %d" % nagents)) +print(("Max Agents: %s" % uinfo["user"]["customerSettings"]["plan"]["maxAgents"])) diff --git a/examples/resolve_alert_notifications.py b/examples/resolve_alert_notifications.py index 2aab3e0c..b8187acb 100755 --- a/examples/resolve_alert_notifications.py +++ b/examples/resolve_alert_notifications.py @@ -12,8 +12,8 @@ # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your 
token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -27,8 +27,11 @@ # # Get the unresolved notifications in the last day # -ok, res = sdclient.get_notifications(from_ts=int(time.time() - int(num_days_to_resolve) * 86400), - to_ts=int(time.time()), resolved=False) +ok, res = sdclient.get_notifications( + from_ts=int(time.time() - int(num_days_to_resolve) * 86400), + to_ts=int(time.time()), + resolved=False, +) if not ok: print(res) @@ -37,7 +40,7 @@ # # Resolve them # -notifications = res['notifications'] +notifications = res["notifications"] print(("Resolving " + str(len(notifications)) + " notifications")) for notification in notifications: diff --git a/examples/restore_alerts.py b/examples/restore_alerts.py index a50966a7..b9d9becc 100755 --- a/examples/restore_alerts.py +++ b/examples/restore_alerts.py @@ -12,8 +12,8 @@ # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -34,8 +34,11 @@ existing_alerts = {} ok, res = sdclient.get_alerts() if ok: - for alert in res['alerts']: - existing_alerts[alert['name']] = {'id': alert['id'], 'version': alert['version']} + for alert in res["alerts"]: + existing_alerts[alert["name"]] = { + "id": alert["id"], + "version": alert["version"], + } else: print(res) sys.exit(1) @@ -57,27 +60,34 @@ created_count = 0 updated_count = 0 -with open(alerts_dump_file, 'r') as f: +with open(alerts_dump_file, "r") as f: j = json.load(f) - for a in j['alerts']: - if 'notificationChannelIds' in a: - for channel_id in a['notificationChannelIds']: + for a in j["alerts"]: + if "notificationChannelIds" in a: + for channel_id in a["notificationChannelIds"]: if channel_id not in existing_notification_channel_ids: - print(('Notification Channel ID ' + str(channel_id) + ' referenced in Alert "' + a[ - 'name'] + '" does not exist.\n Restoring without this ID.')) - a['notificationChannelIds'].remove(channel_id) + print( + ( + "Notification Channel ID " + + str(channel_id) + + ' referenced in Alert "' + + a["name"] + + '" does not exist.\n Restoring without this ID.' 
+ ) + ) + a["notificationChannelIds"].remove(channel_id) # The Create/Update APIs will validate but actually ignore these fields; # to avoid problems, don't submit in the API request - for timefield in ['createdOn', 'modifiedOn']: + for timefield in ["createdOn", "modifiedOn"]: del a[timefield] # NOTE: when exporting alerts that contain deprecated metrics you will # need to remove them from the source json # (see https://sysdigdocs.atlassian.net/wiki/spaces/Monitor/pages/205684810/Metrics#Metrics-HeuristicandDeprecatedMetrics) - if a['name'] in existing_alerts: - a['id'] = existing_alerts[a['name']]['id'] - a['version'] = existing_alerts[a['name']]['version'] + if a["name"] in existing_alerts: + a["id"] = existing_alerts[a["name"]]["id"] + a["version"] = existing_alerts[a["name"]]["version"] ok, res = sdclient.update_alert(a) updated_count += 1 else: @@ -87,5 +97,7 @@ print(res) sys.exit(1) -print(f'All Alerts in {alerts_dump_file} restored successfully ' - f'({str(created_count)} created, {str(updated_count)} updated)') +print( + f"All Alerts in {alerts_dump_file} restored successfully " + f"({str(created_count)} created, {str(updated_count)} updated)" +) diff --git a/examples/restore_dashboards.py b/examples/restore_dashboards.py index c29720d2..b9331a16 100755 --- a/examples/restore_dashboards.py +++ b/examples/restore_dashboards.py @@ -13,8 +13,8 @@ # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -25,24 +25,29 @@ # sdclient = SdMonitorClient(sdc_token) -zipf = zipfile.ZipFile(dashboard_state_file, 'r') +zipf = zipfile.ZipFile(dashboard_state_file, "r") for info in zipf.infolist(): data = zipf.read(info.filename) try: j = json.loads(data) except ValueError: - print(('Invalid JSON file found in ZIP file ' + info.filename + ': skipping')) + print(("Invalid JSON file found in ZIP file " + info.filename + ": skipping")) continue # # Handle old files # - if 'dashboard' in j: - j = j['dashboard'] + if "dashboard" in j: + j = j["dashboard"] ok, res = sdclient.create_dashboard_with_configuration(j) if ok: - print(('Restored Dashboard named: ' + j['name'])) + print(("Restored Dashboard named: " + j["name"])) else: - print(("Dashboard creation failed for dashboard name %s with error %s" % (j['name'], res))) + print( + ( + "Dashboard creation failed for dashboard name %s with error %s" + % (j["name"], res) + ) + ) diff --git a/examples/set_agents_config.py b/examples/set_agents_config.py index 963053bd..08c5d3b5 100755 --- a/examples/set_agents_config.py +++ b/examples/set_agents_config.py @@ -17,8 +17,8 @@ # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -33,7 +33,7 @@ # # Instantiate the SDC client # -sdclient = SdcClient(sdc_token, 'https://app.sysdigcloud.com') +sdclient = SdcClient(sdc_token, "https://app.sysdigcloud.com") json = {"files": [{"filter": "*", "content": yaml_conf}]} @@ -46,6 +46,6 @@ # Check if everything went well # if ok: - print('configuration set successfully') + print("configuration set successfully") else: print(res) diff --git 
a/examples/set_explore_group_configuration.py b/examples/set_explore_group_configuration.py index 112b3fad..89e2eb90 100755 --- a/examples/set_explore_group_configuration.py +++ b/examples/set_explore_group_configuration.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 2: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -25,7 +25,7 @@ # # Fire the request, set the group configuration you need in the example below # -groupConfig = ['agent.tag.role', 'host.mac'] +groupConfig = ["agent.tag.role", "host.mac"] ok, res = sdclient.set_explore_grouping_hierarchy(groupConfig) # diff --git a/examples/set_policy_order_v1.py b/examples/set_policy_order_v1.py index e2b7d812..8ad4fef0 100755 --- a/examples/set_policy_order_v1.py +++ b/examples/set_policy_order_v1.py @@ -10,9 +10,9 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('Reads json representing new policy evaluation order from standard input') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("Reads json representing new policy evaluation order from standard input") + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -34,7 +34,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClientV1(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClientV1(sdc_token, "https://secure.sysdig.com") # # The argument to /api/policies/priorities is the list of ids wrapped @@ -49,7 +49,7 @@ def usage(): sys.exit(1) obj = res -obj['priorities']['policyIds'] = priorities_obj +obj["priorities"]["policyIds"] = priorities_obj ok, res = sdclient.set_policy_priorities(json.dumps(obj)) diff --git a/examples/set_secure_default_falco_rules_files.py b/examples/set_secure_default_falco_rules_files.py index a0b49a57..524f1cf1 100755 --- a/examples/set_secure_default_falco_rules_files.py +++ b/examples/set_secure_default_falco_rules_files.py @@ -21,17 +21,26 @@ # Parse arguments # def usage(): - print(('usage: %s [-l|--load ] [-t|--tag ] [-c|--content ] ' % sys.argv[0])) - print('-l|--load: load the files to set from a set of files below using load_default_rules_files().') - print('-t|--tag: Set a tag for the set of files') - print('-c|--content: the (single) file to set') - print('if --load is specified, neither --tag nor --content can be specified') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print( + ( + "usage: %s [-l|--load ] [-t|--tag ] [-c|--content ] " + % sys.argv[0] + ) + ) + print( + "-l|--load: load the files to set from a set of files below using load_default_rules_files()." 
+ ) + print("-t|--tag: Set a tag for the set of files") + print("-c|--content: the (single) file to set") + print("if --load is specified, neither --tag nor --content can be specified") + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) try: - opts, args = getopt.getopt(sys.argv[1:], "l:t:n:c:", ["load=", "tag=", "name=", "content="]) + opts, args = getopt.getopt( + sys.argv[1:], "l:t:n:c:", ["load=", "tag=", "name=", "content="] + ) except getopt.GetoptError: usage() @@ -59,7 +68,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") files_obj = {} if load_dir != "": @@ -71,7 +80,7 @@ def usage(): print(res) sys.exit(1) else: - with open(cpath, 'r') as content_file: + with open(cpath, "r") as content_file: content = content_file.read() required_engine_version = 0 cyaml = yaml.safe_load(content) @@ -80,18 +89,25 @@ def usage(): try: required_engine_version = int(obj["required_engine_version"]) except ValueError: - print(("Required engine version \"{}\" in content {} must be a number".format( - obj["required_engine_version"], cpath))) + print( + ( + 'Required engine version "{}" in content {} must be a number'.format( + obj["required_engine_version"], cpath + ) + ) + ) sys.exit(1) files_obj = { "tag": tag, - "files": [{ - "name": os.path.basename(cpath), - "variants": { - "required_engine_version": required_engine_version, - "content": content + "files": [ + { + "name": os.path.basename(cpath), + "variants": { + "required_engine_version": required_engine_version, + "content": content, + }, } - }] + ], } ok, res = sdclient.set_default_falco_rules_files(files_obj) @@ -100,7 +116,7 @@ def usage(): # Return the result # if ok: - print('default falco rules files set successfully') + print("default falco rules files set successfully") else: print(res) sys.exit(1) diff --git a/examples/set_secure_system_falco_rules.py b/examples/set_secure_system_falco_rules.py index 6ae7b185..8fb5cc98 100755 --- a/examples/set_secure_system_falco_rules.py +++ b/examples/set_secure_system_falco_rules.py @@ -15,8 +15,8 @@ # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -29,7 +29,7 @@ # Verify that the content is valid yaml parsed_yaml_conf = yaml.safe_load(yaml_conf) -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") # # Push the configuration @@ -40,7 +40,7 @@ # Check if everything went well # if ok: - print('system falco rules set successfully') + print("system falco rules set successfully") else: print(res) sys.exit(1) diff --git a/examples/set_secure_user_falco_rules.py b/examples/set_secure_user_falco_rules.py index ec790375..f48771c2 100755 --- a/examples/set_secure_user_falco_rules.py +++ b/examples/set_secure_user_falco_rules.py @@ -15,8 +15,8 @@ # Parse arguments # if len(sys.argv) != 3: - print(('usage: %s ' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -29,7 +29,7 @@ # Verify 
that the content is valid yaml parsed_yaml_conf = yaml.safe_load(yaml_conf) -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") # # Push the configuration @@ -40,7 +40,7 @@ # Check if everything went well # if ok: - print('user falco rules set successfully') + print("user falco rules set successfully") else: print(res) sys.exit(1) diff --git a/examples/update_access_keys.py b/examples/update_access_keys.py index 27e9803d..c96c88c6 100755 --- a/examples/update_access_keys.py +++ b/examples/update_access_keys.py @@ -12,43 +12,48 @@ # Parse arguments # if len(sys.argv) != 2: - print('usage: %s ' % sys.argv[0]) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') - print('For this script to work, the user for the token must have Admin rights') + print("usage: %s " % sys.argv[0]) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") + print("For this script to work, the user for the token must have Admin rights") sys.exit(1) sdc_token = sys.argv[1] # Access Key that needs to be updated -accessKey = '' +accessKey = "" # Maximum number of agents allowed to connect for this access key. Set to '' if not required -agent_limit = '' +agent_limit = "" # Number of agent licenses that are ALWAYS available to this access key. This directly counts against the maximum number of available licenses. Set to '' if not required. -agent_reserved = '' +agent_reserved = "" # Team ID to which to assign the access key. Team ID must be valid. Set to '' if not required. -team_id = '' +team_id = "" # # Instantiate the SDC client # -sdclient = SdcClient(sdc_token, 'https://app.sysdigcloud.com') +sdclient = SdcClient(sdc_token, "https://app.sysdigcloud.com") # # Get the configuration # if accessKey: ok, res = sdclient.update_access_key( - accessKey, - agent_limit, - agent_reserved, - team_id) + accessKey, agent_limit, agent_reserved, team_id + ) else: - print('Please specify the Access Key that you would like to be updated') + print("Please specify the Access Key that you would like to be updated") sys.exit(1) if ok: - print('Access Key: {}\nTeam ID: {}\nAgent Limit: {}\nAgent Reserved: {}\n==========='.format(res['customerAccessKey']['accessKey'], res['customerAccessKey']['teamId'], res['customerAccessKey']['limit'], res['customerAccessKey']['reservation'])) + print( + "Access Key: {}\nTeam ID: {}\nAgent Limit: {}\nAgent Reserved: {}\n===========".format( + res["customerAccessKey"]["accessKey"], + res["customerAccessKey"]["teamId"], + res["customerAccessKey"]["limit"], + res["customerAccessKey"]["reservation"], + ) + ) else: print(res) sys.exit(1) diff --git a/examples/update_alert.py b/examples/update_alert.py index d134d7b8..dfa86387 100755 --- a/examples/update_alert.py +++ b/examples/update_alert.py @@ -16,9 +16,9 @@ # Parse arguments # def usage(): - print(('usage: %s [-a|--alert ] ' % sys.argv[0])) - print('-a|--alert: Set name of alert to update') - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s [-a|--alert ] " % sys.argv[0])) + print("-a|--alert: Set name of alert to update") + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) @@ -48,17 +48,21 @@ def usage(): sys.exit(1) alert_found = False -for alert in res['alerts']: - if alert['name'] == alert_name: +for alert in res["alerts"]: + if alert["name"] == alert_name: alert_found = True - print('Updating alert. 
Configuration before changing timespan, description, and notification channels:') + print( + "Updating alert. Configuration before changing timespan, description, and notification channels:" + ) print((json.dumps(alert, sort_keys=True, indent=4))) - if 'notificationChannelIds' in alert: - alert['notificationChannelIds'] = alert['notificationChannelIds'][0:-1] - update_txt = ' (changed by update_alert)' - if alert['description'][-len(update_txt):] != update_txt: - alert['description'] = alert['description'] + update_txt - alert['timespan'] = alert['timespan'] * 2 # Note: Expressed in seconds * 1000000 + if "notificationChannelIds" in alert: + alert["notificationChannelIds"] = alert["notificationChannelIds"][0:-1] + update_txt = " (changed by update_alert)" + if alert["description"][-len(update_txt) :] != update_txt: + alert["description"] = alert["description"] + update_txt + alert["timespan"] = ( + alert["timespan"] * 2 + ) # Note: Expressed in seconds * 1000000 ok, res_update = sdclient.update_alert(alert) if not ok: @@ -66,9 +70,9 @@ def usage(): sys.exit(1) # Validate and print the results - print('\nAlert after modification:') + print("\nAlert after modification:") print((json.dumps(res_update, sort_keys=True, indent=4))) if not alert_found: - print('Alert to be updated not found') + print("Alert to be updated not found") sys.exit(1) diff --git a/examples/update_policy.py b/examples/update_policy.py index f5b8d3ab..4f04ee00 100755 --- a/examples/update_policy.py +++ b/examples/update_policy.py @@ -10,9 +10,9 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('Reads json representing updated policy from standard input') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("Reads json representing updated policy from standard input") + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -28,7 +28,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClient(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.update_policy_json(policy_json) diff --git a/examples/update_policy_v1.py b/examples/update_policy_v1.py index d4eb056a..ebec1ef8 100755 --- a/examples/update_policy_v1.py +++ b/examples/update_policy_v1.py @@ -10,9 +10,9 @@ def usage(): - print(('usage: %s ' % sys.argv[0])) - print('Reads json representing updated policy from standard input') - print('You can find your token at https://secure.sysdig.com/#/settings/user') + print(("usage: %s " % sys.argv[0])) + print("Reads json representing updated policy from standard input") + print("You can find your token at https://secure.sysdig.com/#/settings/user") sys.exit(1) @@ -28,7 +28,7 @@ def usage(): # # Instantiate the SDC client # -sdclient = SdSecureClientV1(sdc_token, 'https://secure.sysdig.com') +sdclient = SdSecureClientV1(sdc_token, "https://secure.sysdig.com") ok, res = sdclient.update_policy(policy_json) diff --git a/examples/user_team_mgmt.py b/examples/user_team_mgmt.py index 214049d1..8eb37031 100755 --- a/examples/user_team_mgmt.py +++ b/examples/user_team_mgmt.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 4: - print(('usage: %s team-name user-name' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s team-name user-name" % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) 
sdc_token = sys.argv[1] @@ -20,21 +20,23 @@ # # Instantiate the SDC client # -sdclient = SdcClient(sdc_token, sdc_url='https://app.sysdigcloud.com') +sdclient = SdcClient(sdc_token, sdc_url="https://app.sysdigcloud.com") team_name = sys.argv[2] user_name = sys.argv[3] -print(('Trying to invite a user:', user_name)) +print(("Trying to invite a user:", user_name)) ok, res = sdclient.create_user_invite(user_name) if not ok: - if res == 'user ' + user_name + ' already exists': - print(('User creation failed because', user_name, 'already exists. Continuing.')) + if res == "user " + user_name + " already exists": + print( + ("User creation failed because", user_name, "already exists. Continuing.") + ) else: - print(('User creation failed:', res, '. Exiting.')) + print(("User creation failed:", res, ". Exiting.")) sys.exit(1) else: - print('User creation succeeded') + print("User creation succeeded") # Possible failures on Team creation might include having reached the # max limit on Teams for this customer account or if the Team by that @@ -42,48 +44,50 @@ # would have deleted the Team by the same name, and we need to be able # to configure Teams for this test to pass, we'll treat both types of # error as a genuine fail of the test. -print(('Now trying to create a team with name:', team_name)) +print(("Now trying to create a team with name:", team_name)) ok, res = sdclient.create_team(team_name) if not ok: - print(('Team creation failed:', res, '. Exiting.')) + print(("Team creation failed:", res, ". Exiting.")) sys.exit(1) else: - print(('Team creation succeeded.', res)) + print(("Team creation succeeded.", res)) -print(('Now trying to find team with name:', team_name)) +print(("Now trying to find team with name:", team_name)) ok, res = sdclient.get_team(team_name) if not ok: - print(('Could not get team info:', res, '. Exiting.')) + print(("Could not get team info:", res, ". Exiting.")) sys.exit(1) else: - print('Team fetch succeeded') + print("Team fetch succeeded") -print(('Now trying to edit team:', team_name)) +print(("Now trying to edit team:", team_name)) memberships = { - 'admin@draios.com': 'ROLE_TEAM_MANAGER', - 'john-doe@sysdig.com': 'ROLE_TEAM_READ' + "admin@draios.com": "ROLE_TEAM_MANAGER", + "john-doe@sysdig.com": "ROLE_TEAM_READ", } -ok, res = sdclient.edit_team(team_name, description='Nextgen2', memberships=memberships) +ok, res = sdclient.edit_team(team_name, description="Nextgen2", memberships=memberships) if not ok: - print(('Could not edit team:', res, '. Exiting.')) + print(("Could not edit team:", res, ". Exiting.")) sys.exit(1) else: - print('Edited team to change description and add users') + print("Edited team to change description and add users") -print(('Now trying to edit user:', user_name)) -ok, res = sdclient.edit_user(user_name, firstName='Just', lastName='Edited3', systemRole='ROLE_CUSTOMER') +print(("Now trying to edit user:", user_name)) +ok, res = sdclient.edit_user( + user_name, firstName="Just", lastName="Edited3", systemRole="ROLE_CUSTOMER" +) if not ok: - print(('Could not edit user:', res, '. Exiting.')) + print(("Could not edit user:", res, ". Exiting.")) sys.exit(1) else: - print('Edit user succeeded') + print("Edit user succeeded") -print(('Now trying to delete the team:', team_name)) +print(("Now trying to delete the team:", team_name)) ok, res = sdclient.delete_team(team_name) if not ok: - print(('Could not delete team:', res, '. Exiting.')) + print(("Could not delete team:", res, ". 
Exiting.")) sys.exit(1) else: - print('Delete team succeeded') + print("Delete team succeeded") sys.exit(0) diff --git a/examples/user_team_mgmt_extended.py b/examples/user_team_mgmt_extended.py index ff33a1f9..c2375507 100755 --- a/examples/user_team_mgmt_extended.py +++ b/examples/user_team_mgmt_extended.py @@ -11,8 +11,8 @@ # Parse arguments # if len(sys.argv) != 4: - print(('usage: %s team-prefix user-name' % sys.argv[0])) - print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + print(("usage: %s team-prefix user-name" % sys.argv[0])) + print("You can find your token at https://app.sysdigcloud.com/#/settings/user") sys.exit(1) sdc_token = sys.argv[1] @@ -20,11 +20,11 @@ # # Instantiate the SDC client # -sdclient = SdcClient(sdc_token, sdc_url='https://app.sysdigcloud.com') +sdclient = SdcClient(sdc_token, sdc_url="https://app.sysdigcloud.com") team_prefix = sys.argv[2] -user_email_parts = sys.argv[3].split('@') +user_email_parts = sys.argv[3].split("@") user_email_prefix = user_email_parts[0] user_email_domain = user_email_parts[1] @@ -34,36 +34,42 @@ # All users initially are part of default team. # -admin = user_email_prefix + '+team_mgmt-admin' + '@' + user_email_domain -userA = user_email_prefix + '+team_mgmt-a' + '@' + user_email_domain -userB = user_email_prefix + '+team_mgmt-b' + '@' + user_email_domain +admin = user_email_prefix + "+team_mgmt-admin" + "@" + user_email_domain +userA = user_email_prefix + "+team_mgmt-a" + "@" + user_email_domain +userB = user_email_prefix + "+team_mgmt-b" + "@" + user_email_domain -teamA = team_prefix + 'A' -teamB = team_prefix + 'B' +teamA = team_prefix + "A" +teamB = team_prefix + "B" -print('Creating test users...') +print("Creating test users...") try: - ok, res = sdclient.create_user_invite(admin, first_name='TestUser', last_name='Admin', system_role='ROLE_CUSTOMER') + ok, res = sdclient.create_user_invite( + admin, first_name="TestUser", last_name="Admin", system_role="ROLE_CUSTOMER" + ) if not ok: - print(('-- User creation failed:', res, '. Exiting.')) + print(("-- User creation failed:", res, ". Exiting.")) sys.exit(1) else: - print(('-- User \'', admin, '\' created successfully.')) + print(("-- User '", admin, "' created successfully.")) - ok, res = sdclient.create_user_invite(userA, first_name='TestUser', last_name='Alpha') + ok, res = sdclient.create_user_invite( + userA, first_name="TestUser", last_name="Alpha" + ) if not ok: - print(('-- User creation failed:', res, '. Exiting.')) + print(("-- User creation failed:", res, ". Exiting.")) sys.exit(1) else: - print(('-- User \'', userA, '\' created successfully.')) + print(("-- User '", userA, "' created successfully.")) - ok, res = sdclient.create_user_invite(userB, first_name='TestUser', last_name='Beta') + ok, res = sdclient.create_user_invite( + userB, first_name="TestUser", last_name="Beta" + ) if not ok: - print(('-- User creation failed:', res, '. Exiting.')) + print(("-- User creation failed:", res, ". Exiting.")) sys.exit(1) else: - print(('-- User \'', userB, '\' created successfully.')) + print(("-- User '", userB, "' created successfully.")) # # Create test teams @@ -76,21 +82,21 @@ # error as a genuine fail of the test. # - print('Creating test teams...') + print("Creating test teams...") ok, res = sdclient.create_team(teamA) if not ok: - print(('-- Team creation failed:', res, '. Exiting.')) + print(("-- Team creation failed:", res, ". 
Exiting.")) sys.exit(1) else: - print(('-- Team \'', teamA, '\' created successfully.')) + print(("-- Team '", teamA, "' created successfully.")) ok, res = sdclient.create_team(teamB) if not ok: - print(('-- Team creation failed:', res, '. Exiting.')) + print(("-- Team creation failed:", res, ". Exiting.")) sys.exit(1) else: - print(('-- Team \'', teamB, '\' created successfully.')) + print(("-- Team '", teamB, "' created successfully.")) # # Membership manipulation @@ -98,168 +104,298 @@ # Admins are part of all teams and their membership cannot be edited. # - print('Membership manipulation...') + print("Membership manipulation...") ok, res = sdclient.list_memberships(teamA) if not ok: - print(('-- Unable to fetch team memberships:', res, '. Exiting.')) + print(("-- Unable to fetch team memberships:", res, ". Exiting.")) sys.exit(1) elif admin not in list(res.keys()): - print(('-- Admin should be part of all teams!', 'Exiting.')) + print(("-- Admin should be part of all teams!", "Exiting.")) sys.exit(1) elif userA in list(res.keys()) or userB in list(res.keys()): - print(('-- Users ', userA, ' and ', userB, ' should not be part of team ', teamA, '!', 'Exiting.')) + print( + ( + "-- Users ", + userA, + " and ", + userB, + " should not be part of team ", + teamA, + "!", + "Exiting.", + ) + ) sys.exit(1) ok, res = sdclient.list_memberships(teamB) if not ok: - print(('-- Unable to fetch team memberships:', res, '. Exiting.')) + print(("-- Unable to fetch team memberships:", res, ". Exiting.")) sys.exit(1) elif admin not in list(res.keys()): - print(('-- Admin should be part of all teams!', 'Exiting.')) + print(("-- Admin should be part of all teams!", "Exiting.")) sys.exit(1) elif userA in list(res.keys()) or userB in list(res.keys()): - print(('-- Users ', userA, ' and ', userB, ' should not be part of team ', teamB, '!', 'Exiting.')) + print( + ( + "-- Users ", + userA, + " and ", + userB, + " should not be part of team ", + teamB, + "!", + "Exiting.", + ) + ) sys.exit(1) # # Create team memberships # - print('-- Create team memberships') + print("-- Create team memberships") # Manipulate with teamA - ok, res = sdclient.save_memberships(teamA, {userA: 'ROLE_TEAM_EDIT'}) + ok, res = sdclient.save_memberships(teamA, {userA: "ROLE_TEAM_EDIT"}) if not ok: - print(('-- Unable to add ', userA, ' to ', teamA, ' due to: ', res, '. Exiting.')) + print( + ("-- Unable to add ", userA, " to ", teamA, " due to: ", res, ". Exiting.") + ) sys.exit(1) ok, res = sdclient.list_memberships(teamA) if not ok: - print(('-- Unable to fetch team memberships:', res, '. Exiting.')) + print(("-- Unable to fetch team memberships:", res, ". Exiting.")) sys.exit(1) elif userA not in list(res.keys()) or admin not in list(res.keys()): - print(('-- Users ', userA, ' and ', admin, ' should be part of team ', teamA, '!', 'Exiting.')) + print( + ( + "-- Users ", + userA, + " and ", + admin, + " should be part of team ", + teamA, + "!", + "Exiting.", + ) + ) sys.exit(1) # Manipulate with teamB - ok, res = sdclient.save_memberships(teamB, {userA: 'ROLE_TEAM_MANAGER', userB: 'ROLE_TEAM_READ'}) + ok, res = sdclient.save_memberships( + teamB, {userA: "ROLE_TEAM_MANAGER", userB: "ROLE_TEAM_READ"} + ) if not ok: - print(('-- Unable to add ', userA, ' and ', userB, ' to ', teamB, ' due to: ', res, '. Exiting.')) + print( + ( + "-- Unable to add ", + userA, + " and ", + userB, + " to ", + teamB, + " due to: ", + res, + ". 
Exiting.", + ) + ) sys.exit(1) ok, res = sdclient.list_memberships(teamB) if not ok: - print(('-- Unable to fetch team memberships:', res, '. Exiting.')) + print(("-- Unable to fetch team memberships:", res, ". Exiting.")) sys.exit(1) - elif userA not in list(res.keys()) or userB not in list(res.keys()) or admin not in list(res.keys()): - print(('-- Users ', userA, ', ', userB, ' and ', admin, ' should be part of team ', teamB, '!', 'Exiting.')) + elif ( + userA not in list(res.keys()) + or userB not in list(res.keys()) + or admin not in list(res.keys()) + ): + print( + ( + "-- Users ", + userA, + ", ", + userB, + " and ", + admin, + " should be part of team ", + teamB, + "!", + "Exiting.", + ) + ) sys.exit(1) # Update team memberships - print('-- Update team memberships') + print("-- Update team memberships") # Add new or update existing memberships - ok, res = sdclient.save_memberships(teamA, {userA: 'ROLE_TEAM_READ', userB: 'ROLE_TEAM_EDIT'}) + ok, res = sdclient.save_memberships( + teamA, {userA: "ROLE_TEAM_READ", userB: "ROLE_TEAM_EDIT"} + ) if not ok: - print(('-- Unable to modify membership for ', userA, ' and to add ', userB, ' to ', teamA, ' due to: ', res, - '. Exiting.')) + print( + ( + "-- Unable to modify membership for ", + userA, + " and to add ", + userB, + " to ", + teamA, + " due to: ", + res, + ". Exiting.", + ) + ) sys.exit(1) ok, res = sdclient.list_memberships(teamA) if not ok: - print(('-- Unable to fetch team memberships:', res, '. Exiting.')) + print(("-- Unable to fetch team memberships:", res, ". Exiting.")) sys.exit(1) - elif userA not in list(res.keys()) or userB not in list(res.keys()) or admin not in list(res.keys()): - print(('-- Users ', userA, ', ', userB, ' and ', admin, ' should be part of team ', teamA, '!', 'Exiting.')) + elif ( + userA not in list(res.keys()) + or userB not in list(res.keys()) + or admin not in list(res.keys()) + ): + print( + ( + "-- Users ", + userA, + ", ", + userB, + " and ", + admin, + " should be part of team ", + teamA, + "!", + "Exiting.", + ) + ) sys.exit(1) - elif res[userA] != 'ROLE_TEAM_READ' or res[userB] != 'ROLE_TEAM_EDIT': - print(('-- Users ', userA, ' and ', userB, ' should have appropriate roles assigned for team ', teamA, '!', - 'Exiting.')) + elif res[userA] != "ROLE_TEAM_READ" or res[userB] != "ROLE_TEAM_EDIT": + print( + ( + "-- Users ", + userA, + " and ", + userB, + " should have appropriate roles assigned for team ", + teamA, + "!", + "Exiting.", + ) + ) sys.exit(1) # Remove team memberships - print('-- Remove team memberships') + print("-- Remove team memberships") ok, res = sdclient.remove_memberships(teamA, [userB]) if not ok: - print(('-- Unable to remove membership for ', userB, ' from team', teamA, ' due to: ', res, '. Exiting.')) + print( + ( + "-- Unable to remove membership for ", + userB, + " from team", + teamA, + " due to: ", + res, + ". Exiting.", + ) + ) sys.exit(1) ok, res = sdclient.list_memberships(teamA) if not ok: - print(('-- Unable to fetch team memberships:', res, '. Exiting.')) + print(("-- Unable to fetch team memberships:", res, ". Exiting.")) sys.exit(1) elif userB in list(res.keys()): - print(('-- User ', userB, ' should not be part of team ', teamA, '!', 'Exiting.')) + print( + ("-- User ", userB, " should not be part of team ", teamA, "!", "Exiting.") + ) sys.exit(1) # Admin user cannot be removed from any team ok, res = sdclient.remove_memberships(teamB, [admin, userA]) if not ok: - print(('-- Unable to remove membership for ', userB, ' from team', teamA, ' due to: ', res, '. 
Exiting.')) + print( + ( + "-- Unable to remove membership for ", + userB, + " from team", + teamA, + " due to: ", + res, + ". Exiting.", + ) + ) sys.exit(1) ok, res = sdclient.list_memberships(teamB) if not ok: - print(('-- Unable to fetch team memberships:', res, '. Exiting.')) + print(("-- Unable to fetch team memberships:", res, ". Exiting.")) sys.exit(1) elif userA in list(res.keys()): - print(('-- User ', userA, ' should not be part of team ', teamB, '!', 'Exiting.')) + print( + ("-- User ", userA, " should not be part of team ", teamB, "!", "Exiting.") + ) sys.exit(1) elif admin not in list(res.keys()): - print(('-- User ', admin, ' should be always part of all teams!', 'Exiting.')) + print(("-- User ", admin, " should be always part of all teams!", "Exiting.")) sys.exit(1) finally: # # Clean-up # - print('Cleaning up...') + print("Cleaning up...") - print('-- Deleting test teams.') + print("-- Deleting test teams.") try: ok, res = sdclient.delete_team(teamA) if not ok: - print(('-- Team \'', teamA, '\' deletion failed: ', res)) + print(("-- Team '", teamA, "' deletion failed: ", res)) except Exception as exception: - print(('-- Team \'', teamA, '\' deletion failed: ', exception)) + print(("-- Team '", teamA, "' deletion failed: ", exception)) try: ok, res = sdclient.delete_team(teamB) if not ok: - print(('-- Team \'', teamB, '\' deletion failed: ', res)) + print(("-- Team '", teamB, "' deletion failed: ", res)) except Exception as exception: - print(('-- Team \'', teamB, '\' deletion failed: ', exception)) + print(("-- Team '", teamB, "' deletion failed: ", exception)) - print('-- Deleting test users.') + print("-- Deleting test users.") try: ok, res = sdclient.delete_user(admin) if not ok: - print(('-- User \'', admin, '\' deletion failed: ', res)) + print(("-- User '", admin, "' deletion failed: ", res)) except Exception as exception: - print(('-- User \'', admin, '\' deletion failed: ', exception)) + print(("-- User '", admin, "' deletion failed: ", exception)) try: ok, res = sdclient.delete_user(userA) if not ok: - print(('-- User \'', userA, '\' deletion failed: ', res)) + print(("-- User '", userA, "' deletion failed: ", res)) except Exception as exception: - print(('-- User \'', userA, '\' deletion failed: ', exception)) + print(("-- User '", userA, "' deletion failed: ", exception)) try: ok, res = sdclient.delete_user(userB) if not ok: - print(('-- User \'', userB, '\' deletion failed: ', res)) + print(("-- User '", userB, "' deletion failed: ", res)) except Exception as exception: - print(('-- User \'', userB, '\' deletion failed: ', exception)) + print(("-- User '", userB, "' deletion failed: ", exception)) -print('All done successfully!!!') +print("All done successfully!!!") sys.exit(0) diff --git a/sdcclient/__init__.py b/sdcclient/__init__.py index b431fdf1..9cb47551 100644 --- a/sdcclient/__init__.py +++ b/sdcclient/__init__.py @@ -7,5 +7,14 @@ from sdcclient._secure_v1 import SdSecureClientV1 from sdcclient.ibm_auth_helper import IbmAuthHelper -__all__ = ["SdMonitorClient", "SdcClient", "SdMonitorClientV1", "SdScanningClient", "SdSecureClient", - "SdSecureClientV1", "IbmAuthHelper", "monitor", "secure"] +__all__ = [ + "SdMonitorClient", + "SdcClient", + "SdMonitorClientV1", + "SdScanningClient", + "SdSecureClient", + "SdSecureClientV1", + "IbmAuthHelper", + "monitor", + "secure", +] diff --git a/sdcclient/_common.py b/sdcclient/_common.py index 77510a1c..94faee70 100644 --- a/sdcclient/_common.py +++ b/sdcclient/_common.py @@ -28,7 +28,7 @@ def send(self, request, 
**kwargs): class _SdcCommon(object): - '''Interact with the Sysdig Monitor/Secure API. + """Interact with the Sysdig Monitor/Secure API. **Arguments** - **token**: A Sysdig Monitor/Secure API token from the *Sysdig Cloud API* section of the Settings page for `monitor `_ or .`secure `_. @@ -38,19 +38,26 @@ class _SdcCommon(object): **Returns** An object for further interactions with the Sysdig Monitor/Secure API. See methods below. - ''' + """ + lasterr = None - def __init__(self, token="", sdc_url='https://app.sysdigcloud.com', ssl_verify=True, custom_headers=None): + def __init__( + self, + token="", + sdc_url="https://app.sysdigcloud.com", + ssl_verify=True, + custom_headers=None, + ): self.token = os.environ.get("SDC_TOKEN", token) self.hdrs = self.__get_headers(custom_headers) - self.url = os.environ.get("SDC_URL", sdc_url).rstrip('/') + self.url = os.environ.get("SDC_URL", sdc_url).rstrip("/") self.ssl_verify = os.environ.get("SDC_SSL_VERIFY", None) if self.ssl_verify is None: self.ssl_verify = ssl_verify else: - if self.ssl_verify.lower() in ['true', 'false']: - self.ssl_verify = self.ssl_verify.lower() == 'true' + if self.ssl_verify.lower() in ["true", "false"]: + self.ssl_verify = self.ssl_verify.lower() == "true" adapter = SysdigHTTPAdapter(ssl_verify=self.ssl_verify) self.http = requests.Session() @@ -59,48 +66,50 @@ def __init__(self, token="", sdc_url='https://app.sysdigcloud.com', ssl_verify=T def __get_headers(self, custom_headers): headers = { - 'Content-Type': 'application/json', - 'Authorization': 'Bearer ' + self.token + "Content-Type": "application/json", + "Authorization": "Bearer " + self.token, } if custom_headers: headers.update(custom_headers) return headers def _checkResponse(self, res): - if res.status_code >= 300: # FIXME: Should it be >=400? 301 = Moved Permanently, 302 = Found, 303 = See Other + if ( + res.status_code >= 300 + ): # FIXME: Should it be >=400? 301 = Moved Permanently, 302 = Found, 303 = See Other errorcode = res.status_code self.lasterr = None try: j = res.json() except Exception: - self.lasterr = 'status code ' + str(errorcode) + self.lasterr = "status code " + str(errorcode) return False - if 'errors' in j: + if "errors" in j: error_msgs = [] - for error in j['errors']: + for error in j["errors"]: error_msg = [] - if 'message' in error: - error_msg.append(error['message']) + if "message" in error: + error_msg.append(error["message"]) - if 'reason' in error: - error_msg.append(error['reason']) + if "reason" in error: + error_msg.append(error["reason"]) - error_msgs.append(': '.join(error_msg)) + error_msgs.append(": ".join(error_msg)) - self.lasterr = '\n'.join(error_msgs) - elif 'message' in j: - self.lasterr = j['message'] - elif 'error' in j: - self.lasterr = j['error'] + self.lasterr = "\n".join(error_msgs) + elif "message" in j: + self.lasterr = j["message"] + elif "error" in j: + self.lasterr = j["error"] else: - self.lasterr = 'status code ' + str(errorcode) + self.lasterr = "status code " + str(errorcode) return False return True def get_user_info(self): - '''**Description** + """**Description** Get details about the current user. 
**Success Return Value** @@ -108,52 +117,64 @@ def get_user_info(self): **Example** `examples/print_user_info.py `_ - ''' - res = self.http.get(self.url + '/api/user/me', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/user/me", headers=self.hdrs, verify=self.ssl_verify + ) return self._request_result(res) def get_user_token(self): - '''**Description** + """**Description** Return the API token of the current user. **Success Return Value** A string containing the user token. - ''' - res = self.http.get(self.url + '/api/token', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/token", headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] tkinfo = res.json() - return [True, tkinfo['token']['key']] + return [True, tkinfo["token"]["key"]] def get_connected_agents(self): - '''**Description** + """**Description** Return the agents currently connected to Sysdig Monitor for the current user. **Success Return Value** A list of the agents with all their attributes. - ''' - res = self.http.get(self.url + '/api/agents/connected', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/agents/connected", + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] data = res.json() - return [True, data['agents']] + return [True, data["agents"]] def get_n_connected_agents(self): - '''**Description** + """**Description** Return the number of agents currently connected to Sysdig Monitor for the current user. **Success Return Value** An integer number. - ''' - res = self.http.get(self.url + '/api/agents/connected', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/agents/connected", + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] data = res.json() - return [True, data['total']] + return [True, data["total"]] def list_notification_channels(self): - '''**Description** + """**Description** List all configured Notification Channels **Arguments** @@ -161,12 +182,16 @@ def list_notification_channels(self): **Success Return Value** A JSON representation of all the notification channels - ''' - res = self.http.get(self.url + '/api/notificationChannels', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/notificationChannels", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_notification_ids(self, channels=None): - '''**Description** + """**Description** Get an array of all configured Notification Channel IDs, or a filtered subset of them. **Arguments** @@ -178,9 +203,13 @@ def get_notification_ids(self, channels=None): **Examples** - `examples/create_alert.py `_ - `examples/restore_alerts.py `_ - ''' + """ - res = self.http.get(self.url + '/api/notificationChannels', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/notificationChannels", + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return False, self.lasterr @@ -191,7 +220,7 @@ def get_notification_ids(self, channels=None): # just return them all. if channels is None: for ch in res.json()["notificationChannels"]: - ids.append(ch['id']) + ids.append(ch["id"]) return [True, ids] # Return the filtered set of channels based on the provided types/names array. 
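For reference while reviewing the re-quoted matching chain in the next hunk: `get_notification_ids(channels=None)` takes a list of filter dicts and returns the matching channel ids, as the code below the hunk header shows. A minimal sketch of how that filter argument is shaped; the token placeholder, channel name, and email addresses here are illustrative, not part of this patch:

    from sdcclient import SdcClient

    sdclient = SdcClient("<sysdig-token>", "https://app.sysdigcloud.com")

    # Each filter dict needs a "type"; SNS topics and EMAIL recipients are
    # compared as sets, PAGER_DUTY channels by account/serviceName, and the
    # remaining types (SLACK, OPSGENIE, VICTOROPS, WEBHOOK) by name.
    ok, res = sdclient.get_notification_ids(
        [
            {"type": "SLACK", "name": "On-call"},
            {"type": "EMAIL", "emailRecipients": ["foo@sysdig.com", "bar@sysdig.com"]},
        ]
    )
    if ok:
        channel_ids = res  # list of matching notification channel ids
    else:
        print(res)  # e.g. "Channel not found: ..."

On success the call returns `[True, ids]`; on a miss it returns `False` with a "Channel not found" message, as the matching loop in the hunk below makes explicit.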
@@ -199,47 +228,50 @@ def get_notification_ids(self, channels=None): for c in channels: found = False for ch in res.json()["notificationChannels"]: - if c['type'] == ch['type']: - if c['type'] == 'SNS': - opt = ch['options'] - if set(opt['snsTopicARNs']) == set(c['snsTopicARNs']): + if c["type"] == ch["type"]: + if c["type"] == "SNS": + opt = ch["options"] + if set(opt["snsTopicARNs"]) == set(c["snsTopicARNs"]): found = True - ids.append(ch['id']) - elif c['type'] == 'EMAIL': - opt = ch['options'] - if 'emailRecipients' in c: - if set(c['emailRecipients']) == set(opt['emailRecipients']): + ids.append(ch["id"]) + elif c["type"] == "EMAIL": + opt = ch["options"] + if "emailRecipients" in c: + if set(c["emailRecipients"]) == set(opt["emailRecipients"]): found = True - ids.append(ch['id']) - elif 'name' in c: - if c['name'] == ch.get('name'): + ids.append(ch["id"]) + elif "name" in c: + if c["name"] == ch.get("name"): found = True - ids.append(ch['id']) - elif c['type'] == 'PAGER_DUTY': - opt = ch['options'] - if opt['account'] == c['account'] and opt['serviceName'] == c['serviceName']: + ids.append(ch["id"]) + elif c["type"] == "PAGER_DUTY": + opt = ch["options"] + if ( + opt["account"] == c["account"] + and opt["serviceName"] == c["serviceName"] + ): found = True - ids.append(ch['id']) - elif c['type'] == 'SLACK': - if 'name' in c: - if c['name'] == ch.get('name'): + ids.append(ch["id"]) + elif c["type"] == "SLACK": + if "name" in c: + if c["name"] == ch.get("name"): found = True - ids.append(ch['id']) - elif c['type'] == 'OPSGENIE': - if 'name' in c: - if c['name'] == ch.get('name'): + ids.append(ch["id"]) + elif c["type"] == "OPSGENIE": + if "name" in c: + if c["name"] == ch.get("name"): found = True - ids.append(ch['id']) - elif c['type'] == 'VICTOROPS': - if 'name' in c: - if c['name'] == ch.get('name'): + ids.append(ch["id"]) + elif c["type"] == "VICTOROPS": + if "name" in c: + if c["name"] == ch.get("name"): found = True - ids.append(ch['id']) - elif c['type'] == 'WEBHOOK': - if 'name' in c: - if c['name'] == ch.get('name'): + ids.append(ch["id"]) + elif c["type"] == "WEBHOOK": + if "name" in c: + if c["name"] == ch.get("name"): found = True - ids.append(ch['id']) + ids.append(ch["id"]) if not found: return False, "Channel not found: " + str(c) @@ -247,18 +279,20 @@ def get_notification_ids(self, channels=None): def create_email_notification_channel(self, channel_name, email_recipients): channel_json = { - 'notificationChannel': { - 'type': 'EMAIL', - 'name': channel_name, - 'enabled': True, - 'options': { - 'emailRecipients': email_recipients - } + "notificationChannel": { + "type": "EMAIL", + "name": channel_name, + "enabled": True, + "options": {"emailRecipients": email_recipients}, } } - res = self.http.post(self.url + '/api/notificationChannels', headers=self.hdrs, data=json.dumps(channel_json), - verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/notificationChannels", + headers=self.hdrs, + data=json.dumps(channel_json), + verify=self.ssl_verify, + ) return self._request_result(res) def create_notification_channel(self, channel): @@ -266,43 +300,54 @@ def create_notification_channel(self, channel): channel["version"] = None channel["createdOn"] = None channel["modifiedOn"] = None - channel_json = { - 'notificationChannel': channel - } + channel_json = {"notificationChannel": channel} - res = self.http.post(self.url + '/api/notificationChannels', headers=self.hdrs, data=json.dumps(channel_json), - verify=self.ssl_verify) + res = self.http.post( + self.url + 
"/api/notificationChannels", + headers=self.hdrs, + data=json.dumps(channel_json), + verify=self.ssl_verify, + ) return self._request_result(res) def get_notification_channel(self, id): - - res = self.http.get(self.url + '/api/notificationChannels/' + str(id), headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/notificationChannels/" + str(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return False, self.lasterr - return True, res.json()['notificationChannel'] + return True, res.json()["notificationChannel"] def update_notification_channel(self, channel): - if 'id' not in channel: + if "id" not in channel: return [False, "Invalid channel format"] - res = self.http.put(self.url + '/api/notificationChannels/' + str(channel['id']), headers=self.hdrs, - data=json.dumps({"notificationChannel": channel}), verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/notificationChannels/" + str(channel["id"]), + headers=self.hdrs, + data=json.dumps({"notificationChannel": channel}), + verify=self.ssl_verify, + ) return self._request_result(res) def delete_notification_channel(self, channel): - if 'id' not in channel: + if "id" not in channel: return [False, "Invalid channel format"] - res = self.http.delete(self.url + '/api/notificationChannels/' + str(channel['id']), headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.delete( + self.url + "/api/notificationChannels/" + str(channel["id"]), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return False, self.lasterr return True, None def get_data_retention_info(self): - '''**Description** + """**Description** Return the list of data retention intervals, with beginning and end UTC time for each of them. Sysdig Monitor performs rollups of the data it stores. This means that data is stored at different time granularities depending on how far back in time it is. This call can be used to know what precision you can expect before you make a call to :func:`~SdcClient.get_data`. **Success Return Value** @@ -310,12 +355,16 @@ def get_data_retention_info(self): **Example** `examples/print_data_retention_info.py `_ - ''' - res = self.http.get(self.url + '/api/history/timelines/', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/history/timelines/", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_data_promql(self, query, start, end, step, timeout=None, limit=None): - '''**Description** + """**Description** Evaluate an expression query over a specified time range. **Arguments** @@ -333,12 +382,12 @@ def get_data_promql(self, query, start, end, step, timeout=None, limit=None): **Examples** - `examples/get_data_promql_simple.py `_ - `examples/get_data_promql_advanced.py `_ - ''' + """ params = { - "query": query, - "start": start, - "end": end, - "step": step, + "query": query, + "start": start, + "end": end, + "step": step, } if timeout: @@ -351,7 +400,7 @@ def get_data_promql(self, query, start, end, step, timeout=None, limit=None): return self._request_result(res) def get_data_promql_instant(self, query, time=None, timeout=None, limit=None): - '''**Description** + """**Description** Evaluate an instant query at a single point in time. 
**Arguments** @@ -366,9 +415,9 @@ def get_data_promql_instant(self, query, time=None, timeout=None, limit=None): **Examples** - `examples/get_data_promql_instant_simple.py `_ - ''' + """ params = { - "query": query, + "query": query, } if time: @@ -383,7 +432,7 @@ def get_data_promql_instant(self, query, time=None, timeout=None, limit=None): return self._request_result(res) def get_series(self, match, start=None, end=None, limit=None): - '''**Description** + """**Description** Retrieve metadata about time series that match a set of label matchers. **Arguments** @@ -397,9 +446,9 @@ def get_series(self, match, start=None, end=None, limit=None): **Examples** - `examples/get_series.py` - ''' + """ params = { - "match[]": match, # `match` should be a list of matchers + "match[]": match, # `match` should be a list of matchers } if start: @@ -414,7 +463,7 @@ def get_series(self, match, start=None, end=None, limit=None): return self._request_result(res) def get_labels(self, match=None, limit=None): - '''**Description** + """**Description** Retrieve metadata about label names. **Arguments** @@ -426,7 +475,7 @@ def get_labels(self, match=None, limit=None): **Examples** - `examples/get_labels.py` - ''' + """ params = {} if match: @@ -439,7 +488,7 @@ def get_labels(self, match=None, limit=None): return self._request_result(res) def get_label_values(self, label_name, match=None, limit=None): - '''**Description** + """**Description** Retrieve the values for a specific label. **Arguments** @@ -452,7 +501,7 @@ def get_label_values(self, label_name, match=None, limit=None): **Examples** - `examples/get_label_values.py` - ''' + """ params = {} if match: @@ -465,7 +514,7 @@ def get_label_values(self, label_name, match=None, limit=None): return self._request_result(res) def get_metadata(self, metric_name=None, limit=None): - '''**Description** + """**Description** Retrieve metadata about metrics. **Arguments** @@ -477,7 +526,7 @@ def get_metadata(self, metric_name=None, limit=None): **Examples** - `examples/get_metadata.py` - ''' + """ params = {} if metric_name: @@ -490,7 +539,7 @@ def get_metadata(self, metric_name=None, limit=None): return self._request_result(res) def get_sysdig_captures(self, from_sec=None, to_sec=None, scope_filter=None): - '''**Description** + """**Description** Returns the list of sysdig captures for the user. **Arguments** @@ -503,18 +552,19 @@ def get_sysdig_captures(self, from_sec=None, to_sec=None, scope_filter=None): **Example** `examples/list_sysdig_captures.py `_ - ''' - url = '{url}/api/sysdig?source={source}{frm}{to}{scopeFilter}'.format( + """ + url = "{url}/api/sysdig?source={source}{frm}{to}{scopeFilter}".format( url=self.url, source=self.product, - frm="&from=%d" % (from_sec * 10 ** 6) if from_sec else "", - to="&to=%d" % (to_sec * 10 ** 6) if to_sec else "", - scopeFilter="&scopeFilter=%s" % scope_filter if scope_filter else "") + frm="&from=%d" % (from_sec * 10**6) if from_sec else "", + to="&to=%d" % (to_sec * 10**6) if to_sec else "", + scopeFilter="&scopeFilter=%s" % scope_filter if scope_filter else "", + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) return self._request_result(res) def poll_sysdig_capture(self, capture): - '''**Description** + """**Description** Fetch the updated state of a sysdig capture. Can be used to poll the status of a capture that has been previously created and started with :func:`~SdcClient.create_sysdig_capture`. 
**Arguments** @@ -525,17 +575,20 @@ def poll_sysdig_capture(self, capture): **Example** `examples/create_sysdig_capture.py `_ - ''' - if 'id' not in capture: - return [False, 'Invalid capture format'] + """ + if "id" not in capture: + return [False, "Invalid capture format"] - url = '{url}/api/sysdig/{id}?source={source}'.format( - url=self.url, id=capture['id'], source=self.product) + url = "{url}/api/sysdig/{id}?source={source}".format( + url=self.url, id=capture["id"], source=self.product + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) return self._request_result(res) - def create_sysdig_capture(self, hostname, capture_name, duration, capture_filter='', folder='/'): - '''**Description** + def create_sysdig_capture( + self, hostname, capture_name, duration, capture_filter="", folder="/" + ): + """**Description** Create a new sysdig capture. The capture will be immediately started. **Arguments** @@ -550,7 +603,7 @@ def create_sysdig_capture(self, hostname, capture_name, duration, capture_filter **Example** `examples/create_sysdig_capture.py `_ - ''' + """ res = self.get_connected_agents() if not res[0]: return res @@ -558,28 +611,33 @@ def create_sysdig_capture(self, hostname, capture_name, duration, capture_filter capture_agent = None for agent in res[1]: - if hostname == agent.get('hostName'): + if hostname == agent.get("hostName"): capture_agent = agent break if capture_agent is None: - return [False, hostname + ' not found'] + return [False, hostname + " not found"] data = { - 'agent': capture_agent, - 'name': capture_name, - 'duration': duration, - 'folder': folder, - 'filters': capture_filter, - 'bucketName': '', - 'source': self.product + "agent": capture_agent, + "name": capture_name, + "duration": duration, + "folder": folder, + "filters": capture_filter, + "bucketName": "", + "source": self.product, } - res = self.http.post(self.url + '/api/sysdig', headers=self.hdrs, data=json.dumps(data), verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/sysdig", + headers=self.hdrs, + data=json.dumps(data), + verify=self.ssl_verify, + ) return self._request_result(res) def download_sysdig_capture(self, capture_id): - '''**Description** + """**Description** Download a sysdig capture by id. **Arguments** @@ -587,9 +645,10 @@ def download_sysdig_capture(self, capture_id): **Success Return Value** The bytes of the scap - ''' - url = '{url}/api/sysdig/{id}/download?_product={product}'.format( - url=self.url, id=capture_id, product=self.product) + """ + url = "{url}/api/sysdig/{id}/download?_product={product}".format( + url=self.url, id=capture_id, product=self.product + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return False, self.lasterr @@ -606,7 +665,11 @@ def delete_sysdig_capture(self, capture_id): Returns: A tuple of (bool, error) where the first value is False if there's an error and the second value is the error message. """ - res = self.http.delete(f'{self.url}/api/sysdig/{capture_id}', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.delete( + f"{self.url}/api/sysdig/{capture_id}", + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return False, self.lasterr @@ -614,7 +677,7 @@ def delete_sysdig_capture(self, capture_id): return True, None def create_user(self, user_email, first_name=None, last_name=None, password=None): - ''' + """ Provisions a new user to use Sysdig without sending an email notification.
If a password is not set through this request, a random one is generated for the user, requiring them to reset their password on first login. @@ -628,7 +691,7 @@ def create_user(self, user_email, first_name=None, last_name=None, password=None Returns: The provisioned user information. - ''' + """ user_info = { "username": user_email, @@ -638,12 +701,18 @@ def create_user(self, user_email, first_name=None, last_name=None, password=None } user_info = {k: v for k, v in user_info.items() if v} - res = self.http.post(self.url + '/api/user/provisioning/', headers=self.hdrs, data=json.dumps(user_info), - verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/user/provisioning/", + headers=self.hdrs, + data=json.dumps(user_info), + verify=self.ssl_verify, + ) return self._request_result(res) - def create_user_invite(self, user_email, first_name=None, last_name=None, system_role=None): - '''**Description** + def create_user_invite( + self, user_email, first_name=None, last_name=None, system_role=None + ): + """**Description** Invites a new user to use Sysdig Monitor. This should result in an email notification to the specified address. **Arguments** @@ -659,29 +728,37 @@ def create_user_invite(self, user_email, first_name=None, last_name=None, system - `examples/user_team_mgmt.py `_ - `examples/user_team_mgmt_extended.py `_ - ''' + """ # Look up the list of users to see if this exists, do not create if one exists - res = self.http.get(self.url + '/api/users', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/users", headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] data = res.json() - for user in data['users']: - if user['username'] == user_email: - return [False, 'user ' + user_email + ' already exists'] + for user in data["users"]: + if user["username"] == user_email: + return [False, "user " + user_email + " already exists"] # Create the user - options = {'username': user_email, - 'firstName': first_name, - 'lastName': last_name, - 'systemRole': system_role} + options = { + "username": user_email, + "firstName": first_name, + "lastName": last_name, + "systemRole": system_role, + } user_json = {k: v for k, v in options.items() if v is not None} - res = self.http.post(self.url + '/api/users', headers=self.hdrs, data=json.dumps(user_json), - verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/users", + headers=self.hdrs, + data=json.dumps(user_json), + verify=self.ssl_verify, + ) return self._request_result(res) def delete_user(self, user_email): - '''**Description** + """**Description** Deletes a user from Sysdig Monitor.
**Arguments** @@ -689,36 +766,44 @@ def delete_user(self, user_email): **Example** `examples/user_team_mgmt.py `_ - ''' + """ ok, res = self.get_user_ids([user_email]) if not ok: return ok, res userid = res[0] - res = self.http.delete(self.url + '/api/users/' + str(userid), headers=self.hdrs, verify=self.ssl_verify) + res = self.http.delete( + self.url + "/api/users/" + str(userid), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, None] def get_user(self, user_email): - res = self.http.get(self.url + '/api/users', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/users", headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] - for u in res.json()['users']: - if u['username'] == user_email: + for u in res.json()["users"]: + if u["username"] == user_email: return [True, u] - return [False, 'User not found'] + return [False, "User not found"] def get_users(self): - '''**Description** + """**Description** Return a list containing details about all users in the Sysdig Monitor environment. The API token must have Admin rights for this to succeed. **Success Return Value** A list of user objects - ''' - res = self.http.get(self.url + '/api/users', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/users", headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] - return [True, res.json()['users']] + return [True, res.json()["users"]] def edit_user(self, user_email, firstName=None, lastName=None, systemRole=None): ok, user = self.get_user(user_email) @@ -726,30 +811,38 @@ def edit_user(self, user_email, firstName=None, lastName=None, systemRole=None): return ok, user reqbody = { - 'systemRole': systemRole if systemRole else user['systemRole'], - 'username': user_email, - 'enabled': user.get('enabled', False), - 'version': user['version'] + "systemRole": systemRole if systemRole else user["systemRole"], + "username": user_email, + "enabled": user.get("enabled", False), + "version": user["version"], } if firstName is None: - reqbody['firstName'] = user['firstName'] if 'firstName' in list(user.keys()) else '' + reqbody["firstName"] = ( + user["firstName"] if "firstName" in list(user.keys()) else "" + ) else: - reqbody['firstName'] = firstName + reqbody["firstName"] = firstName if lastName is None: - reqbody['lastName'] = user['lastName'] if 'lastName' in list(user.keys()) else '' + reqbody["lastName"] = ( + user["lastName"] if "lastName" in list(user.keys()) else "" + ) else: - reqbody['lastName'] = lastName + reqbody["lastName"] = lastName - res = self.http.put(self.url + '/api/users/' + str(user['id']), headers=self.hdrs, data=json.dumps(reqbody), - verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/users/" + str(user["id"]), + headers=self.hdrs, + data=json.dumps(reqbody), + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] - return [True, 'Successfully edited user'] + return [True, "Successfully edited user"] - def get_teams(self, team_filter='', product_filter=''): - '''**Description** + def get_teams(self, team_filter="", product_filter=""): + """**Description** Return the set of teams that match the filter specified. The *team_filter* should be a substring of the names of the teams to be returned.
**Arguments** @@ -758,22 +851,22 @@ def get_teams(self, team_filter='', product_filter=''): **Success Return Value** The teams that match the filter. - ''' - url = f'{self.url}/api/teams' + """ + url = f"{self.url}/api/teams" if product_filter: - if product_filter not in ['SDC', 'SDS']: + if product_filter not in ["SDC", "SDS"]: return [False, 'invalid product header, allowed only "SDC" or "SDS"'] - url = f'{url}?product={product_filter}' + url = f"{url}?product={product_filter}" res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] - ret = [t for t in res.json()['teams'] if team_filter in t['name']] + ret = [t for t in res.json()["teams"] if team_filter in t["name"]] return [True, ret] def get_team_by_id(self, id): - '''**Description** + """**Description** Return the team with the specified team ID, if it is present. **Arguments** @@ -784,15 +877,19 @@ def get_team_by_id(self, id): **Example** `examples/user_team_mgmt.py `_ - ''' - res = self.http.get(self.url + '/api/teams/' + str(id), headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/teams/" + str(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] - return [True, res.json()['team']] + return [True, res.json()["team"]] def get_team(self, name): - '''**Description** + """**Description** Return the team with the specified team name, if it is present. **Arguments** @@ -803,14 +900,18 @@ def get_team(self, name): **Example** `examples/user_team_mgmt.py `_ - ''' - res = self.http.get(self.url + '/api/v2/teams/light/name/' + str(name), headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/v2/teams/light/name/" + str(name), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] - light_team = res.json()['team'] + light_team = res.json()["team"] - ok, team_with_memberships = self.get_team_by_id(light_team['id']) + ok, team_with_memberships = self.get_team_by_id(light_team["id"]) if not ok: return [False, self.lasterr] @@ -818,25 +919,31 @@ def get_team(self, name): return [True, team_with_memberships] def get_team_ids(self, teams): - res = self.http.get(self.url + '/api/teams', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/teams", headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] - u = [x for x in res.json()['teams'] if x['name'] in teams] - return [True, [x['id'] for x in u]] + u = [x for x in res.json()["teams"] if x["name"] in teams] + return [True, [x["id"] for x in u]] def _get_user_id_dict(self, users): - res = self.http.get(self.url + '/api/users', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/users", headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] - u = [x for x in res.json()['users'] if x['username'] in users] - return [True, dict((user['username'], user['id']) for user in u)] + u = [x for x in res.json()["users"] if x["username"] in users] + return [True, dict((user["username"], user["id"]) for user in u)] def _get_id_user_dict(self, user_ids): - res = self.http.get(self.url + '/api/users', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/users", headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return 
[False, self.lasterr] - u = [x for x in res.json()['users'] if x['id'] in user_ids] - return [True, dict((user['id'], user['username']) for user in u)] + u = [x for x in res.json()["users"] if x["id"] in user_ids] + return [True, dict((user["id"], user["username"]) for user in u)] def get_user_ids(self, users): ok, res = self._get_user_id_dict(users) @@ -845,9 +952,20 @@ def get_user_ids(self, users): else: return [True, list(res.values())] - def create_team(self, name, memberships=None, filter='', description='', show='host', theme='#7BB0B2', - perm_capture=False, perm_custom_events=False, perm_aws_data=False, perm_rapid_response=False): - ''' + def create_team( + self, + name, + memberships=None, + filter="", + description="", + show="host", + theme="#7BB0B2", + perm_capture=False, + perm_custom_events=False, + perm_aws_data=False, + perm_rapid_response=False, + ): + """ **Description** Creates a new team @@ -868,43 +986,55 @@ def create_team(self, name, memberships=None, filter='', description='', show='h **Example** `examples/user_team_mgmt.py `_ - ''' + """ reqbody = { - 'name': name, - 'description': description, - 'theme': theme, - 'show': show, - 'canUseSysdigCapture': perm_capture, - 'canUseCustomEvents': perm_custom_events, - 'canUseAwsMetrics': perm_aws_data, - 'canUseRapidResponse': perm_rapid_response, + "name": name, + "description": description, + "theme": theme, + "show": show, + "canUseSysdigCapture": perm_capture, + "canUseCustomEvents": perm_custom_events, + "canUseAwsMetrics": perm_aws_data, + "canUseRapidResponse": perm_rapid_response, } # Map user-names to IDs if memberships: ok, res = self._get_user_id_dict(list(memberships.keys())) if not ok: - return [False, 'Could not fetch IDs for user names'] - reqbody['userRoles'] = [ - { - 'userId': user_id, - 'role': memberships[user_name] - } + return [False, "Could not fetch IDs for user names"] + reqbody["userRoles"] = [ + {"userId": user_id, "role": memberships[user_name]} for (user_name, user_id) in res.items() ] else: - reqbody['users'] = [] + reqbody["users"] = [] - if filter != '': - reqbody['filter'] = filter + if filter != "": + reqbody["filter"] = filter - res = self.http.post(self.url + '/api/teams', headers=self.hdrs, data=json.dumps(reqbody), - verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/teams", + headers=self.hdrs, + data=json.dumps(reqbody), + verify=self.ssl_verify, + ) return self._request_result(res) - def edit_team(self, name, memberships=None, filter=None, description=None, show=None, theme=None, - perm_capture=None, perm_custom_events=None, perm_aws_data=None, perm_rapid_response=False): - ''' + def edit_team( + self, + name, + memberships=None, + filter=None, + description=None, + show=None, + theme=None, + perm_capture=None, + perm_custom_events=None, + perm_aws_data=None, + perm_rapid_response=False, + ): + """ **Description** Edits an existing team. All arguments are optional. Team settings for any arguments unspecified will remain at their current settings. 
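+ Example `memberships` value (hypothetical users and role names, for illustration only): + {"jdoe@example.com": "ROLE_TEAM_EDIT", "asmith@example.com": "ROLE_TEAM_READ"}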
@@ -925,60 +1055,67 @@ def edit_team(self, name, memberships=None, filter=None, description=None, show= **Example** `examples/user_team_mgmt.py `_ - ''' + """ ok, team = self.get_team(name) if not ok: return ok, team reqbody = { - 'name': name, - 'theme': theme if theme else team['theme'], - 'show': show if show else team['show'], - 'canUseSysdigCapture': perm_capture if perm_capture else team['canUseSysdigCapture'], - 'canUseCustomEvents': perm_custom_events if perm_custom_events else team['canUseCustomEvents'], - 'canUseAwsMetrics': perm_aws_data if perm_aws_data else team['canUseAwsMetrics'], - 'canUseRapidResponse': perm_rapid_response, - 'defaultTeamRole': team['defaultTeamRole'], - 'entryPoint': team['entryPoint'], - 'id': team['id'], - 'version': team['version'] + "name": name, + "theme": theme if theme else team["theme"], + "show": show if show else team["show"], + "canUseSysdigCapture": perm_capture + if perm_capture + else team["canUseSysdigCapture"], + "canUseCustomEvents": perm_custom_events + if perm_custom_events + else team["canUseCustomEvents"], + "canUseAwsMetrics": perm_aws_data + if perm_aws_data + else team["canUseAwsMetrics"], + "canUseRapidResponse": perm_rapid_response, + "defaultTeamRole": team["defaultTeamRole"], + "entryPoint": team["entryPoint"], + "id": team["id"], + "version": team["version"], } # Handling team description if description is not None: - reqbody['description'] = description - elif 'description' in list(team.keys()): - reqbody['description'] = team['description'] + reqbody["description"] = description + elif "description" in list(team.keys()): + reqbody["description"] = team["description"] # Handling for users to map (user-name, team-role) pairs to memberships if memberships is not None: ok, res = self._get_user_id_dict(list(memberships.keys())) if not res: - return [False, 'Could not convert user names to IDs'] - reqbody['userRoles'] = [ - { - 'userId': user_id, - 'role': memberships[user_name] - } + return [False, "Could not convert user names to IDs"] + reqbody["userRoles"] = [ + {"userId": user_id, "role": memberships[user_name]} for (user_name, user_id) in res.items() ] - elif 'userRoles' in list(team.keys()): - reqbody['userRoles'] = team['userRoles'] + elif "userRoles" in list(team.keys()): + reqbody["userRoles"] = team["userRoles"] else: - reqbody['userRoles'] = [] + reqbody["userRoles"] = [] # Special handling for filters since we don't support blank filters if filter is not None: - reqbody['filter'] = filter - elif 'filter' in list(team.keys()): - reqbody['filter'] = team['filter'] - - res = self.http.put(self.url + '/api/teams/' + str(team['id']), headers=self.hdrs, data=json.dumps(reqbody), - verify=self.ssl_verify) + reqbody["filter"] = filter + elif "filter" in list(team.keys()): + reqbody["filter"] = team["filter"] + + res = self.http.put( + self.url + "/api/teams/" + str(team["id"]), + headers=self.hdrs, + data=json.dumps(reqbody), + verify=self.ssl_verify, + ) return self._request_result(res) def delete_team(self, name): - '''**Description** + """**Description** Deletes a team from Sysdig Monitor. 
**Arguments** @@ -986,18 +1123,22 @@ def delete_team(self, name): **Example** `examples/user_team_mgmt.py `_ - ''' + """ ok, team = self.get_team(name) if not ok: return ok, team - res = self.http.delete(self.url + '/api/teams/' + str(team['id']), headers=self.hdrs, verify=self.ssl_verify) + res = self.http.delete( + self.url + "/api/teams/" + str(team["id"]), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, None] def list_memberships(self, team): - ''' + """ **Description** List all memberships for specified team. @@ -1009,22 +1150,25 @@ def list_memberships(self, team): **Example** `examples/user_team_mgmt_extended.py `_ - ''' + """ ok, res = self.get_team(team) if not ok: return ok, res - raw_memberships = res['userRoles'] - user_ids = [m['userId'] for m in raw_memberships] + raw_memberships = res["userRoles"] + user_ids = [m["userId"] for m in raw_memberships] ok, res = self._get_id_user_dict(user_ids) if not ok: - return [False, 'Could not fetch IDs for user names'] + return [False, "Could not fetch IDs for user names"] else: - return [True, dict([(res[m['userId']], m['role']) for m in raw_memberships])] + return [ + True, + dict([(res[m["userId"]], m["role"]) for m in raw_memberships]), + ] def save_memberships(self, team, memberships): - ''' + """ **Description** Create new user team memberships or update existing ones. @@ -1034,7 +1178,7 @@ def save_memberships(self, team, memberships): **Example** `examples/user_team_mgmt_extended.py `_ - ''' + """ res = self.list_memberships(team) @@ -1052,7 +1196,7 @@ def save_memberships(self, team, memberships): return [True, None] def remove_memberships(self, team, users): - ''' + """ **Description** Remove user memberships from specified team. 
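+ Example `users` value (hypothetical addresses, for illustration only): + ["jdoe@example.com", "asmith@example.com"]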
@@ -1062,7 +1206,7 @@ def remove_memberships(self, team, users): **Example** `examples/user_team_mgmt_extended.py `_ - ''' + """ res = self.list_memberships(team) @@ -1080,7 +1224,7 @@ def remove_memberships(self, team, users): return [True, None] def list_access_keys(self): - ''' + """ **Description** List all the access keys enabled and disabled for this instance of Sysdig Monitor/Secure @@ -1089,50 +1233,66 @@ def list_access_keys(self): **Example** `examples/list_access_keys.py `_ - ''' - res = self.http.get(self.url + '/api/customer/accessKeys', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/customer/accessKeys", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def create_access_key(self, agent_limit=None, agent_reserved=None, team_id=None): - ''' + """ **Description** Create a new access key for Sysdig Monitor/Secure **Result** The access keys object - ''' + """ access_key_payload = { "customerAccessKey": { "limit": agent_limit, "reservation": agent_reserved, - "teamId": team_id + "teamId": team_id, } } - res = self.http.post(self.url + '/api/customer/accessKeys', headers=self.hdrs, verify=self.ssl_verify, data=json.dumps(access_key_payload)) + res = self.http.post( + self.url + "/api/customer/accessKeys", + headers=self.hdrs, + verify=self.ssl_verify, + data=json.dumps(access_key_payload), + ) return self._request_result(res) - def update_access_key(self, access_key, agent_limit=None, agent_reserved=None, team_id=None): - ''' + def update_access_key( + self, access_key, agent_limit=None, agent_reserved=None, team_id=None + ): + """ **Description** Update an existing access key for Sysdig Monitor/Secure **Result** The access keys object - ''' + """ access_key_payload = { "customerAccessKey": { "limit": agent_limit, "reservation": agent_reserved, - "teamId": team_id + "teamId": team_id, } } - res = self.http.put(self.url + '/api/customer/accessKeys/' + access_key, headers=self.hdrs, verify=self.ssl_verify, data=json.dumps(access_key_payload)) + res = self.http.put( + self.url + "/api/customer/accessKeys/" + access_key, + headers=self.hdrs, + verify=self.ssl_verify, + data=json.dumps(access_key_payload), + ) return self._request_result(res) def disable_access_key(self, access_key): - ''' + """ **Description** Disable an existing access key @@ -1141,13 +1301,16 @@ def disable_access_key(self, access_key): **Result** The access keys object - ''' - res = self.http.post(self.url + '/api/customer/accessKeys/' + access_key + "/disable/", headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.post( + self.url + "/api/customer/accessKeys/" + access_key + "/disable/", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def enable_access_key(self, access_key): - ''' + """ **Description** Enable an existing access key @@ -1156,25 +1319,34 @@ def enable_access_key(self, access_key): **Result** The access keys object - ''' - res = self.http.post(self.url + '/api/customer/accessKeys/' + access_key + "/enable/", headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.post( + self.url + "/api/customer/accessKeys/" + access_key + "/enable/", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_agents_config(self): - res = self.http.get(self.url + '/api/agents/config', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/agents/config", headers=self.hdrs, verify=self.ssl_verify ) + if not
self._checkResponse(res): return [False, self.lasterr] data = res.json() return [True, data] def set_agents_config(self, config): - res = self.http.put(self.url + '/api/agents/config', headers=self.hdrs, data=json.dumps(config), - verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/agents/config", + headers=self.hdrs, + data=json.dumps(config), + verify=self.ssl_verify, + ) return self._request_result(res) def clear_agents_config(self): - data = {'files': []} + data = {"files": []} return self.set_agents_config(data) def get_user_api_token(self, username, teamname): @@ -1182,12 +1354,15 @@ def get_user_api_token(self, username, teamname): if not ok: return ok, team - res = self.http.get(self.url + '/api/token/%s/%d' % (username, team['id']), headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/token/%s/%d" % (username, team["id"]), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] data = res.json() - return [True, data['token']['key']] + return [True, data["token"]["key"]] def _request_result(self, res): if not self._checkResponse(res): diff --git a/sdcclient/_monitor.py b/sdcclient/_monitor.py index d4e1ddc7..62dcab2f 100644 --- a/sdcclient/_monitor.py +++ b/sdcclient/_monitor.py @@ -7,13 +7,20 @@ class SdMonitorClient(DashboardsClientV3, EventsClientV2, _SdcCommon): - - def __init__(self, token="", sdc_url='https://app.sysdigcloud.com', ssl_verify=True, custom_headers=None): - super(SdMonitorClient, self).__init__(token, sdc_url, ssl_verify, custom_headers) + def __init__( + self, + token="", + sdc_url="https://app.sysdigcloud.com", + ssl_verify=True, + custom_headers=None, + ): + super(SdMonitorClient, self).__init__( + token, sdc_url, ssl_verify, custom_headers + ) self.product = "SDC" def get_alerts(self) -> Union[Tuple[bool, str], Tuple[bool, Any]]: - ''' + """ Retrieve the list of alerts configured by the user. Returns: @@ -25,12 +32,16 @@ def get_alerts(self) -> Union[Tuple[bool, str], Tuple[bool, Any]]: >>> ok, res = client.get_alerts() >>> for alert in res['alerts']: >>> print(f'enabled: {str(alert["enabled"])}, name: {alert["name"]}' ) - ''' - res = self.http.get(self.url + '/api/alerts', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/alerts", headers=self.hdrs, verify=self.ssl_verify + ) return self._request_result(res) - def get_notifications(self, from_ts, to_ts, state=None, resolved=None) -> Union[Tuple[bool, str], Tuple[bool, Any]]: - ''' + def get_notifications( + self, from_ts, to_ts, state=None, resolved=None + ) -> Union[Tuple[bool, str], Tuple[bool, Any]]: + """ Returns the list of Sysdig Monitor alert notifications. 
Args: @@ -53,28 +64,35 @@ def get_notifications(self, from_ts, to_ts, state=None, resolved=None) -> Union[ >>> ok, res = client.get_notifications(from_ts=int(time.time() - 86400), to_ts=int(time.time()), state='OK') >>> # Get the notifications in the last day and resolved state >>> ok, res = client.get_notifications(from_ts=int(time.time() - 86400), to_ts=int(time.time()), resolved=True) - ''' + """ params = {} if from_ts is not None: - params['from'] = from_ts * 1000000 + params["from"] = from_ts * 1000000 if to_ts is not None: - params['to'] = to_ts * 1000000 + params["to"] = to_ts * 1000000 if state is not None: - params['state'] = state + params["state"] = state if resolved is not None: - params['resolved'] = resolved - - res = self.http.get(self.url + '/api/notifications', headers=self.hdrs, params=params, verify=self.ssl_verify) + params["resolved"] = resolved + + res = self.http.get( + self.url + "/api/notifications", + headers=self.hdrs, + params=params, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return False, self.lasterr return True, res.json() - def update_notification_resolution(self, notification, resolved) -> Union[Tuple[bool, str], Tuple[bool, Any]]: - ''' + def update_notification_resolution( + self, notification, resolved + ) -> Union[Tuple[bool, str], Tuple[bool, Any]]: + """ Updates the resolution status of an alert notification. Args: @@ -92,21 +110,38 @@ def update_notification_resolution(self, notification, resolved) -> Union[Tuple[ >>> # Resolve all of them >>> for notification in notifications: >>> ok, res = sdclient.update_notification_resolution(notification, True) - ''' - if 'id' not in notification: - return False, 'Invalid notification format' - - notification['resolved'] = resolved - data = {'notification': notification} - - res = self.http.put(self.url + '/api/notifications/' + str(notification['id']), headers=self.hdrs, - data=json.dumps(data), verify=self.ssl_verify) + """ + if "id" not in notification: + return False, "Invalid notification format" + + notification["resolved"] = resolved + data = {"notification": notification} + + res = self.http.put( + self.url + "/api/notifications/" + str(notification["id"]), + headers=self.hdrs, + data=json.dumps(data), + verify=self.ssl_verify, + ) return self._request_result(res) - def create_alert(self, name=None, description=None, severity=None, for_atleast_s=None, condition=None, - segmentby=None, segment_condition='ANY', user_filter='', notify=None, enabled=True, - annotations=None, alert_obj=None, type="MANUAL") -> Union[Tuple[bool, str], Tuple[bool, Any]]: - ''' + def create_alert( + self, + name=None, + description=None, + severity=None, + for_atleast_s=None, + condition=None, + segmentby=None, + segment_condition="ANY", + user_filter="", + notify=None, + enabled=True, + annotations=None, + alert_obj=None, + type="MANUAL", + ) -> Union[Tuple[bool, str], Tuple[bool, Any]]: + """ Create a threshold-based alert. Args: @@ -128,7 +163,7 @@ def create_alert(self, name=None, description=None, severity=None, for_atleast_s A tuple where the first parameter indicates if the call was successful, and the second parameter holds either the error as string, or the response object. 
- ''' + """ if annotations is None: annotations = {} @@ -139,59 +174,68 @@ def create_alert(self, name=None, description=None, severity=None, for_atleast_s # # Get the list of alerts from the server # - res = self.http.get(self.url + '/api/alerts', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/alerts", headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return False, self.lasterr res.json() if alert_obj is None: if None in (name, description, severity, for_atleast_s, condition): - return False, 'Must specify a full Alert object or all parameters: ' \ - 'name, description, severity, for_atleast_s, condition' + return ( + False, + "Must specify a full Alert object or all parameters: " + "name, description, severity, for_atleast_s, condition", + ) else: # # Populate the alert information # alert_json = { - 'alert': { - 'type': type, - 'name': name, - 'description': description, - 'enabled': enabled, - 'severity': severity, - 'timespan': for_atleast_s * 1000000, - 'condition': condition, - 'filter': user_filter + "alert": { + "type": type, + "name": name, + "description": description, + "enabled": enabled, + "severity": severity, + "timespan": for_atleast_s * 1000000, + "condition": condition, + "filter": user_filter, } } if segmentby: - alert_json['alert']['segmentBy'] = segmentby - alert_json['alert']['segmentCondition'] = {'type': segment_condition} + alert_json["alert"]["segmentBy"] = segmentby + alert_json["alert"]["segmentCondition"] = { + "type": segment_condition + } if annotations: - alert_json['alert']['annotations'] = annotations + alert_json["alert"]["annotations"] = annotations if notify is not None: - alert_json['alert']['notificationChannelIds'] = notify + alert_json["alert"]["notificationChannelIds"] = notify else: # The REST API enforces "Alert ID and version must be null", so remove them if present, # since these would have been there in a dump from the list_alerts.py example. - alert_obj.pop('id', None) - alert_obj.pop('version', None) - alert_json = { - 'alert': alert_obj - } + alert_obj.pop("id", None) + alert_obj.pop("version", None) + alert_json = {"alert": alert_obj} # # Create the new alert # - res = self.http.post(self.url + '/api/alerts', headers=self.hdrs, data=json.dumps(alert_json), - verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/alerts", + headers=self.hdrs, + data=json.dumps(alert_json), + verify=self.ssl_verify, + ) return self._request_result(res) def update_alert(self, alert) -> Union[Tuple[bool, str], Tuple[bool, Any]]: - ''' + """ Update a modified threshold-based alert. Args: @@ -209,17 +253,21 @@ def update_alert(self, alert) -> Union[Tuple[bool, str], Tuple[bool, Any]]: >>> if alert['name'] == alert_name: >>> alert['timespan'] = alert['timespan'] * 2 # Note: Expressed in seconds * 1000000 >>> ok, res_update = client.update_alert(alert) - ''' + """ - if 'id' not in alert: + if "id" not in alert: return False, "Invalid alert format" - res = self.http.put(self.url + '/api/alerts/' + str(alert['id']), headers=self.hdrs, - data=json.dumps({"alert": alert}), verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/alerts/" + str(alert["id"]), + headers=self.hdrs, + data=json.dumps({"alert": alert}), + verify=self.ssl_verify, + ) return self._request_result(res) def delete_alert(self, alert) -> Union[Tuple[bool, str], Tuple[bool, Any]]: - '''**Description** + """**Description** Deletes an alert. 
**Arguments** @@ -230,18 +278,24 @@ def delete_alert(self, alert) -> Union[Tuple[bool, str], Tuple[bool, Any]]: **Example** `examples/delete_alert.py `_ - ''' - if 'id' not in alert: - return False, 'Invalid alert format' + """ + if "id" not in alert: + return False, "Invalid alert format" - res = self.http.delete(self.url + '/api/alerts/' + str(alert['id']), headers=self.hdrs, verify=self.ssl_verify) + res = self.http.delete( + self.url + "/api/alerts/" + str(alert["id"]), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return False, self.lasterr return True, None - def get_explore_grouping_hierarchy(self) -> Union[Tuple[bool, str], Tuple[bool, Any]]: - '''**Description** + def get_explore_grouping_hierarchy( + self, + ) -> Union[Tuple[bool, str], Tuple[bool, Any]]: + """**Description** Return the user's current grouping hierarchy as visible in the Explore tab of Sysdig Monitor. **Success Return Value** @@ -249,57 +303,69 @@ def get_explore_grouping_hierarchy(self) -> Union[Tuple[bool, str], Tuple[bool, **Example** `examples/print_explore_grouping.py `_ - ''' - res = self.http.get(self.url + '/api/groupConfigurations', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/groupConfigurations", + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return False, self.lasterr data = res.json() - if 'groupConfigurations' not in data: - return False, 'corrupted groupConfigurations API response' + if "groupConfigurations" not in data: + return False, "corrupted groupConfigurations API response" - gconfs = data['groupConfigurations'] + gconfs = data["groupConfigurations"] for gconf in gconfs: - if gconf['id'] == 'explore': + if gconf["id"] == "explore": res = [] - items = gconf['groups'][0]['groupBy'] + items = gconf["groups"][0]["groupBy"] for item in items: - res.append(item['metric']) + res.append(item["metric"]) return True, res - return False, 'corrupted groupConfigurations API response, missing "explore" entry' + return ( + False, + 'corrupted groupConfigurations API response, missing "explore" entry', + ) - def set_explore_grouping_hierarchy(self, new_hierarchy) -> Union[Tuple[bool, str], Tuple[bool, Any]]: - '''**Description** + def set_explore_grouping_hierarchy( + self, new_hierarchy + ) -> Union[Tuple[bool, str], Tuple[bool, Any]]: + """**Description** Changes the grouping hierarchy in the Explore panel of the current user. **Arguments** - **new_hierarchy**: a list of sysdig segmentation metrics indicating the new grouping hierarchy. - ''' - body = { - 'id': 'explore', - 'groups': [{'groupBy': []}] - } + """ + body = {"id": "explore", "groups": [{"groupBy": []}]} for item in new_hierarchy: - body['groups'][0]['groupBy'].append({'metric': item}) - - res = self.http.put(self.url + '/api/groupConfigurations/explore', headers=self.hdrs, - data=json.dumps(body), verify=self.ssl_verify) + body["groups"][0]["groupBy"].append({"metric": item}) + + res = self.http.put( + self.url + "/api/groupConfigurations/explore", + headers=self.hdrs, + data=json.dumps(body), + verify=self.ssl_verify, + ) if not self._checkResponse(res): return False, self.lasterr else: return True, None @staticmethod - def convert_scope_string_to_expression(scope) -> Union[Tuple[bool, str], Tuple[bool, Any]]: - '''**Description** - Internal function to convert a filter string to a filter object to be used with dashboards. 
- ''' + def convert_scope_string_to_expression( + scope, + ) -> Union[Tuple[bool, str], Tuple[bool, Any]]: + """**Description** + Internal function to convert a filter string to a filter object to be used with dashboards. + """ # # NOTE: The supported grammar is not perfectly aligned with the grammar supported by the Sysdig backend. # Proper grammar implementation will happen soon. @@ -310,47 +376,52 @@ def convert_scope_string_to_expression(scope) -> Union[Tuple[bool, str], Tuple[b return True, [] expressions = [] - string_expressions = scope.strip(' \t\n\r').split(' and ') + string_expressions = scope.strip(" \t\n\r").split(" and ") expression_re = re.compile( - '^(?Pnot )?(?P[^ ]+) (?P=|!=|in|contains|starts with) (?P(:?"[^"]+"|\'[^\']+\'|(.+)|.+))$') + "^(?Pnot )?(?P[^ ]+) (?P=|!=|in|contains|starts with) (?P(:?\"[^\"]+\"|'[^']+'|(.+)|.+))$" + ) for string_expression in string_expressions: matches = expression_re.match(string_expression) if matches is None: - return False, 'invalid scope format' + return False, "invalid scope format" - is_not_operator = matches.group('not') is not None + is_not_operator = matches.group("not") is not None - if matches.group('operator') == 'in': - list_value = matches.group('value').strip(' ()') - value_matches = re.findall('(:?\'[^\',]+\')|(:?"[^",]+")|(:?[,]+)', list_value) + if matches.group("operator") == "in": + list_value = matches.group("value").strip(" ()") + value_matches = re.findall( + "(:?'[^',]+')|(:?\"[^\",]+\")|(:?[,]+)", list_value + ) if len(value_matches) == 0: - return False, 'invalid scope value list format' + return False, "invalid scope value list format" value_matches = map(lambda v: v[0] if v[0] else v[1], value_matches) - values = map(lambda v: v.strip(' "\''), value_matches) + values = map(lambda v: v.strip(" \"'"), value_matches) else: - values = [matches.group('value').strip('"\'')] + values = [matches.group("value").strip("\"'")] operator_parse_dict = { - 'in': 'in' if not is_not_operator else 'notIn', - '=': 'equals' if not is_not_operator else 'notEquals', - '!=': 'notEquals' if not is_not_operator else 'equals', - 'contains': 'contains' if not is_not_operator else 'notContains', - 'starts with': 'startsWith' + "in": "in" if not is_not_operator else "notIn", + "=": "equals" if not is_not_operator else "notEquals", + "!=": "notEquals" if not is_not_operator else "equals", + "contains": "contains" if not is_not_operator else "notContains", + "starts with": "startsWith", } - operator = operator_parse_dict.get(matches.group('operator'), None) + operator = operator_parse_dict.get(matches.group("operator"), None) if operator is None: - return False, 'invalid scope operator' + return False, "invalid scope operator" - expressions.append({ - 'operand': matches.group('operand'), - 'operator': operator, - 'value': values - }) + expressions.append( + { + "operand": matches.group("operand"), + "operator": operator, + "value": values, + } + ) return True, expressions diff --git a/sdcclient/_monitor_v1.py b/sdcclient/_monitor_v1.py index a9d3387f..dd72bfb8 100644 --- a/sdcclient/_monitor_v1.py +++ b/sdcclient/_monitor_v1.py @@ -10,78 +10,98 @@ class SdMonitorClientV1(SdMonitorClient): - '''**Description** - Handles dashboards version 1 (ie. up to February 2019). For later Sysdig Monitor versions, please use :class:`~SdMonitorClient` instead. - ''' + """**Description** + Handles dashboards version 1 (ie. up to February 2019). For later Sysdig Monitor versions, please use :class:`~SdMonitorClient` instead. 
+ """ - def __init__(self, token="", sdc_url='https://app.sysdigcloud.com', ssl_verify=True): + def __init__( + self, token="", sdc_url="https://app.sysdigcloud.com", ssl_verify=True + ): super(SdMonitorClientV1, self).__init__(token, sdc_url, ssl_verify) - self._dashboards_api_version = 'v1' - self._dashboards_api_endpoint = '/ui/dashboards' - self._default_dashboards_api_endpoint = '/api/defaultDashboards' - - def create_dashboard_from_template(self, dashboard_name, template, scope, shared=False, public=False, - annotations={}): + self._dashboards_api_version = "v1" + self._dashboards_api_endpoint = "/ui/dashboards" + self._default_dashboards_api_endpoint = "/api/defaultDashboards" + + def create_dashboard_from_template( + self, + dashboard_name, + template, + scope, + shared=False, + public=False, + annotations={}, + ): if scope is not None: if not isinstance(scope, basestring): - return [False, 'Invalid scope format: Expected a string'] + return [False, "Invalid scope format: Expected a string"] # # Clean up the dashboard we retireved so it's ready to be pushed # - template['id'] = None - template['version'] = None - template['schema'] = 1 - template['name'] = dashboard_name - template['isShared'] = shared - template['isPublic'] = public - template['publicToken'] = None + template["id"] = None + template["version"] = None + template["schema"] = 1 + template["name"] = dashboard_name + template["isShared"] = shared + template["isPublic"] = public + template["publicToken"] = None # set dashboard scope to the specific parameter ok, scope_expression = self.convert_scope_string_to_expression(scope) if not ok: return ok, scope_expression - template['filterExpression'] = scope - template['scopeExpressionList'] = map( - lambda ex: {'operand': ex['operand'], 'operator': ex['operator'], 'value': ex['value'], 'displayName': '', - 'isVariable': False}, scope_expression) - - if 'widgets' in template and template['widgets'] is not None: + template["filterExpression"] = scope + template["scopeExpressionList"] = map( + lambda ex: { + "operand": ex["operand"], + "operator": ex["operator"], + "value": ex["value"], + "displayName": "", + "isVariable": False, + }, + scope_expression, + ) + + if "widgets" in template and template["widgets"] is not None: # Default dashboards (aka Explore views) specify panels with the property `widgets`, # while custom dashboards use `items` - template['items'] = list(template['widgets']) - del template['widgets'] + template["items"] = list(template["widgets"]) + del template["widgets"] # NOTE: Individual panels might override the dashboard scope, the override will NOT be reset - if 'items' in template and template['items'] is not None: - for chart in template['items']: - if 'overrideFilter' not in chart: - chart['overrideFilter'] = False + if "items" in template and template["items"] is not None: + for chart in template["items"]: + if "overrideFilter" not in chart: + chart["overrideFilter"] = False - if not chart['overrideFilter']: + if not chart["overrideFilter"]: # patch frontend bug to hide scope override warning even when it's not really overridden - chart['scope'] = scope + chart["scope"] = scope # if chart scope is equal to dashboard scope, set it as non override - chart_scope = chart['scope'] if 'scope' in chart else None - chart['overrideFilter'] = chart_scope != scope + chart_scope = chart["scope"] if "scope" in chart else None + chart["overrideFilter"] = chart_scope != scope - if 'annotations' in template: - template['annotations'].update(annotations) + if 
"annotations" in template: + template["annotations"].update(annotations) else: - template['annotations'] = annotations + template["annotations"] = annotations - template['annotations']['createdByEngine'] = True + template["annotations"]["createdByEngine"] = True # # Create the new dashboard # - res = self.http.post(self.url + self._dashboards_api_endpoint, headers=self.hdrs, - data=json.dumps({'dashboard': template}), verify=self.ssl_verify) + res = self.http.post( + self.url + self._dashboards_api_endpoint, + headers=self.hdrs, + data=json.dumps({"dashboard": template}), + verify=self.ssl_verify, + ) return self._request_result(res) def create_dashboard(self, name): - ''' + """ **Description** Creates an empty dashboard. You can then add panels by using ``add_dashboard_panel``. @@ -93,23 +113,31 @@ def create_dashboard(self, name): **Example** `examples/dashboard.py `_ - ''' - dashboard_configuration = { - 'name': name, - 'schema': 2, - 'items': [] - } + """ + dashboard_configuration = {"name": name, "schema": 2, "items": []} # # Create the new dashboard # - res = self.http.post(self.url + self._dashboards_api_endpoint, headers=self.hdrs, - data=json.dumps({'dashboard': dashboard_configuration}), - verify=self.ssl_verify) + res = self.http.post( + self.url + self._dashboards_api_endpoint, + headers=self.hdrs, + data=json.dumps({"dashboard": dashboard_configuration}), + verify=self.ssl_verify, + ) return self._request_result(res) - def add_dashboard_panel(self, dashboard, name, panel_type, metrics, scope=None, sort_by=None, limit=None, - layout=None): + def add_dashboard_panel( + self, + dashboard, + name, + panel_type, + metrics, + scope=None, + sort_by=None, + limit=None, + layout=None, + ): """**Description** Adds a panel to the dashboard. A panel can be a time series, or a top chart (i.e. bar chart), or a number panel. @@ -133,19 +161,14 @@ def add_dashboard_panel(self, dashboard, name, panel_type, metrics, scope=None, `examples/dashboard.py `_ """ panel_configuration = { - 'name': name, - 'showAs': None, - 'showAsType': None, - 'metrics': [], - 'gridConfiguration': { - 'col': 1, - 'row': 1, - 'size_x': 12, - 'size_y': 6 - } + "name": name, + "showAs": None, + "showAsType": None, + "metrics": [], + "gridConfiguration": {"col": 1, "row": 1, "size_x": 12, "size_y": 6}, } - if panel_type == 'timeSeries': + if panel_type == "timeSeries": # # In case of a time series, the current dashboard implementation # requires the timestamp to be explicitly specified as "key". 
@@ -154,7 +177,7 @@ def add_dashboard_panel(self, dashboard, name, panel_type, metrics, scope=None, # specify time window and sampling) # metrics = copy.copy(metrics) - metrics.insert(0, {'id': 'timestamp'}) + metrics.insert(0, {"id": "timestamp"}) # # Convert list of metrics to format used by Sysdig Monitor @@ -163,97 +186,94 @@ def add_dashboard_panel(self, dashboard, name, panel_type, metrics, scope=None, k_count = 0 v_count = 0 for i, metric in enumerate(metrics): - property_name = 'v' if 'aggregations' in metric else 'k' + property_name = "v" if "aggregations" in metric else "k" - if property_name == 'k': + if property_name == "k": i = k_count k_count += 1 else: i = v_count v_count += 1 - property_names[metric['id']] = property_name + str(i) - - panel_configuration['metrics'].append({ - 'metricId': metric['id'], - 'aggregation': metric['aggregations']['time'] if 'aggregations' in metric else None, - 'groupAggregation': metric['aggregations']['group'] if 'aggregations' in metric else None, - 'propertyName': property_name + str(i) - }) + property_names[metric["id"]] = property_name + str(i) + + panel_configuration["metrics"].append( + { + "metricId": metric["id"], + "aggregation": metric["aggregations"]["time"] + if "aggregations" in metric + else None, + "groupAggregation": metric["aggregations"]["group"] + if "aggregations" in metric + else None, + "propertyName": property_name + str(i), + } + ) - panel_configuration['scope'] = scope + panel_configuration["scope"] = scope # if chart scope is equal to dashboard scope, set it as non override - panel_configuration['overrideFilter'] = ('scope' in dashboard and dashboard['scope'] != scope) or \ - ('scope' not in dashboard and scope is not None) + panel_configuration["overrideFilter"] = ( + "scope" in dashboard and dashboard["scope"] != scope + ) or ("scope" not in dashboard and scope is not None) # # Configure panel type # - if panel_type == 'timeSeries': - panel_configuration['showAs'] = 'timeSeries' - panel_configuration['showAsType'] = 'line' + if panel_type == "timeSeries": + panel_configuration["showAs"] = "timeSeries" + panel_configuration["showAsType"] = "line" if limit is not None: - panel_configuration['paging'] = { - 'from': 0, - 'to': limit - 1 - } + panel_configuration["paging"] = {"from": 0, "to": limit - 1} - elif panel_type == 'number': - panel_configuration['showAs'] = 'summary' - panel_configuration['showAsType'] = 'summary' - elif panel_type == 'top': - panel_configuration['showAs'] = 'top' - panel_configuration['showAsType'] = 'bars' + elif panel_type == "number": + panel_configuration["showAs"] = "summary" + panel_configuration["showAsType"] = "summary" + elif panel_type == "top": + panel_configuration["showAs"] = "top" + panel_configuration["showAsType"] = "bars" if sort_by is None: - panel_configuration['sorting'] = [{ - 'id': 'v0', - 'mode': 'desc' - }] + panel_configuration["sorting"] = [{"id": "v0", "mode": "desc"}] else: - panel_configuration['sorting'] = [{ - 'id': property_names[sort_by['metric']], - 'mode': sort_by['mode'] - }] + panel_configuration["sorting"] = [ + {"id": property_names[sort_by["metric"]], "mode": sort_by["mode"]} + ] if limit is None: - panel_configuration['paging'] = { - 'from': 0, - 'to': 10 - } + panel_configuration["paging"] = {"from": 0, "to": 10} else: - panel_configuration['paging'] = { - 'from': 0, - 'to': limit - 1 - } + panel_configuration["paging"] = {"from": 0, "to": limit - 1} # # Configure layout # if layout is not None: - panel_configuration['gridConfiguration'] = layout + 
panel_configuration["gridConfiguration"] = layout # # Clone existing dashboard... # dashboard_configuration = copy.deepcopy(dashboard) - dashboard_configuration['id'] = None + dashboard_configuration["id"] = None # # ... and add the new panel # - dashboard_configuration['items'].append(panel_configuration) + dashboard_configuration["items"].append(panel_configuration) # # Update dashboard # - res = self.http.put(self.url + self._dashboards_api_endpoint + '/' + str(dashboard['id']), headers=self.hdrs, - data=json.dumps({'dashboard': dashboard_configuration}), - verify=self.ssl_verify) + res = self.http.put( + self.url + self._dashboards_api_endpoint + "/" + str(dashboard["id"]), + headers=self.hdrs, + data=json.dumps({"dashboard": dashboard_configuration}), + verify=self.ssl_verify, + ) return self._request_result(res) def remove_dashboard_panel(self, dashboard, panel_name): - '''**Description** + """**Description** Removes a panel from the dashboard. The panel to remove is identified by the specified ``name``. **Arguments** @@ -264,41 +284,44 @@ def remove_dashboard_panel(self, dashboard, panel_name): **Example** `examples/dashboard.py `_ - ''' + """ # # Clone existing dashboard... # dashboard_configuration = copy.deepcopy(dashboard) - dashboard_configuration['id'] = None + dashboard_configuration["id"] = None # # ... find the panel # def filter_fn(panel): - return panel['name'] == panel_name + return panel["name"] == panel_name - panels = list(filter(filter_fn, dashboard_configuration['items'])) + panels = list(filter(filter_fn, dashboard_configuration["items"])) if len(panels) > 0: # # ... and remove it # for panel in panels: - dashboard_configuration['items'].remove(panel) + dashboard_configuration["items"].remove(panel) # # Update dashboard # - res = self.http.put(self.url + self._dashboards_api_endpoint + '/' + str(dashboard['id']), - headers=self.hdrs, data=json.dumps({'dashboard': dashboard_configuration}), - verify=self.ssl_verify) + res = self.http.put( + self.url + self._dashboards_api_endpoint + "/" + str(dashboard["id"]), + headers=self.hdrs, + data=json.dumps({"dashboard": dashboard_configuration}), + verify=self.ssl_verify, + ) return self._request_result(res) else: - return [False, 'Not found'] + return [False, "Not found"] def _get_dashboard_converters(self): - '''**Description** - Internal function to return dashboard converters from one version to another one. - ''' + """**Description** + Internal function to return dashboard converters from one version to another one. + """ # There's not really a previous version... 
return {} diff --git a/sdcclient/_scanning.py b/sdcclient/_scanning.py index 24de165f..54931d51 100644 --- a/sdcclient/_scanning.py +++ b/sdcclient/_scanning.py @@ -19,12 +19,22 @@ class SdScanningClient(ScanningAlertsClientV1, _SdcCommon): - def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None): - super(SdScanningClient, self).__init__(token, sdc_url, ssl_verify, custom_headers) + def __init__( + self, + token="", + sdc_url="https://secure.sysdig.com", + ssl_verify=True, + custom_headers=None, + ): + super(SdScanningClient, self).__init__( + token, sdc_url, ssl_verify, custom_headers + ) self.product = "SDS" - def add_image(self, image, force=False, dockerfile=None, annotations={}, autosubscribe=True): - '''**Description** + def add_image( + self, image, force=False, dockerfile=None, annotations={}, autosubscribe=True + ): + """**Description** Add an image to the scanner **Arguments** @@ -35,31 +45,36 @@ def add_image(self, image, force=False, dockerfile=None, annotations={}, autosub **Success Return Value** A JSON object representing the image that was added. - ''' + """ itype = self._discover_inputimage_format(image) - if itype != 'tag': + if itype != "tag": return [False, "can only add a tag"] payload = {} if dockerfile: - payload['dockerfile'] = base64.b64encode(dockerfile.encode()).decode("utf-8") - payload['tag'] = image + payload["dockerfile"] = base64.b64encode(dockerfile.encode()).decode( + "utf-8" + ) + payload["tag"] = image if annotations: - payload['annotations'] = annotations + payload["annotations"] = annotations url = "{base_url}/api/scanning/v1/anchore/images?autosubscribe={autosubscribe}{force}".format( - base_url=self.url, - autosubscribe=str(autosubscribe), - force="&force=true" if force else "") - - res = self.http.post(url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify) + base_url=self.url, + autosubscribe=str(autosubscribe), + force="&force=true" if force else "", + ) + + res = self.http.post( + url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] def get_image(self, image, show_history=False): - '''**Description** + """**Description** Find the image with the tag and return its json description **Arguments** @@ -67,30 +82,34 @@ def get_image(self, image, show_history=False): **Success Return Value** A JSON object representing the image. 
- ''' + """ itype = self._discover_inputimage_format(image) - if itype not in ['tag', 'imageid', 'imageDigest']: + if itype not in ["tag", "imageid", "imageDigest"]: return [False, "cannot use input image string: no discovered imageDigest"] params = {} - params['history'] = str(show_history and itype not in ['imageid', 'imageDigest']).lower() - if itype == 'tag': - params['fulltag'] = image + params["history"] = str( + show_history and itype not in ["imageid", "imageDigest"] + ).lower() + if itype == "tag": + params["fulltag"] = image url = self.url + "/api/scanning/v1/anchore/images" url += { - 'imageid': '/by_id/{}'.format(image), - 'imageDigest': '/{}'.format(image) - }.get(itype, '') + "imageid": "/by_id/{}".format(image), + "imageDigest": "/{}".format(image), + }.get(itype, "") - res = self.http.get(url, params=params, headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + url, params=params, headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] def list_images(self): - '''**Description** + """**Description** List the current set of images in the scanner. **Arguments** @@ -98,7 +117,7 @@ def list_images(self): **Success Return Value** A JSON object containing all the images. - ''' + """ url = self.url + "/api/scanning/v1/anchore/images" res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -120,7 +139,7 @@ def list_image_tags(self): return [True, res.json()] def list_whitelisted_cves(self): - '''**Description** + """**Description** List the whitelisted global CVEs. **Arguments** @@ -132,10 +151,13 @@ def list_whitelisted_cves(self): **Deprecated** This method has been deprecated since the API has changed. Use the list_vulnerability_exception_bundles and get_vulnerability_exception_bundle methods. - ''' - warn("list_whitelisted_cves has been deprecated and doesn't work properly, please use the " - "list_vulnerability_exception_bundles and get_vulnerability_exception_bundle methods", - DeprecationWarning, 3) + """ + warn( + "list_whitelisted_cves has been deprecated and doesn't work properly, please use the " + "list_vulnerability_exception_bundles and get_vulnerability_exception_bundle methods", + DeprecationWarning, + 3, + ) url = self.url + "/api/scanning/v1/whitelists/global?bundle=default" res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -144,7 +166,7 @@ def list_whitelisted_cves(self): return [True, res.json()] def query_image_content(self, image, content_type=""): - '''**Description** + """**Description** Find the image with the tag and return its content. **Arguments** @@ -159,16 +181,19 @@ def query_image_content(self, image, content_type=""): **Success Return Value** A JSON object representing the image content. - ''' + """ content_type = content_type.lower() supported_types = ["os", "files", "npm", "gem", "python", "java"] if content_type not in supported_types: - return False, f"unsupported type provided: {content_type}, must be one of {supported_types}" + return ( + False, + f"unsupported type provided: {content_type}, must be one of {supported_types}", + ) - return self._query_image(image, query_group='content', query_type=content_type) + return self._query_image(image, query_group="content", query_type=content_type) def query_image_metadata(self, image, metadata_type=""): - '''**Description** + """**Description** Find the image with the tag and return its metadata. 
**Arguments** @@ -177,11 +202,13 @@ def query_image_metadata(self, image, metadata_type=""): **Success Return Value** A JSON object representing the image metadata. - ''' - return self._query_image(image, query_group='metadata', query_type=metadata_type) + """ + return self._query_image( + image, query_group="metadata", query_type=metadata_type + ) def query_image_vuln(self, image, vuln_type="", vendor_only=True): - '''**Description** + """**Description** Find the image with the tag and return its vulnerabilities. **Arguments** @@ -191,12 +218,20 @@ def query_image_vuln(self, image, vuln_type="", vendor_only=True): **Success Return Value** A JSON object representing the image vulnerabilities. - ''' - return self._query_image(image, query_group='vuln', query_type=vuln_type, vendor_only=vendor_only) - - def query_images_by_vulnerability(self, vulnerability_id, namespace=None, package=None, severity=None, - vendor_only=True): - '''**Description** + """ + return self._query_image( + image, query_group="vuln", query_type=vuln_type, vendor_only=vendor_only + ) + + def query_images_by_vulnerability( + self, + vulnerability_id, + namespace=None, + package=None, + severity=None, + vendor_only=True, + ): + """**Description** Search system for images with the given vulnerability ID present **Arguments** @@ -208,14 +243,15 @@ def query_images_by_vulnerability(self, vulnerability_id, namespace=None, packag **Success Return Value** A JSON object representing the images. - ''' + """ url = "{base_url}/api/scanning/v1/anchore/query/images/by_vulnerability?vulnerability_id={vulnerability_id}{namespace}{package}{severity}&vendor_only={vendor_only}".format( - base_url=self.url, - vulnerability_id=vulnerability_id, - namespace="&namespace={}".format(namespace) if namespace else "", - package="&affected_package={}".format(package) if package else "", - severity="&severity={}".format(severity) if severity else "", - vendor_only=vendor_only) + base_url=self.url, + vulnerability_id=vulnerability_id, + namespace="&namespace={}".format(namespace) if namespace else "", + package="&affected_package={}".format(package) if package else "", + severity="&severity={}".format(severity) if severity else "", + vendor_only=vendor_only, + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -224,7 +260,7 @@ def query_images_by_vulnerability(self, vulnerability_id, namespace=None, packag return [True, res.json()] def query_images_by_package(self, name, version=None, package_type=None): - '''**Description** + """**Description** Search system for images with the given package installed **Arguments** @@ -234,12 +270,15 @@ def query_images_by_package(self, name, version=None, package_type=None): **Success Return Value** A JSON object representing the images. 
- ''' + """ url = "{base_url}/api/scanning/v1/anchore/query/images/by_package?name={name}{version}{package_type}".format( - base_url=self.url, - name=name, - version="&version={}".format(version) if version else "", - package_type="&package_type={}".format(package_type) if package_type else "") + base_url=self.url, + name=name, + version="&version={}".format(version) if version else "", + package_type="&package_type={}".format(package_type) + if package_type + else "", + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -256,11 +295,14 @@ def _query_image(self, image, query_group="", query_type="", vendor_only=True): return [False, "cannot use input image string (no discovered imageDigest)"] url = "{base_url}/api/scanning/v1/anchore/images/{image_digest}/{query_group}/{query_type}{vendor_only}".format( - base_url=self.url, - image_digest=image_digest, - query_group=query_group, - query_type=query_type if query_type else '', - vendor_only="?vendor_only={}".format(vendor_only) if query_group == 'vuln' else '') + base_url=self.url, + image_digest=image_digest, + query_group=query_group, + query_type=query_type if query_type else "", + vendor_only="?vendor_only={}".format(vendor_only) + if query_group == "vuln" + else "", + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -269,12 +311,12 @@ def _query_image(self, image, query_group="", query_type="", vendor_only=True): return [True, res.json()] def delete_image(self, image, force=False): - '''**Description** + """**Description** Delete image from the scanner. **Arguments** - None - ''' + """ _, _, image_digest = self._discover_inputimage(image) if not image_digest: return [False, "cannot use input image string: no discovered imageDigest"] @@ -286,8 +328,10 @@ def delete_image(self, image, force=False): return [True, res.json()] - def check_image_evaluation(self, image, show_history=False, detail=False, tag=None, policy=None): - '''**Description** + def check_image_evaluation( + self, image, show_history=False, detail=False, tag=None, policy=None + ): + """**Description** Check the latest policy evaluation for an image **Arguments** @@ -299,23 +343,24 @@ def check_image_evaluation(self, image, show_history=False, detail=False, tag=No **Success Return Value** A JSON object representing the evaluation status. 
- ''' + """ itype, _, image_digest = self._discover_inputimage(image) if not image_digest: return [False, "could not get image record from anchore"] - if not tag and itype != 'tag': + if not tag and itype != "tag": return [False, "input image name is not a tag, and no --tag is specified"] thetag = tag if tag else image url = "{base_url}/api/scanning/v1/anchore/images/{image_digest}/check?history={history}&detail={detail}&tag={tag}{policy_id}" url = url.format( - base_url=self.url, - image_digest=image_digest, - history=str(show_history).lower(), - detail=str(detail).lower(), - tag=thetag, - policy_id=("&policyId=%s" % policy) if policy else "") + base_url=self.url, + image_digest=image_digest, + history=str(show_history).lower(), + detail=str(detail).lower(), + tag=thetag, + policy_id=("&policyId=%s" % policy) if policy else "", + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -324,7 +369,7 @@ def check_image_evaluation(self, image, show_history=False, detail=False, tag=No return [True, res.json()] def get_pdf_report(self, image, tag=None, date=None): - '''**Description** + """**Description** Get a pdf report of one image **Arguments** @@ -335,19 +380,20 @@ def get_pdf_report(self, image, tag=None, date=None): **Success Return Value** The pdf content - ''' + """ image_type, _, image_digest = self._discover_inputimage(image) if not image_digest: return [False, "could not get image record from anchore"] - if not tag and image_type != 'tag': + if not tag and image_type != "tag": return [False, "input image name is not a tag"] image_tag = tag if tag else image url = "{base_url}/api/scanning/v1/images/{image_digest}/report?tag={tag}{at}".format( - base_url=self.url, - image_digest=image_digest, - tag=image_tag, - at=("&at=%s" % date) if date else "") + base_url=self.url, + image_digest=image_digest, + tag=image_tag, + at=("&at=%s" % date) if date else "", + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -356,7 +402,7 @@ def get_pdf_report(self, image, tag=None, date=None): return [True, res.content] def get_latest_pdf_report_by_digest(self, image_digest, full_tag=None): - '''**Description** + """**Description** Get the latest pdf report of one image digest **Arguments** @@ -365,11 +411,12 @@ def get_latest_pdf_report_by_digest(self, image_digest, full_tag=None): **Success Return Value** The pdf content - ''' - url = "{base_url}/api/scanning/v1/images/{image_digest}/report?tag={tag}".format( - base_url=self.url, - image_digest=image_digest, - tag=full_tag) + """ + url = ( + "{base_url}/api/scanning/v1/images/{image_digest}/report?tag={tag}".format( + base_url=self.url, image_digest=image_digest, tag=full_tag + ) + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -378,7 +425,7 @@ def get_latest_pdf_report_by_digest(self, image_digest, full_tag=None): return [True, res.content] def import_image(self, infile, image_id, digest_id, image_name, sync=False): - '''**Description** + """**Description** Import an image archive **Arguments** @@ -388,18 +435,23 @@ def import_image(self, infile, image_id, digest_id, image_name, sync=False): **Success Return Value** If synchronous, A JSON object representing the image that was imported. 
- ''' + """ try: m = MultipartEncoder( - fields={'archive_file': (infile, open(infile, 'rb'), 'text/plain')} + fields={"archive_file": (infile, open(infile, "rb"), "text/plain")} ) if sync: url = self.url + "/api/scanning/v1/anchore/import/images" else: url = self.url + "/api/scanning/v1/import/images" - headers = {'Authorization': 'Bearer ' + self.token, 'Content-Type': m.content_type, - 'imageId': image_id, 'digestId': digest_id, 'imageName': image_name} + headers = { + "Authorization": "Bearer " + self.token, + "Content-Type": m.content_type, + "imageId": image_id, + "digestId": digest_id, + "imageName": image_name, + } res = self.http.post(url, data=m, headers=headers, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] @@ -410,7 +462,7 @@ def import_image(self, infile, image_id, digest_id, image_name, sync=False): print(err) def get_anchore_users_account(self): - '''**Description** + """**Description** Get the anchore user account. **Arguments** @@ -418,7 +470,7 @@ def get_anchore_users_account(self): **Success Return Value** A JSON object containing user account information. - ''' + """ url = self.url + "/api/scanning/v1/account" res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -427,7 +479,7 @@ def get_anchore_users_account(self): return [True, res.json()] def get_image_scan_result_by_id(self, image_id, full_tag_name, detail): - '''**Description** + """**Description** Get the anchore image scan result for an image id. **Arguments** @@ -437,21 +489,29 @@ def get_image_scan_result_by_id(self, image_id, full_tag_name, detail): **Success Return Value** A JSON object containing pass/fail status of image scan policy. - ''' + """ url = "{base_url}/api/scanning/v1/anchore/images/by_id/{image_id}/check?tag={full_tag_name}&detail={detail}".format( - base_url=self.url, - image_id=image_id, - full_tag_name=full_tag_name, - detail=detail) + base_url=self.url, + image_id=image_id, + full_tag_name=full_tag_name, + detail=detail, + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] - def add_registry(self, registry, registry_user, registry_pass, insecure=False, registry_type="docker_v2", - validate=True): - '''**Description** + def add_registry( + self, + registry, + registry_user, + registry_pass, + insecure=False, + registry_type="docker_v2", + validate=True, + ): + """**Description** Add image registry **Arguments** @@ -464,36 +524,54 @@ def add_registry(self, registry, registry_user, registry_pass, insecure=False, r **Success Return Value** A JSON object representing the registry. 
- ''' - registry_types = ['docker_v2', 'awsecr'] + """ + registry_types = ["docker_v2", "awsecr"] if registry_type and registry_type not in registry_types: - return [False, "input registry type not supported (supported registry_types: " + str(registry_types)] + return [ + False, + "input registry type not supported (supported registry_types: " + + str(registry_types), + ] if self._registry_string_is_valid(registry): - return [False, - "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"] + return [ + False, + "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional", + ] if not registry_type: registry_type = self._get_registry_type(registry) payload = { - 'registry': registry, - 'registry_user': registry_user, - 'registry_pass': registry_pass, - 'registry_type': registry_type, - 'registry_verify': not insecure} - url = "{base_url}/api/scanning/v1/anchore/registries?validate={validate}".format( - base_url=self.url, - validate=validate) - - res = self.http.post(url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify) + "registry": registry, + "registry_user": registry_user, + "registry_pass": registry_pass, + "registry_type": registry_type, + "registry_verify": not insecure, + } + url = ( + "{base_url}/api/scanning/v1/anchore/registries?validate={validate}".format( + base_url=self.url, validate=validate + ) + ) + + res = self.http.post( + url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] - def update_registry(self, registry, registry_user, registry_pass, insecure=False, registry_type="docker_v2", - validate=True): - '''**Description** + def update_registry( + self, + registry, + registry_user, + registry_pass, + insecure=False, + registry_type="docker_v2", + validate=True, + ): + """**Description** Update an existing image registry. **Arguments** @@ -506,39 +584,45 @@ def update_registry(self, registry, registry_user, registry_pass, insecure=False **Success Return Value** A JSON object representing the registry. - ''' + """ if self._registry_string_is_valid(registry): - return [False, - "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"] + return [ + False, + "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional", + ] payload = { - 'registry': registry, - 'registry_user': registry_user, - 'registry_pass': registry_pass, - 'registry_type': registry_type, - 'registry_verify': not insecure} + "registry": registry, + "registry_user": registry_user, + "registry_pass": registry_pass, + "registry_type": registry_type, + "registry_verify": not insecure, + } url = "{base_url}/api/scanning/v1/anchore/registries/{registry}?validate={validate}".format( - base_url=self.url, - registry=registry, - validate=validate) + base_url=self.url, registry=registry, validate=validate + ) - res = self.http.put(url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify) + res = self.http.put( + url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] def delete_registry(self, registry): - '''**Description** + """**Description** Delete an existing image registry **Arguments** - registry: Full hostname/port of registry. Eg. myrepo.example.com:5000 - ''' + """ # do some input string checking if re.match(".*\\/.*", registry): - return [False, - "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"] + return [ + False, + "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional", + ] url = self.url + "/api/scanning/v1/anchore/registries/" + registry res = self.http.delete(url, headers=self.hdrs, verify=self.ssl_verify) @@ -548,7 +632,7 @@ def delete_registry(self, registry): return [True, res.json()] def list_registry(self): - '''**Description** + """**Description** List all current image registries **Arguments** @@ -556,7 +640,7 @@ def list_registry(self): **Success Return Value** A JSON object representing the list of registries. - ''' + """ url = self.url + "/api/scanning/v1/anchore/registries" res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -565,7 +649,7 @@ def list_registry(self): return [True, res.json()] def get_registry(self, registry): - '''**Description** + """**Description** Find the registry and return its json description **Arguments** @@ -573,10 +657,12 @@ def get_registry(self, registry): **Success Return Value** A JSON object representing the registry. - ''' + """ if self._registry_string_is_valid(registry): - return [False, - "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"] + return [ + False, + "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional", + ] url = self.url + "/api/scanning/v1/anchore/registries/" + registry res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) @@ -594,7 +680,7 @@ def _registry_string_is_valid(self, registry): return re.match(".*\\/.*", registry) def add_repo(self, repo, autosubscribe=True, lookuptag=None): - '''**Description** + """**Description** Add a repository **Arguments** @@ -604,12 +690,13 @@ def add_repo(self, repo, autosubscribe=True, lookuptag=None): **Success Return Value** A JSON object representing the repo. 
- ''' + """ url = "{base_url}/api/scanning/v1/anchore/repositories?repository={repo}&autosubscribe={autosubscribe}{lookuptag}".format( - base_url=self.url, - repo=repo, - autosubscribe=autosubscribe, - lookuptag="&lookuptag={}".format(lookuptag) if lookuptag else "") + base_url=self.url, + repo=repo, + autosubscribe=autosubscribe, + lookuptag="&lookuptag={}".format(lookuptag) if lookuptag else "", + ) res = self.http.post(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -618,34 +705,34 @@ def add_repo(self, repo, autosubscribe=True, lookuptag=None): return [True, res.json()] def watch_repo(self, repo): - '''**Description** + """**Description** Instruct engine to start automatically watching the repo for image updates **Arguments** - repo: Input repository can be in the following formats: registry/repo - ''' - return self.activate_subscription('repo_update', repo) + """ + return self.activate_subscription("repo_update", repo) def unwatch_repo(self, repo): - '''**Description** + """**Description** Instruct engine to stop automatically watching the repo for image updates **Arguments** - repo: Input repository can be in the following formats: registry/repo - ''' - return self.deactivate_subscription('repo_update', repo) + """ + return self.deactivate_subscription("repo_update", repo) def delete_repo(self, repo): - '''**Description** + """**Description** Delete a repository from the watch list (does not delete already analyzed images) **Arguments** - repo: Input repository can be in the following formats: registry/repo - ''' - return self.delete_subscription('repo_update', repo) + """ + return self.delete_subscription("repo_update", repo) def list_repos(self): - '''**Description** + """**Description** List added repositories **Arguments** @@ -653,11 +740,11 @@ def list_repos(self): **Success Return Value** A JSON object representing the list of repositories. - ''' + """ return self.get_subscriptions("repo_update") def get_repo(self, repo): - '''**Description** + """**Description** Get a repository **Arguments** @@ -665,11 +752,11 @@ def get_repo(self, repo): **Success Return Value** A JSON object representing the registry. - ''' + """ return self.get_subscriptions("repo_update", repo) def add_policy(self, name, rules, comment="", bundleid=None): - '''**Description** + """**Description** Create a new policy **Arguments** @@ -680,17 +767,12 @@ def add_policy(self, name, rules, comment="", bundleid=None): **Success Return Value** A JSON object containing the policy description. 
- ''' - policy = { - 'name': name, - 'comment': comment, - 'rules': rules, - 'version': '1_0' - } + """ + policy = {"name": name, "comment": comment, "rules": rules, "version": "1_0"} if bundleid: - policy['policyBundleId'] = bundleid + policy["policyBundleId"] = bundleid - url = self.url + '/api/scanning/v1/policies' + url = self.url + "/api/scanning/v1/policies" data = json.dumps(policy) res = self.http.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): @@ -700,8 +782,8 @@ def add_policy(self, name, rules, comment="", bundleid=None): def list_policy_bundles(self, detail=False): url = "{base_url}/api/scanning/v1/anchore/policies?detail={detail}".format( - base_url=self.url, - detail=str(detail)) + base_url=self.url, detail=str(detail) + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] @@ -709,7 +791,7 @@ def list_policy_bundles(self, detail=False): return [True, res.json()] def list_policies(self, bundleid=None): - '''**Description** + """**Description** List the current set of scanning policies. **Arguments** @@ -717,10 +799,10 @@ def list_policies(self, bundleid=None): **Success Return Value** A JSON object containing the list of policies. - ''' - url = self.url + '/api/scanning/v1/policies' + """ + url = self.url + "/api/scanning/v1/policies" if bundleid: - url += '?bundleId=' + bundleid + url += "?bundleId=" + bundleid res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -729,7 +811,7 @@ def list_policies(self, bundleid=None): return [True, res.json()] def get_policy(self, policyid, bundleid=None): - '''**Description** + """**Description** Retrieve the policy with the given id in the targeted policy bundle **Arguments** @@ -738,7 +820,7 @@ def get_policy(self, policyid, bundleid=None): **Success Return Value** A JSON object containing the policy description. - ''' + """ ok, policies = self.list_policies(bundleid) if not ok: return [ok, policies] @@ -750,7 +832,7 @@ def get_policy(self, policyid, bundleid=None): return [False, "Policy not found"] def update_policy(self, policyid, policy_description): - '''**Description** + """**Description** Update the policy with the given id **Arguments** @@ -759,8 +841,8 @@ def update_policy(self, policyid, policy_description): **Success Return Value** A JSON object containing the policy description. - ''' - url = self.url + '/api/scanning/v1/policies/' + policyid + """ + url = self.url + "/api/scanning/v1/policies/" + policyid data = json.dumps(policy_description) res = self.http.put(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): @@ -769,16 +851,16 @@ def update_policy(self, policyid, policy_description): return [True, res.json()] def delete_policy(self, policyid, bundleid=None): - '''**Description** + """**Description** Delete the policy with the given id in the targeted policy Bundle **Arguments** - policyid: Unique identifier associated with this policy. - policy_description: A dictionary with the policy description. 
- ''' - url = self.url + '/api/scanning/v1/policies/' + policyid + """ + url = self.url + "/api/scanning/v1/policies/" + policyid if bundleid: - url += '?bundleId=' + bundleid + url += "?bundleId=" + bundleid res = self.http.delete(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -786,9 +868,16 @@ def delete_policy(self, policyid, bundleid=None): return [True, res.text] - def add_alert(self, name, description=None, scope="", triggers={'failed': True, 'unscanned': True}, - enabled=False, notification_channels=[]): - ''' + def add_alert( + self, + name, + description=None, + scope="", + triggers={"failed": True, "unscanned": True}, + enabled=False, + notification_channels=[], + ): + """ Create a new alert **Warning**: `add_alert` is deprecated and will be removed soon, use `add_runtime_alert` or `add_repository_alert` from `ScanningAlertsClientV1` instead. @@ -807,19 +896,19 @@ def add_alert(self, name, description=None, scope="", triggers={'failed': True, .. deprecated:: `add_alert` is deprecated and will be removed soon, use `add_runtime_alert` or `add_repository_alert` from `ScanningAlertsClientV1` instead. - ''' + """ alert = { - 'name': name, - 'description': description, - 'triggers': triggers, - 'scope': scope, - 'enabled': enabled, - 'autoscan': True, - 'notificationChannelIds': notification_channels, + "name": name, + "description": description, + "triggers": triggers, + "scope": scope, + "enabled": enabled, + "autoscan": True, + "notificationChannelIds": notification_channels, } - url = self.url + '/api/scanning/v1/alerts' + url = self.url + "/api/scanning/v1/alerts" data = json.dumps(alert) res = self.http.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): @@ -828,7 +917,7 @@ def add_alert(self, name, description=None, scope="", triggers={'failed': True, return [True, res.json()] def update_alert(self, alertid, alert_description): - ''' + """ Update the alert with the given id. **Warning**: `update_alert` is deprecated and will be removed soon, use `update_runtime_alert` or `update_repository_alert` from `ScanningAlertsClientV1` instead. @@ -843,8 +932,8 @@ def update_alert(self, alertid, alert_description): .. deprecated:: `update_alert` is deprecated and will be removed soon, use `update_runtime_alert` or `update_repository_alert` from `ScanningAlertsClientV1` instead. - ''' - url = self.url + '/api/scanning/v1/alerts/' + alertid + """ + url = self.url + "/api/scanning/v1/alerts/" + alertid data = json.dumps(alert_description) res = self.http.put(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): @@ -853,7 +942,7 @@ def update_alert(self, alertid, alert_description): return [True, res.json()] def get_subscriptions(self, subscription_type=None, subscription_key=None): - '''**Description** + """**Description** Get the list of subscriptions **Arguments** @@ -863,7 +952,7 @@ def get_subscriptions(self, subscription_type=None, subscription_key=None): - 'vuln_update': Receive notification when vulnerabilities are added, removed or modified - 'repo_update': Receive notification when a repo is updated - subscription_key: Fully qualified name of tag to subscribe to. Eg. docker.io/library/alpine:latest - ''' + """ url = self.url + "/api/scanning/v1/anchore/subscriptions/" if subscription_key or subscription_type: url += "?" 
@@ -878,7 +967,7 @@ def get_subscriptions(self, subscription_type=None, subscription_key=None): return [True, res.json()] def activate_subscription(self, subscription_type, subscription_key): - '''**Description** + """**Description** Activate a subscription **Arguments** @@ -888,11 +977,11 @@ def activate_subscription(self, subscription_type, subscription_key): - 'vuln_update': Receive notification when vulnerabilities are added, removed or modified - 'repo_update': Receive notification when a repo is updated - subscription_key: Fully qualified name of tag to subscribe to. Eg. docker.io/library/alpine:latest - ''' + """ return self._update_subscription(subscription_type, subscription_key, True) def deactivate_subscription(self, subscription_type, subscription_key): - '''**Description** + """**Description** Deactivate a subscription **Arguments** @@ -902,11 +991,11 @@ def deactivate_subscription(self, subscription_type, subscription_key): - 'vuln_update': Receive notification when vulnerabilities are added, removed or modified - 'repo_update': Receive notification when a repo is updated - subscription_key: Fully qualified name of tag to subscribe to. Eg. docker.io/library/alpine:latest - ''' + """ return self._update_subscription(subscription_type, subscription_key, False) def delete_subscription(self, subscription_type, subscription_key): - '''**Description** + """**Description** Delete a subscription **Arguments** @@ -916,7 +1005,7 @@ def delete_subscription(self, subscription_type, subscription_key): - 'vuln_update': Receive notification when vulnerabilities are added, removed or modified - 'repo_update': Receive notification when a repo is updated - subscription_key: Fully qualified name of tag to subscribe to. Eg. docker.io/library/alpine:latest - ''' + """ try: url = self._subscription_url(subscription_type, subscription_key) except Exception as err: @@ -934,8 +1023,14 @@ def _update_subscription(self, subscription_type, subscription_key, activate): except Exception as err: return [False, err] - payload = {'active': activate, 'subscription_key': subscription_key, 'subscription_type': subscription_type} - res = self.http.put(url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify) + payload = { + "active": activate, + "subscription_key": subscription_key, + "subscription_type": subscription_type, + } + res = self.http.put( + url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] @@ -955,7 +1050,7 @@ def _subscription_url(self, subscription_type, subscription_key): return self.url + "/api/scanning/v1/anchore/subscriptions/" + id def list_subscription(self): - '''**Description** + """**Description** List all subscriptions **Arguments** @@ -963,11 +1058,13 @@ def list_subscription(self): **Success Return Value** A JSON object representing the list of subscriptions. - ''' + """ return self.get_subscriptions() - def list_runtime(self, scope="", skip_policy_evaluation=True, start_time=None, end_time=None): - '''**Description** + def list_runtime( + self, scope="", skip_policy_evaluation=True, start_time=None, end_time=None + ): + """**Description** List runtime containers **Arguments** @@ -978,18 +1075,15 @@ def list_runtime(self, scope="", skip_policy_evaluation=True, start_time=None, e **Success Return Value** A JSON object representing the list of runtime containers. 
- ''' - containers = { - 'scope': scope, - 'skipPolicyEvaluation': skip_policy_evaluation - } + """ + containers = {"scope": scope, "skipPolicyEvaluation": skip_policy_evaluation} if start_time or end_time: - containers['time'] = {} - containers['time']['from'] = int(start_time * 100000) if start_time else 0 + containers["time"] = {} + containers["time"]["from"] = int(start_time * 100000) if start_time else 0 end_time = end_time if end_time else time.time() - containers['time']['to'] = int(end_time * 1000000) + containers["time"]["to"] = int(end_time * 1000000) - url = self.url + '/api/scanning/v1/query/containers' + url = self.url + "/api/scanning/v1/query/containers" data = json.dumps(containers) res = self.http.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): @@ -1001,11 +1095,11 @@ def _discover_inputimage_format(self, input_string): itype = None if re.match("^sha256:[0-9a-fA-F]{64}", input_string): - itype = 'imageDigest' + itype = "imageDigest" elif re.match("[0-9a-fA-F]{64}", input_string): - itype = 'imageid' + itype = "imageid" else: - itype = 'tag' + itype = "tag" return itype @@ -1029,9 +1123,9 @@ def _discover_inputimage(self, input_string): ok, ret = self.get_image(input_string) if ok: image_record = ret[0] - urldigest = image_record.get('imageDigest', None) - for image_detail in image_record.get('image_detail', []): - if input_string == image_detail.get('imageId', ''): + urldigest = image_record.get("imageDigest", None) + for image_detail in image_record.get("image_detail", []): + if input_string == image_detail.get("imageId", ""): ret_type = "imageid" break @@ -1044,10 +1138,12 @@ def get_vulnerability_details(self, id): url = f"{self.url}/api/scanning/v1/anchore/query/vulnerabilities" params = { - "id": id, + "id": id, } - res = self.http.get(url, params=params, headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + url, params=params, headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] @@ -1063,9 +1159,9 @@ def add_vulnerability_exception_bundle(self, name, comment=""): url = f"{self.url}/api/scanning/v1/vulnexceptions" params = { - "version": "1_0", - "name": name, - "comment": comment, + "version": "1_0", + "name": name, + "comment": comment, } data = json.dumps(params) @@ -1076,7 +1172,6 @@ def add_vulnerability_exception_bundle(self, name, comment=""): return [True, res.json()] def delete_vulnerability_exception_bundle(self, id): - url = self.url + f"/api/scanning/v1/vulnexceptions/{id}" res = self.http.delete(url, headers=self.hdrs, verify=self.ssl_verify) @@ -1089,10 +1184,12 @@ def list_vulnerability_exception_bundles(self): url = f"{self.url}/api/scanning/v1/vulnexceptions" params = { - "bundleId": "default", + "bundleId": "default", } - res = self.http.get(url, params=params, headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + url, params=params, headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] @@ -1102,10 +1199,12 @@ def get_vulnerability_exception_bundle(self, bundle): url = f"{self.url}/api/scanning/v1/vulnexceptions/{bundle}" params = { - "bundleId": "default", + "bundleId": "default", } - res = self.http.get(url, params=params, headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + url, params=params, headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] @@ -1118,11 +1217,11 @@ def 
add_vulnerability_exception(self, bundle, cve, note=None, expiration_date=No url = f"{self.url}/api/scanning/v1/vulnexceptions/{bundle}/vulnerabilities" params = { - "gate": "vulnerabilities", - "is_busy": False, - "trigger_id": f"{cve}+*", - "expiration_date": int(expiration_date) if expiration_date else None, - "notes": note, + "gate": "vulnerabilities", + "is_busy": False, + "trigger_id": f"{cve}+*", + "expiration_date": int(expiration_date) if expiration_date else None, + "notes": note, } data = json.dumps(params) @@ -1138,31 +1237,41 @@ def delete_vulnerability_exception(self, bundle, id): url = f"{self.url}/api/scanning/v1/vulnexceptions/{bundle}/vulnerabilities/{id}" params = { - "bundleId": "default", + "bundleId": "default", } - res = self.http.delete(url, params=params, headers=self.hdrs, verify=self.ssl_verify) + res = self.http.delete( + url, params=params, headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, None] - def update_vulnerability_exception(self, bundle, id, cve, enabled, note, expiration_date): + def update_vulnerability_exception( + self, bundle, id, cve, enabled, note, expiration_date + ): url = f"{self.url}/api/scanning/v1/vulnexceptions/{bundle}/vulnerabilities/{id}" data = { - "id": id, - "gate": "vulnerabilities", - "trigger_id": f"{cve}+*", - "enabled": enabled, - "notes": note, - "expiration_date": int(expiration_date) if expiration_date else None, + "id": id, + "gate": "vulnerabilities", + "trigger_id": f"{cve}+*", + "enabled": enabled, + "notes": note, + "expiration_date": int(expiration_date) if expiration_date else None, } params = { - "bundleId": "default", + "bundleId": "default", } - res = self.http.put(url, data=json.dumps(data), params=params, headers=self.hdrs, verify=self.ssl_verify) + res = self.http.put( + url, + data=json.dumps(data), + params=params, + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] @@ -1187,27 +1296,25 @@ def download_cve_report_csv(self, vuln_type="os", scope_type="static"): url = f"{self.url}/api/scanning/v1/reports/csv" params = { - "queryType": "vuln", - "scopeType": scope_type, - "staticScope": { - "registry": "", - "repository": "", - "tag": "" - }, - "runtimeScope": {}, - "imageQueryFilter": {"vType": vuln_type}, - "offset": 0, - "limit": 100000 + "queryType": "vuln", + "scopeType": scope_type, + "staticScope": {"registry": "", "repository": "", "tag": ""}, + "runtimeScope": {}, + "imageQueryFilter": {"vType": vuln_type}, + "offset": 0, + "limit": 100000, } - res = self.http.post(url, data=json.dumps(params), headers=self.hdrs, verify=self.ssl_verify) + res = self.http.post( + url, data=json.dumps(params), headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.content.decode("utf-8")] def get_image_scanning_results(self, image_name, policy_id=None): - ''' + """ Args: image_name (str): Image name to retrieve the scanning results from policy_id (str): Policy ID to check against. If not specified, will check against all policies. @@ -1217,7 +1324,7 @@ def get_image_scanning_results(self, image_name, policy_id=None): The first parameter, if true, means that the result is correct, while if false, means that there's been an error. The second parameter will hold the response of the API call. 
- ''' + """ try: ok, res = self.get_image(image_name) if not ok: @@ -1226,62 +1333,79 @@ def get_image_scanning_results(self, image_name, policy_id=None): image_digest = res[0]["imageDigest"] image_tag = res[0]["image_detail"][0]["fulltag"] except RetryError: - return [False, "could not retrieve image digest for the given image name, " - "ensure that the image has been scanned"] + return [ + False, + "could not retrieve image digest for the given image name, " + "ensure that the image has been scanned", + ] url = f"{self.url}/api/scanning/v1/images/{image_digest}/policyEvaluation" params = { - "tag": image_tag, + "tag": image_tag, } - res = self.http.get(url, headers=self.hdrs, params=params, verify=self.ssl_verify) + res = self.http.get( + url, headers=self.hdrs, params=params, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] json_res = res.json() result = { - "image_digest": json_res["imageDigest"], - "image_id": json_res["imageId"], - "status": json_res["status"], - "image_tag": image_tag, - "total_stop": json_res["nStop"], - "total_warn": json_res["nWarn"], - "last_evaluation": datetime.utcfromtimestamp(json_res["at"]), - "policy_id": "*", - "policy_name": "All policies", - "warn_results": [], - "stop_results": [] + "image_digest": json_res["imageDigest"], + "image_id": json_res["imageId"], + "status": json_res["status"], + "image_tag": image_tag, + "total_stop": json_res["nStop"], + "total_warn": json_res["nWarn"], + "last_evaluation": datetime.utcfromtimestamp(json_res["at"]), + "policy_id": "*", + "policy_name": "All policies", + "warn_results": [], + "stop_results": [], } if policy_id: - policy_results = [result for result in json_res["results"] if result["policyId"] == policy_id] + policy_results = [ + result + for result in json_res["results"] + if result["policyId"] == policy_id + ] if policy_results: filtered_result_by_policy_id = policy_results[0] result["policy_id"] = filtered_result_by_policy_id["policyId"] result["policy_name"] = filtered_result_by_policy_id["policyName"] result["total_stop"] = filtered_result_by_policy_id["nStop"] result["total_warn"] = filtered_result_by_policy_id["nWarn"] - result["warn_results"] = [rule_result["checkOutput"] - for gate_result in filtered_result_by_policy_id["gateResults"] - for rule_result in gate_result["ruleResults"] - if rule_result["gateAction"] == "warn"] - result["stop_results"] = [rule_result["checkOutput"] - for gate_result in filtered_result_by_policy_id["gateResults"] - for rule_result in gate_result["ruleResults"] - if rule_result["gateAction"] == "stop"] + result["warn_results"] = [ + rule_result["checkOutput"] + for gate_result in filtered_result_by_policy_id["gateResults"] + for rule_result in gate_result["ruleResults"] + if rule_result["gateAction"] == "warn" + ] + result["stop_results"] = [ + rule_result["checkOutput"] + for gate_result in filtered_result_by_policy_id["gateResults"] + for rule_result in gate_result["ruleResults"] + if rule_result["gateAction"] == "stop" + ] else: return [False, "the specified policy ID doesn't exist"] else: - result["warn_results"] = [rule_result["checkOutput"] - for result in json_res["results"] - for gate_result in result["gateResults"] - for rule_result in gate_result["ruleResults"] - if rule_result["gateAction"] == "warn"] - result["stop_results"] = [rule_result["checkOutput"] - for result in json_res["results"] - for gate_result in result["gateResults"] - for rule_result in gate_result["ruleResults"] - if rule_result["gateAction"] == "stop"] + 
result["warn_results"] = [ + rule_result["checkOutput"] + for result in json_res["results"] + for gate_result in result["gateResults"] + for rule_result in gate_result["ruleResults"] + if rule_result["gateAction"] == "warn" + ] + result["stop_results"] = [ + rule_result["checkOutput"] + for result in json_res["results"] + for gate_result in result["gateResults"] + for rule_result in gate_result["ruleResults"] + if rule_result["gateAction"] == "stop" + ] return [True, result] diff --git a/sdcclient/_secure.py b/sdcclient/_secure.py index 47289527..73de768e 100644 --- a/sdcclient/_secure.py +++ b/sdcclient/_secure.py @@ -2,15 +2,28 @@ import time from sdcclient._common import _SdcCommon -from sdcclient.secure import FalcoRulesFilesClientOld, PolicyEventsClientV1, PolicyEventsClientOld, PolicyClientV2 - - -class SdSecureClient(FalcoRulesFilesClientOld, - PolicyEventsClientV1, - PolicyEventsClientOld, - PolicyClientV2, - _SdcCommon): - def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None): +from sdcclient.secure import ( + FalcoRulesFilesClientOld, + PolicyEventsClientV1, + PolicyEventsClientOld, + PolicyClientV2, +) + + +class SdSecureClient( + FalcoRulesFilesClientOld, + PolicyEventsClientV1, + PolicyEventsClientOld, + PolicyClientV2, + _SdcCommon, +): + def __init__( + self, + token="", + sdc_url="https://secure.sysdig.com", + ssl_verify=True, + custom_headers=None, + ): super(SdSecureClient, self).__init__(token, sdc_url, ssl_verify, custom_headers) self.product = "SDS" @@ -18,16 +31,20 @@ def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=Tru @property def policy_v2(self): - '''**Description** - True if policy V2 API is available - ''' + """**Description** + True if policy V2 API is available + """ if self._policy_v2 is None: - res = self.http.get(self.url + '/api/v2/policies/default', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/v2/policies/default", + headers=self.hdrs, + verify=self.ssl_verify, + ) self._policy_v2 = res.status_code != 404 return self._policy_v2 def list_rules(self): - '''**Description** + """**Description** Returns the list of rules in the system. These are grouped by name and do not necessarily represent individual rule objects, as multiple rules can have the same name. @@ -37,12 +54,16 @@ def list_rules(self): **Success Return Value** A JSON object representing the list of rules. - ''' - res = self.http.get(self.url + '/api/secure/rules/summaries', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/secure/rules/summaries", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_rules_group(self, name): - '''**Description** + """**Description** Retrieve a group of all rules having the given name. This is used to show how a base rule is modified by later rules that override/append to the rule. @@ -52,13 +73,16 @@ def get_rules_group(self, name): **Success Return Value** A JSON object representing the list of rules. 
- ''' - res = self.http.get(self.url + '/api/secure/rules/groups?name={}'.format(name), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/secure/rules/groups?name={}".format(name), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_rule_id(self, id): - '''**Description** + """**Description** Retrieve info about a single rule **Arguments** @@ -66,12 +90,16 @@ def get_rule_id(self, id): **Success Return Value** A JSON object representing the rule. - ''' - res = self.http.get(self.url + '/api/secure/rules/{}'.format(id), headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/secure/rules/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def add_rule(self, name, details={}, description="", tags=[]): - '''**Description** + """**Description** Create a new rule **Arguments** @@ -82,19 +110,23 @@ def add_rule(self, name, details={}, description="", tags=[]): **Success Return Value** A JSON object representing the rule. - ''' + """ rule = { "name": name, "description": description, "details": details, - "tags": tags + "tags": tags, } - res = self.http.post(self.url + '/api/secure/rules', data=json.dumps(rule), headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/secure/rules", + data=json.dumps(rule), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def update_rule(self, id, details={}, description="", tags=[]): - '''**Description** + """**Description** Update info associated with a rule **Arguments** @@ -105,24 +137,28 @@ def update_rule(self, id, details={}, description="", tags=[]): **Success Return Value** A JSON object representing the rule. - ''' + """ ok, res = self.get_rule_id(id) if not ok: return [False, res] rule = res if details: - rule['details'] = details + rule["details"] = details if description: - rule['description'] = description + rule["description"] = description if tags: - rule['tags'] = tags - res = self.http.put(self.url + '/api/secure/rules/{}'.format(id), data=json.dumps(rule), headers=self.hdrs, - verify=self.ssl_verify) + rule["tags"] = tags + res = self.http.put( + self.url + "/api/secure/rules/{}".format(id), + data=json.dumps(rule), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def delete_rule(self, id): - '''**Description** + """**Description** Delete the rule with given id. **Arguments** @@ -130,12 +166,16 @@ def delete_rule(self, id): **Success Return Value** A JSON object representing the rule. - ''' - res = self.http.delete(self.url + '/api/secure/rules/{}'.format(id), headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.delete( + self.url + "/api/secure/rules/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def list_falco_macros(self): - '''**Description** + """**Description** Returns the list of macros in the system. These are grouped by name and do not necessarily represent individual macro objects, as multiple macros can have the same name. @@ -145,12 +185,16 @@ def list_falco_macros(self): **Success Return Value** A JSON object representing the list of falco macros. 
- ''' - res = self.http.get(self.url + '/api/secure/falco/macros/summaries', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/secure/falco/macros/summaries", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_falco_macros_group(self, name): - '''**Description** + """**Description** Retrieve a group of all falco macros having the given name. This is used to show how a base macro is modified by later macros that override/append to the macro. @@ -160,13 +204,16 @@ **Success Return Value** A JSON object representing the list of falco macros. - ''' - res = self.http.get(self.url + '/api/secure/falco/macros/groups?name={}'.format(name), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/secure/falco/macros/groups?name={}".format(name), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_falco_macro_id(self, id): - '''**Description** + """**Description** Retrieve info about a single falco macro **Arguments** @@ -174,13 +221,16 @@ **Success Return Value** A JSON object representing the falco macro. - ''' - res = self.http.get(self.url + '/api/secure/falco/macros/{}'.format(id), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/secure/falco/macros/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def add_falco_macro(self, name, condition, append=False): - '''**Description** + """**Description** Create a new macro **Arguments** @@ -189,21 +239,22 @@ **Success Return Value** A JSON object representing the falco macro. - ''' + """ macro = { "name": name, - "condition": { - "components": [], - "condition": condition - }, - "append": append + "condition": {"components": [], "condition": condition}, + "append": append, } - res = self.http.post(self.url + '/api/secure/falco/macros', data=json.dumps(macro), headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/secure/falco/macros", + data=json.dumps(macro), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def update_falco_macro(self, id, condition): - '''**Description** + """**Description** Update info associated with a macro **Arguments** @@ -212,19 +263,23 @@ **Success Return Value** A JSON object representing the macro. - ''' + """ ok, res = self.get_falco_macro_id(id) if not ok: return [False, res] macro = res - macro['condition']['condition'] = condition + macro["condition"]["condition"] = condition - res = self.http.put(self.url + '/api/secure/falco/macros/{}'.format(id), data=json.dumps(macro), - headers=self.hdrs, verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/secure/falco/macros/{}".format(id), + data=json.dumps(macro), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def delete_falco_macro(self, id): - '''**Description** + """**Description** Delete the macro with given id. **Arguments** @@ -232,13 +287,16 @@ **Success Return Value** A JSON object representing the macro. 
- ''' - res = self.http.delete(self.url + '/api/secure/falco/macros/{}'.format(id), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.delete( + self.url + "/api/secure/falco/macros/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def list_falco_lists(self): - '''**Description** + """**Description** Returns the list of falco lists in the system. These are grouped by name and do not necessarily represent individual falco list objects, as multiple falco lists can have the same name. @@ -248,12 +306,16 @@ def list_falco_lists(self): **Success Return Value** A JSON object representing the list of falco lists. - ''' - res = self.http.get(self.url + '/api/secure/falco/lists/summaries', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/secure/falco/lists/summaries", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_falco_lists_group(self, name): - '''**Description** + """**Description** Retrieve a group of all falco lists having the given name. This is used to show how a base list is modified by later lists that override/append to the list. @@ -263,13 +325,16 @@ def get_falco_lists_group(self, name): **Success Return Value** A JSON object representing the list of falco lists. - ''' - res = self.http.get(self.url + '/api/secure/falco/lists/groups?name={}'.format(name), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/secure/falco/lists/groups?name={}".format(name), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_falco_list_id(self, id): - '''**Description** + """**Description** Retrieve info about a single falco list **Arguments** @@ -277,13 +342,16 @@ def get_falco_list_id(self, id): **Success Return Value** A JSON object representing the falco list. - ''' - res = self.http.get(self.url + '/api/secure/falco/lists/{}'.format(id), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/secure/falco/lists/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def add_falco_list(self, name, items, append=False): - '''**Description** + """**Description** Create a new list **Arguments** @@ -292,20 +360,18 @@ def add_falco_list(self, name, items, append=False): **Success Return Value** A JSON object representing the falco list. - ''' - flist = { - "name": name, - "items": { - "items": items - }, - "append": append - } - res = self.http.post(self.url + '/api/secure/falco/lists', data=json.dumps(flist), headers=self.hdrs, - verify=self.ssl_verify) + """ + flist = {"name": name, "items": {"items": items}, "append": append} + res = self.http.post( + self.url + "/api/secure/falco/lists", + data=json.dumps(flist), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def update_falco_list(self, id, items): - '''**Description** + """**Description** Update info associated with a list **Arguments** @@ -314,19 +380,23 @@ def update_falco_list(self, id, items): **Success Return Value** A JSON object representing the list. 
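A quick illustrative sketch for the falco list endpoints above; the list name and items are invented, and the {"items": {"items": [...]}} payload shape is the one built by add_falco_list itself: # assumes `client` is an authenticated SdSecureClient, as in the earlier sketch — ok, flist = client.add_falco_list("trusted_shells", ["bash", "zsh"]) — if ok: ok, flist = client.update_falco_list(flist["id"], ["bash", "zsh", "fish"])  # update replaces the stored items wholesale; assumes the create call echoed back the object with its id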
- ''' + """ ok, res = self.get_falco_list_id(id) if not ok: return [False, res] flist = res - flist['items']['items'] = items + flist["items"]["items"] = items - res = self.http.put(self.url + '/api/secure/falco/lists/{}'.format(id), data=json.dumps(flist), - headers=self.hdrs, verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/secure/falco/lists/{}".format(id), + data=json.dumps(flist), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def delete_falco_list(self, id): - '''**Description** + """**Description** Delete the list with given id. **Arguments** @@ -334,14 +404,23 @@ def delete_falco_list(self, id): **Success Return Value** A JSON object representing the list. - ''' - res = self.http.delete(self.url + '/api/secure/falco/lists/{}'.format(id), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.delete( + self.url + "/api/secure/falco/lists/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) - def add_compliance_task(self, name, module_name='docker-bench-security', schedule='06:00:00Z/PT12H', scope=None, - enabled=True): - '''**Description** + def add_compliance_task( + self, + name, + module_name="docker-bench-security", + schedule="06:00:00Z/PT12H", + scope=None, + enabled=True, + ): + """**Description** Add a new compliance task. **Arguments** @@ -353,21 +432,25 @@ def add_compliance_task(self, name, module_name='docker-bench-security', schedul **Success Return Value** A JSON representation of the compliance task. - ''' + """ task = { "id": None, "name": name, "moduleName": module_name, "enabled": enabled, "scope": scope, - "schedule": schedule + "schedule": schedule, } - res = self.http.post(self.url + '/api/complianceTasks', data=json.dumps(task), headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/complianceTasks", + data=json.dumps(task), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def list_compliance_tasks(self): - '''**Description** + """**Description** Get the list of all compliance tasks. **Arguments** @@ -375,12 +458,14 @@ def list_compliance_tasks(self): **Success Return Value** A JSON list with the representation of each compliance task. - ''' - res = self.http.get(self.url + '/api/complianceTasks', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/complianceTasks", headers=self.hdrs, verify=self.ssl_verify + ) return self._request_result(res) def get_compliance_task(self, id): - '''**Description** + """**Description** Get a compliance task. **Arguments** @@ -388,12 +473,18 @@ def get_compliance_task(self, id): **Success Return Value** A JSON representation of the compliance task. - ''' - res = self.http.get(self.url + '/api/complianceTasks/{}'.format(id), headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/complianceTasks/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) - def update_compliance_task(self, id, name=None, module_name=None, schedule=None, scope=None, enabled=None): - '''**Description** + def update_compliance_task( + self, id, name=None, module_name=None, schedule=None, scope=None, enabled=None + ): + """**Description** Update an existing compliance task. **Arguments** @@ -406,40 +497,47 @@ def update_compliance_task(self, id, name=None, module_name=None, schedule=None, **Success Return Value** A JSON representation of the compliance task. 
- ''' + """ ok, res = self.get_compliance_task(id) if not ok: return ok, res task = res options = { - 'name': name, - 'moduleName': module_name, - 'schedule': schedule, - 'scope': scope, - 'enabled': enabled + "name": name, + "moduleName": module_name, + "schedule": schedule, + "scope": scope, + "enabled": enabled, } task.update({k: v for k, v in options.items() if v is not None}) - res = self.http.put(self.url + '/api/complianceTasks/{}'.format(id), data=json.dumps(task), headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/complianceTasks/{}".format(id), + data=json.dumps(task), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def delete_compliance_task(self, id): - '''**Description** + """**Description** Delete the compliance task with the given id **Arguments** - id: the id of the compliance task to delete - ''' - res = self.http.delete(self.url + '/api/complianceTasks/{}'.format(id), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.delete( + self.url + "/api/complianceTasks/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return False, self.lasterr return True, None def list_compliance_results(self, limit=50, direction=None, cursor=None, filter=""): - '''**Description** + """**Description** Get the list of all compliance tasks runs. **Arguments** @@ -450,18 +548,19 @@ def list_compliance_results(self, limit=50, direction=None, cursor=None, filter= **Success Return Value** A JSON list with the representation of each compliance task run. - ''' + """ url = "{url}/api/complianceResults?cursor{cursor}&filter={filter}&limit={limit}{direction}".format( url=self.url, limit=limit, direction="&direction=%s" % direction if direction else "", cursor="=%d" % cursor if cursor is not None else "", - filter=filter) + filter=filter, + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) return self._request_result(res) def get_compliance_results(self, id): - '''**Description** + """**Description** Retrieve the details for a specific compliance task run result. **Arguments** @@ -469,13 +568,16 @@ def get_compliance_results(self, id): **Success Return Value** A JSON representation of the compliance task run result. - ''' - res = self.http.get(self.url + '/api/complianceResults/{}'.format(id), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/complianceResults/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_compliance_results_csv(self, id): - '''**Description** + """**Description** Retrieve the details for a specific compliance task run result in csv. **Arguments** @@ -483,17 +585,28 @@ def get_compliance_results_csv(self, id): **Success Return Value** A CSV representation of the compliance task run result. 
- ''' - res = self.http.get(self.url + '/api/complianceResults/{}/csv'.format(id), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/complianceResults/{}/csv".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return False, self.lasterr return True, res.text - def list_commands_audit(self, from_sec=None, to_sec=None, scope_filter=None, command_filter=None, limit=100, - offset=0, metrics=[]): - '''**Description** + def list_commands_audit( + self, + from_sec=None, + to_sec=None, + scope_filter=None, + command_filter=None, + limit=100, + offset=0, + metrics=[], + ): + """**Description** List the commands audit. **DEPRECATED**: Use sdcclient.secure.ActivityAuditClientV1 instead. This is maintained for old on-prem versions, but will be removed over time. @@ -508,7 +621,7 @@ def list_commands_audit(self, from_sec=None, to_sec=None, scope_filter=None, com **Success Return Value** A JSON representation of the commands audit. - ''' + """ if to_sec is None: to_sec = time.time() if from_sec is None: @@ -518,16 +631,17 @@ def list_commands_audit(self, from_sec=None, to_sec=None, scope_filter=None, com url=self.url, offset=offset, limit=limit, - frm=int(from_sec * 10 ** 6), - to=int(to_sec * 10 ** 6), + frm=int(from_sec * 10**6), + to=int(to_sec * 10**6), scope="&scopeFilter=" + scope_filter if scope_filter else "", commandFilter="&commandFilter=" + command_filter if command_filter else "", - metrics="&metrics=" + json.dumps(metrics) if metrics else "") + metrics="&metrics=" + json.dumps(metrics) if metrics else "", + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) return self._request_result(res) def get_command_audit(self, id, metrics=[]): - '''**Description** + """**Description** Get a command audit. **DEPRECATED**: Use sdcclient.secure.ActivityAuditClientV1 instead. This is maintained for old on-prem versions, but will be removed over time. @@ -537,17 +651,18 @@ def get_command_audit(self, id, metrics=[]): **Success Return Value** A JSON representation of the command audit. - ''' + """ url = "{url}/api/commands/{id}?from=0&to={to}{metrics}".format( url=self.url, id=id, - to=int(time.time() * 10 ** 6), - metrics="&metrics=" + json.dumps(metrics) if metrics else "") + to=int(time.time() * 10**6), + metrics="&metrics=" + json.dumps(metrics) if metrics else "", + ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) return self._request_result(res) def list_image_profiles(self): - '''**Description** + """**Description** List the current set of image profiles. **Arguments** @@ -556,16 +671,14 @@ def list_image_profiles(self): **Success Return Value** A JSON object containing the details of each profile. - ''' - url = "{url}/api/v1/profiling/profileGroups/0/profiles".format( - url=self.url - ) + """ + url = "{url}/api/v1/profiling/profileGroups/0/profiles".format(url=self.url) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) return self._request_result(res) def get_image_profile(self, profileId): - '''**Description** + """**Description** Find the image profile with a (partial) profile ID and return its json description. **Arguments** @@ -578,7 +691,7 @@ def get_image_profile(self, profileId): collision profiles is returned, and the full complete ID string is printed. In this case, it returns false. 
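The partial-ID matching described above is easiest to see by example (the prefix is fabricated; the collision behavior is exactly the [False, matched_profiles] return documented here): ok, res = client.get_image_profile("f1a2b")  # a short prefix resolves only if unambiguous — if ok: print(res)  # the full profile JSON — elif isinstance(res, list): [print(p["profileId"]) for p in res]  # ambiguous prefix: res lists the colliding profiles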
- ''' + """ # RETRIEVE ALL THE IMAGE PROFILES ok, image_profiles = self.list_image_profiles() @@ -586,7 +699,7 @@ def get_image_profile(self, profileId): if not ok: return [False, self.lasterr] - ''' + """ The content of the json stored in the image_profiles dictionary: { @@ -597,9 +710,11 @@ def get_image_profile(self, profileId): ... ] } - ''' + """ - matched_profiles = self.__get_matched_profileIDs(profileId, image_profiles['profiles']) + matched_profiles = self.__get_matched_profileIDs( + profileId, image_profiles["profiles"] + ) # Profile ID not found if len(matched_profiles) == 0: @@ -609,8 +724,7 @@ def get_image_profile(self, profileId): elif len(matched_profiles) == 1: # Matched id. Return information url = "{url}/api/v1/profiling/profiles/{profileId}".format( - url=self.url, - profileId=matched_profiles[0]['profileId'] + url=self.url, profileId=matched_profiles[0]["profileId"] ) res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) @@ -621,7 +735,7 @@ def get_image_profile(self, profileId): return [False, matched_profiles] def __get_matched_profileIDs(self, requested_profile, profile_list): - ''' + """ **Description** Helper function for retrieving the list of matching profile @@ -705,17 +819,19 @@ def __get_matched_profileIDs(self, requested_profile, profile_list): }, ... ] - ''' + """ matched_profiles = [] request_len = len(requested_profile) for profile in profile_list: - # get the length of the substring to match str_len_match = min(len(profile), request_len) - if profile['profileId'][0:str_len_match] == requested_profile[0:str_len_match]: + if ( + profile["profileId"][0:str_len_match] + == requested_profile[0:str_len_match] + ): matched_profiles.append(profile) return matched_profiles diff --git a/sdcclient/_secure_v1.py b/sdcclient/_secure_v1.py index aeba49dd..41deb765 100644 --- a/sdcclient/_secure_v1.py +++ b/sdcclient/_secure_v1.py @@ -4,12 +4,12 @@ class SdSecureClientV1(SdSecureClient): - '''**Description** - Handles policies version 1 (ie. up to August 2019). For later Sysdig Secure versions, please use :class:`~SdSecureClient` instead. - ''' + """**Description** + Handles policies version 1 (ie. up to August 2019). For later Sysdig Secure versions, please use :class:`~SdSecureClient` instead. + """ def create_default_policies(self): - '''**Description** + """**Description** Create a set of default policies using the current system falco rules file as a reference. For every falco rule in the system falco rules file, one policy will be created. The policy will take the name and description from the name and description of the corresponding falco rule. If a policy already exists with the same name, no policy is added or modified. Existing @@ -20,12 +20,16 @@ def create_default_policies(self): **Success Return Value** JSON containing details on any new policies that were added. - ''' - res = self.http.post(self.url + '/api/policies/createDefault', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.post( + self.url + "/api/policies/createDefault", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def delete_all_policies(self): - '''**Description** + """**Description** Delete all existing policies. The falco rules file is unchanged. 
**Arguments** @@ -33,15 +37,19 @@ **Success Return Value** The string "Policies Deleted" - ''' - res = self.http.post(self.url + '/api/policies/deleteAll', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.post( + self.url + "/api/policies/deleteAll", + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, "Policies Deleted"] def list_policies(self): - '''**Description** + """**Description** List the current set of policies. **Arguments** @@ -49,12 +57,14 @@ **Success Return Value** A JSON object containing the number and details of each policy. - ''' - res = self.http.get(self.url + '/api/policies', headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/policies", headers=self.hdrs, verify=self.ssl_verify + ) return self._request_result(res) def get_policy_priorities(self): - '''**Description** + """**Description** Get a list of policy ids in the order they will be evaluated. **Arguments** @@ -62,13 +72,17 @@ **Success Return Value** A JSON object representing the list of policy ids. - ''' + """ - res = self.http.get(self.url + '/api/policies/priorities', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/policies/priorities", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def set_policy_priorities(self, priorities_json): - '''**Description** + """**Description** Change the policy evaluation order **Arguments** @@ -76,18 +90,23 @@ **Success Return Value** A JSON object representing the updated list of policy ids. - ''' + """ try: json.loads(priorities_json) except Exception as e: return [False, "priorities json is not valid json: {}".format(str(e))] - res = self.http.put(self.url + '/api/policies/priorities', headers=self.hdrs, data=priorities_json, verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/policies/priorities", + headers=self.hdrs, + data=priorities_json, + verify=self.ssl_verify, + ) return self._request_result(res) def get_policy(self, name): - '''**Description** + """**Description** Find the policy with the given name and return its json description. **Arguments** @@ -96,7 +115,7 @@ **Success Return Value** A JSON object containing the description of the policy. If there is no policy with the given name, returns False. - ''' + """ ok, res = self.list_policies() if not ok: return [False, res] @@ -111,7 +130,7 @@ return [False, "No policy with name {}".format(name)] def get_policy_id(self, id): - '''**Description** + """**Description** Find the policy with the given id and return its json description. **Arguments** @@ -120,12 +139,16 @@ **Success Return Value** A JSON object containing the description of the policy. If there is no policy with the given id, returns False. - ''' - res = self.http.get(self.url + '/api/policies/{}'.format(id), headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + "/api/policies/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def add_policy(self, policy_json): - '''**Description** + """**Description** Add a new policy using the provided json. 
**Arguments** @@ -133,18 +156,23 @@ def add_policy(self, policy_json): **Success Return Value** The string "OK" - ''' + """ try: policy_obj = json.loads(policy_json) except Exception as e: return [False, "policy json is not valid json: {}".format(str(e))] body = {"policy": policy_obj} - res = self.http.post(self.url + '/api/policies', headers=self.hdrs, data=json.dumps(body), verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/policies", + headers=self.hdrs, + data=json.dumps(body), + verify=self.ssl_verify, + ) return self._request_result(res) def update_policy(self, policy_json): - '''**Description** + """**Description** Update an existing policy using the provided json. The 'id' field from the policy is used to determine which policy to update. @@ -153,7 +181,7 @@ def update_policy(self, policy_json): **Success Return Value** The string "OK" - ''' + """ try: policy_obj = json.loads(policy_json) @@ -165,11 +193,16 @@ def update_policy(self, policy_json): body = {"policy": policy_obj} - res = self.http.put(self.url + '/api/policies/{}'.format(policy_obj["id"]), headers=self.hdrs, data=json.dumps(body), verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/policies/{}".format(policy_obj["id"]), + headers=self.hdrs, + data=json.dumps(body), + verify=self.ssl_verify, + ) return self._request_result(res) def delete_policy_name(self, name): - '''**Description** + """**Description** Delete the policy with the given name. **Arguments** @@ -177,7 +210,7 @@ def delete_policy_name(self, name): **Success Return Value** The JSON object representing the now-deleted policy. - ''' + """ ok, res = self.list_policies() if not ok: return [False, res] @@ -190,7 +223,7 @@ def delete_policy_name(self, name): return [False, "No policy with name {}".format(name)] def delete_policy_id(self, id): - '''**Description** + """**Description** Delete the policy with the given id **Arguments** @@ -198,6 +231,10 @@ def delete_policy_id(self, id): **Success Return Value** The JSON object representing the now-deleted policy. - ''' - res = self.http.delete(self.url + '/api/policies/{}'.format(id), headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.delete( + self.url + "/api/policies/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) diff --git a/sdcclient/ibm_auth_helper.py b/sdcclient/ibm_auth_helper.py index 25db43fe..67c079c4 100644 --- a/sdcclient/ibm_auth_helper.py +++ b/sdcclient/ibm_auth_helper.py @@ -2,7 +2,7 @@ class IbmAuthHelper: - '''Authenticate with IBM Cloud IAM. + """Authenticate with IBM Cloud IAM. **Arguments** **url**: Sysdig endpoint URL that should point to IBM Cloud @@ -11,41 +11,34 @@ class IbmAuthHelper: **Returns** A dictionary that will authenticate you with the IBM Cloud IAM API. - ''' + """ @staticmethod def get_headers(url, apikey, guid): iam_token = IbmAuthHelper.__get_iam_token(url, apikey) - return { - 'Authorization': 'Bearer ' + iam_token, - 'IBMInstanceID': guid - } + return {"Authorization": "Bearer " + iam_token, "IBMInstanceID": guid} @staticmethod def __get_iam_endpoint(url): - IAM_ENDPOINT = { - 'stage': 'iam.test.cloud.ibm.com', - 'prod': 'iam.cloud.ibm.com' - } - if '.test.' in url: - return IAM_ENDPOINT['stage'] + IAM_ENDPOINT = {"stage": "iam.test.cloud.ibm.com", "prod": "iam.cloud.ibm.com"} + if ".test." 
in url: + return IAM_ENDPOINT["stage"] else: - return IAM_ENDPOINT['prod'] + return IAM_ENDPOINT["prod"] @staticmethod def __get_iam_token(url, apikey): env_url = IbmAuthHelper.__get_iam_endpoint(url) response = requests.post( - 'https://' + env_url + '/identity/token', + "https://" + env_url + "/identity/token", data={ - 'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', - 'response_type': 'cloud_iam', - 'apikey': apikey + "grant_type": "urn:ibm:params:oauth:grant-type:apikey", + "response_type": "cloud_iam", + "apikey": apikey, }, - headers={ - 'Accept': 'application/json' - }) + headers={"Accept": "application/json"}, + ) if response.status_code == 200: - return response.json()['access_token'] + return response.json()["access_token"] else: response.raise_for_status() diff --git a/sdcclient/monitor/__init__.py b/sdcclient/monitor/__init__.py index 01f5a735..7401f020 100644 --- a/sdcclient/monitor/__init__.py +++ b/sdcclient/monitor/__init__.py @@ -3,4 +3,9 @@ from ._events_v1 import EventsClientV1 from ._events_v2 import EventsClientV2 -__all__ = ["DashboardsClientV3", "DashboardsClientV2", "EventsClientV1", "EventsClientV2"] +__all__ = [ + "DashboardsClientV3", + "DashboardsClientV2", + "EventsClientV1", + "EventsClientV2", +] diff --git a/sdcclient/monitor/_dashboards_v2.py b/sdcclient/monitor/_dashboards_v2.py index 61fd9b7a..948c1cd6 100644 --- a/sdcclient/monitor/_dashboards_v2.py +++ b/sdcclient/monitor/_dashboards_v2.py @@ -3,20 +3,37 @@ from sdcclient._common import _SdcCommon from sdcclient.monitor.dashboard_converters import convert_dashboard_between_versions -from sdcclient.monitor.dashboard_converters._dashboard_scope import convert_scope_string_to_expression +from sdcclient.monitor.dashboard_converters._dashboard_scope import ( + convert_scope_string_to_expression, +) class DashboardsClientV2(_SdcCommon): - def __init__(self, token="", sdc_url='https://app.sysdigcloud.com', ssl_verify=True, custom_headers=None): - super(DashboardsClientV2, self).__init__(token, sdc_url, ssl_verify, custom_headers) + def __init__( + self, + token="", + sdc_url="https://app.sysdigcloud.com", + ssl_verify=True, + custom_headers=None, + ): + super(DashboardsClientV2, self).__init__( + token, sdc_url, ssl_verify, custom_headers + ) self.product = "SDC" - self._dashboards_api_version = 'v2' - self._dashboards_api_endpoint = '/api/{}/dashboards'.format(self._dashboards_api_version) - self._default_dashboards_api_endpoint = '/api/{}/defaultDashboards'.format(self._dashboards_api_version) + self._dashboards_api_version = "v2" + self._dashboards_api_endpoint = "/api/{}/dashboards".format( + self._dashboards_api_version + ) + self._default_dashboards_api_endpoint = "/api/{}/defaultDashboards".format( + self._dashboards_api_version + ) def get_views_list(self): - res = self.http.get(self.url + self._default_dashboards_api_endpoint, headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.get( + self.url + self._default_dashboards_api_endpoint, + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] @@ -26,24 +43,27 @@ def get_view(self, name): if gvres[0] is False: return gvres - vlist = gvres[1]['defaultDashboards'] + vlist = gvres[1]["defaultDashboards"] id = None for v in vlist: - if v['name'] == name: - id = v['id'] + if v["name"] == name: + id = v["id"] break if not id: - return [False, 'view ' + name + ' not found'] + return [False, "view " + name + " not found"] - res = self.http.get(self.url + 
self._default_dashboards_api_endpoint + '/' + id, headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.get( + self.url + self._default_dashboards_api_endpoint + "/" + id, + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_dashboards(self): - '''**Description** + """**Description** Return the list of dashboards available under the given user account. This includes the dashboards created by the user and the ones shared with them by other users. **Success Return Value** @@ -51,12 +71,16 @@ **Example** `examples/list_dashboards.py `_ - ''' - res = self.http.get(self.url + self._dashboards_api_endpoint, headers=self.hdrs, verify=self.ssl_verify) + """ + res = self.http.get( + self.url + self._dashboards_api_endpoint, + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def update_dashboard(self, dashboard_data): - '''**Description** + """**Description** Updates the dashboard with the provided data. Please note that the dictionary will require a valid ID and version field to work as expected. **Success Return Value** @@ -64,13 +88,17 @@ **Example** `examples/dashboard_basic_crud.py `_ - ''' - res = self.http.put(self.url + self._dashboards_api_endpoint + "/" + str(dashboard_data['id']), - headers=self.hdrs, verify=self.ssl_verify, data=json.dumps({'dashboard': dashboard_data})) + """ + res = self.http.put( + self.url + self._dashboards_api_endpoint + "/" + str(dashboard_data["id"]), + headers=self.hdrs, + verify=self.ssl_verify, + data=json.dumps({"dashboard": dashboard_data}), + ) return self._request_result(res) def find_dashboard_by(self, name=None): - '''**Description** + """**Description** Finds dashboards with the specified name. You can then delete the dashboard (with :func:`~SdcClient.delete_dashboard`) or edit panels (with :func:`~SdcClient.add_dashboard_panel` and :func:`~SdcClient.remove_dashboard_panel`) **Arguments** @@ -81,35 +109,41 @@ **Example** `examples/dashboard.py `_ - ''' + """ res = self.get_dashboards() if res[0] is False: return res else: + def filter_fn(configuration): - return configuration['name'] == name + return configuration["name"] == name def create_item(configuration): - return {'dashboard': configuration} + return {"dashboard": configuration} - dashboards = list(map(create_item, list(filter(filter_fn, res[1]['dashboards'])))) + dashboards = list( + map(create_item, list(filter(filter_fn, res[1]["dashboards"]))) + ) return [True, dashboards] def create_dashboard_with_configuration(self, configuration): # Remove id and version properties if already set configuration_clone = copy.deepcopy(configuration) - if 'id' in configuration_clone: - del configuration_clone['id'] - if 'version' in configuration_clone: - del configuration_clone['version'] - - res = self.http.post(self.url + self._dashboards_api_endpoint, headers=self.hdrs, - data=json.dumps({'dashboard': configuration_clone}), - verify=self.ssl_verify) + if "id" in configuration_clone: + del configuration_clone["id"] + if "version" in configuration_clone: + del configuration_clone["version"] + + res = self.http.post( + self.url + self._dashboards_api_endpoint, + headers=self.hdrs, + data=json.dumps({"dashboard": configuration_clone}), + verify=self.ssl_verify, + ) return self._request_result(res) def create_dashboard(self, name): - ''' + """ **Description** Creates an empty dashboard. 
You can then add panels by using ``add_dashboard_panel``. @@ -121,27 +155,37 @@ def create_dashboard(self, name): **Example** `examples/dashboard.py `_ - ''' + """ dashboard_configuration = { - 'name': name, - 'schema': 2, - 'widgets': [], - 'eventsOverlaySettings': { - 'filterNotificationsUserInputFilter': '' - } + "name": name, + "schema": 2, + "widgets": [], + "eventsOverlaySettings": {"filterNotificationsUserInputFilter": ""}, } # # Create the new dashboard # - res = self.http.post(self.url + self._dashboards_api_endpoint, headers=self.hdrs, - data=json.dumps({'dashboard': dashboard_configuration}), - verify=self.ssl_verify) + res = self.http.post( + self.url + self._dashboards_api_endpoint, + headers=self.hdrs, + data=json.dumps({"dashboard": dashboard_configuration}), + verify=self.ssl_verify, + ) return self._request_result(res) # TODO COVER - def add_dashboard_panel(self, dashboard, name, panel_type, metrics, scope=None, sort_direction='desc', limit=None, - layout=None): + def add_dashboard_panel( + self, + dashboard, + name, + panel_type, + metrics, + scope=None, + sort_direction="desc", + limit=None, + layout=None, + ): """**Description** Adds a panel to the dashboard. A panel can be a time series, or a top chart (i.e. bar chart), or a number panel. @@ -165,19 +209,14 @@ def add_dashboard_panel(self, dashboard, name, panel_type, metrics, scope=None, `examples/dashboard.py `_ """ panel_configuration = { - 'name': name, - 'showAs': None, - 'metrics': [], - 'gridConfiguration': { - 'col': 1, - 'row': 1, - 'size_x': 12, - 'size_y': 6 - }, - 'customDisplayOptions': {} + "name": name, + "showAs": None, + "metrics": [], + "gridConfiguration": {"col": 1, "row": 1, "size_x": 12, "size_y": 6}, + "customDisplayOptions": {}, } - if panel_type == 'timeSeries': + if panel_type == "timeSeries": # # In case of a time series, the current dashboard implementation # requires the timestamp to be explicitly specified as "key". 
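The metrics format this method expects is easiest to show by example. A sketch under the usual assumptions (`v2` is an authenticated DashboardsClientV2 and `dashboard` is a configuration dict, e.g. find_dashboard_by(...)[1][0]["dashboard"]; the metric IDs are standard Sysdig names, everything else is illustrative): metrics = [{"id": "proc.name"}, {"id": "cpu.used.percent", "aggregations": {"time": "avg", "group": "avg"}}]  # an entry without aggregations acts as a grouping key — ok, res = v2.add_dashboard_panel(dashboard, "CPU by process", "top", metrics, limit=10)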
@@ -186,7 +225,7 @@ def add_dashboard_panel(self, dashboard, name, panel_type, metrics, scope=None, # specify time window and sampling) # metrics = copy.copy(metrics) - metrics.insert(0, {'id': 'timestamp'}) + metrics.insert(0, {"id": "timestamp"}) # # Convert list of metrics to format used by Sysdig Monitor @@ -195,79 +234,72 @@ def add_dashboard_panel(self, dashboard, name, panel_type, metrics, scope=None, k_count = 0 v_count = 0 for i, metric in enumerate(metrics): - property_name = 'v' if 'aggregations' in metric else 'k' + property_name = "v" if "aggregations" in metric else "k" - if property_name == 'k': + if property_name == "k": i = k_count k_count += 1 else: i = v_count v_count += 1 - property_names[metric['id']] = property_name + str(i) - - panel_configuration['metrics'].append({ - 'id': metric['id'], - 'timeAggregation': metric['aggregations']['time'] if 'aggregations' in metric else None, - 'groupAggregation': metric['aggregations']['group'] if 'aggregations' in metric else None, - 'propertyName': property_name + str(i) - }) + property_names[metric["id"]] = property_name + str(i) + + panel_configuration["metrics"].append( + { + "id": metric["id"], + "timeAggregation": metric["aggregations"]["time"] + if "aggregations" in metric + else None, + "groupAggregation": metric["aggregations"]["group"] + if "aggregations" in metric + else None, + "propertyName": property_name + str(i), + } + ) - panel_configuration['scope'] = scope + panel_configuration["scope"] = scope # if chart scope is equal to dashboard scope, set it as non override - panel_configuration['overrideScope'] = ('scope' in dashboard and dashboard['scope'] != scope) or \ - ('scope' not in dashboard and scope is not None) - - if 'custom_display_options' not in panel_configuration: - panel_configuration['custom_display_options'] = { - 'valueLimit': { - 'count': 10, - 'direction': 'desc' - }, - 'histogram': { - 'numberOfBuckets': 10 - }, - 'yAxisScale': 'linear', - 'yAxisLeftDomain': { - 'from': 0, - 'to': None - }, - 'yAxisRightDomain': { - 'from': 0, - 'to': None - }, - 'xAxis': { - 'from': 0, - 'to': None - } + panel_configuration["overrideScope"] = ( + "scope" in dashboard and dashboard["scope"] != scope + ) or ("scope" not in dashboard and scope is not None) + + if "custom_display_options" not in panel_configuration: + panel_configuration["custom_display_options"] = { + "valueLimit": {"count": 10, "direction": "desc"}, + "histogram": {"numberOfBuckets": 10}, + "yAxisScale": "linear", + "yAxisLeftDomain": {"from": 0, "to": None}, + "yAxisRightDomain": {"from": 0, "to": None}, + "xAxis": {"from": 0, "to": None}, } # # Configure panel type # - if panel_type == 'timeSeries': - panel_configuration['showAs'] = 'timeSeries' + if panel_type == "timeSeries": + panel_configuration["showAs"] = "timeSeries" if limit is not None: - panel_configuration['custom_display_options']['valueLimit'] = { - 'count': limit, - 'direction': 'desc' + panel_configuration["custom_display_options"]["valueLimit"] = { + "count": limit, + "direction": "desc", } - elif panel_type == 'number': - panel_configuration['showAs'] = 'summary' - elif panel_type == 'top': - panel_configuration['showAs'] = 'top' + elif panel_type == "number": + panel_configuration["showAs"] = "summary" + elif panel_type == "top": + panel_configuration["showAs"] = "top" if limit is not None: - panel_configuration['custom_display_options']['valueLimit'] = { - 'count': limit, - 'direction': sort_direction + panel_configuration["custom_display_options"]["valueLimit"] = { + "count": 
limit, + "direction": sort_direction, } # # Configure layout # if layout is not None: - panel_configuration['gridConfiguration'] = layout + panel_configuration["gridConfiguration"] = layout # # Clone existing dashboard... @@ -277,19 +309,22 @@ def add_dashboard_panel(self, dashboard, name, panel_type, metrics, scope=None, # # ... and add the new panel # - dashboard_configuration['widgets'].append(panel_configuration) + dashboard_configuration["widgets"].append(panel_configuration) # # Update dashboard # - res = self.http.put(self.url + self._dashboards_api_endpoint + '/' + str(dashboard['id']), headers=self.hdrs, - data=json.dumps({'dashboard': dashboard_configuration}), - verify=self.ssl_verify) + res = self.http.put( + self.url + self._dashboards_api_endpoint + "/" + str(dashboard["id"]), + headers=self.hdrs, + data=json.dumps({"dashboard": dashboard_configuration}), + verify=self.ssl_verify, + ) return self._request_result(res) # TODO COVER def remove_dashboard_panel(self, dashboard, panel_name): - '''**Description** + """**Description** Removes a panel from the dashboard. The panel to remove is identified by the specified ``name``. **Arguments** @@ -300,7 +335,7 @@ def remove_dashboard_panel(self, dashboard, panel_name): **Example** `examples/dashboard.py `_ - ''' + """ # # Clone existing dashboard... # @@ -310,49 +345,56 @@ def remove_dashboard_panel(self, dashboard, panel_name): # ... find the panel # def filter_fn(panel): - return panel['name'] == panel_name + return panel["name"] == panel_name - panels = list(filter(filter_fn, dashboard_configuration['widgets'])) + panels = list(filter(filter_fn, dashboard_configuration["widgets"])) if len(panels) > 0: # # ... and remove it # for panel in panels: - dashboard_configuration['widgets'].remove(panel) + dashboard_configuration["widgets"].remove(panel) # # Update dashboard # - res = self.http.put(self.url + self._dashboards_api_endpoint + '/' + str(dashboard['id']), - headers=self.hdrs, - data=json.dumps({'dashboard': dashboard_configuration}), - verify=self.ssl_verify) + res = self.http.put( + self.url + self._dashboards_api_endpoint + "/" + str(dashboard["id"]), + headers=self.hdrs, + data=json.dumps({"dashboard": dashboard_configuration}), + verify=self.ssl_verify, + ) return self._request_result(res) else: - return [False, 'Not found'] + return [False, "Not found"] - def create_dashboard_from_template(self, dashboard_name, template, scope, shared=False, public=False): + def create_dashboard_from_template( + self, dashboard_name, template, scope, shared=False, public=False + ): if scope is not None: if not isinstance(scope, str): - return [False, 'Invalid scope format: Expected a string'] + return [False, "Invalid scope format: Expected a string"] # # Clean up the dashboard we retireved so it's ready to be pushed # - template['id'] = None - template['version'] = None - template['schema'] = 2 - template['name'] = dashboard_name - template['shared'] = shared - template['public'] = public - template['publicToken'] = None + template["id"] = None + template["version"] = None + template["schema"] = 2 + template["name"] = dashboard_name + template["shared"] = shared + template["public"] = public + template["publicToken"] = None # default dashboards don't have eventsOverlaySettings property # make sure to add the default set if the template doesn't include it - if 'eventsOverlaySettings' not in template or not template['eventsOverlaySettings']: - template['eventsOverlaySettings'] = { - 'filterNotificationsUserInputFilter': '' + if ( + 
"eventsOverlaySettings" not in template + or not template["eventsOverlaySettings"] + ): + template["eventsOverlaySettings"] = { + "filterNotificationsUserInputFilter": "" } # set dashboard scope to the specific parameter @@ -360,40 +402,55 @@ def create_dashboard_from_template(self, dashboard_name, template, scope, shared if not scopeOk: return scopeOk, scopeRes if scopeRes: - template['scopeExpressionList'] = list(map( - lambda ex: {'operand': ex['operand'], 'operator': ex['operator'], 'value': ex['value'], - 'displayName': '', 'variable': False}, scopeRes)) + template["scopeExpressionList"] = list( + map( + lambda ex: { + "operand": ex["operand"], + "operator": ex["operator"], + "value": ex["value"], + "displayName": "", + "variable": False, + }, + scopeRes, + ) + ) else: - template['scopeExpressionList'] = None + template["scopeExpressionList"] = None # NOTE: Individual panels might override the dashboard scope, the override will NOT be reset - if 'widgets' in template and template['widgets'] is not None: - for chart in template['widgets']: - if 'overrideScope' not in chart: - chart['overrideScope'] = False + if "widgets" in template and template["widgets"] is not None: + for chart in template["widgets"]: + if "overrideScope" not in chart: + chart["overrideScope"] = False - if not chart['overrideScope']: + if not chart["overrideScope"]: # patch frontend bug to hide scope override warning even when it's not really overridden - chart['scope'] = scope + chart["scope"] = scope - if chart['showAs'] != 'map': + if chart["showAs"] != "map": # if chart scope is equal to dashboard scope, set it as non override - chart_scope = chart['scope'] if 'scope' in chart else None - chart['overrideScope'] = chart_scope != scope + chart_scope = chart["scope"] if "scope" in chart else None + chart["overrideScope"] = chart_scope != scope else: # topology panels must override the scope - chart['overrideScope'] = True + chart["overrideScope"] = True # # Create the new dashboard # - res = self.http.post(self.url + self._dashboards_api_endpoint, headers=self.hdrs, - data=json.dumps({'dashboard': template}), verify=self.ssl_verify) + res = self.http.post( + self.url + self._dashboards_api_endpoint, + headers=self.hdrs, + data=json.dumps({"dashboard": template}), + verify=self.ssl_verify, + ) return self._request_result(res) - def create_dashboard_from_view(self, newdashname, viewname, filter, shared=False, public=False): - '''**Description** + def create_dashboard_from_view( + self, newdashname, viewname, filter, shared=False, public=False + ): + """**Description** Create a new dasboard using one of the Sysdig Monitor views as a template. You will be able to define the scope of the new dashboard. 
**Arguments** @@ -408,7 +465,7 @@ **Example** `examples/create_dashboard.py `_ - ''' + """ # # Find our template view # @@ -416,18 +473,23 @@ if not ok: return ok, gvres - view = gvres['defaultDashboard'] + view = gvres["defaultDashboard"] - view['timeMode'] = {'mode': 1} - view['time'] = {'last': 2 * 60 * 60 * 1000000, 'sampling': 2 * 60 * 60 * 1000000} + view["timeMode"] = {"mode": 1} + view["time"] = { + "last": 2 * 60 * 60 * 1000000, + "sampling": 2 * 60 * 60 * 1000000, + } # # Create the new dashboard # - return self.create_dashboard_from_template(newdashname, view, filter, shared, public) + return self.create_dashboard_from_template( + newdashname, view, filter, shared, public + ) def get_dashboard(self, dashboard_id): - '''**Description** + """**Description** Return a dashboard with the passed-in ID. This includes the dashboards created by the user and the ones shared with them by other users. **Success Return Value** @@ -435,13 +497,18 @@ **Example** `examples/dashboard_basic_crud.py `_ - ''' - res = self.http.get(self.url + self._dashboards_api_endpoint + "/" + str(dashboard_id), headers=self.hdrs, - verify=self.ssl_verify) + """ + res = self.http.get( + self.url + self._dashboards_api_endpoint + "/" + str(dashboard_id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) - def create_dashboard_from_dashboard(self, newdashname, templatename, filter, shared=False, public=False): - '''**Description** + def create_dashboard_from_dashboard( + self, newdashname, templatename, filter, shared=False, public=False + ): + """**Description** Create a new dashboard using one of the existing dashboards as a template. You will be able to define the scope of the new dashboard. **Arguments** @@ -456,11 +523,15 @@ **Example** `examples/create_dashboard.py `_ - ''' + """ # # Get the list of dashboards from the server # - res = self.http.get(self.url + self._dashboards_api_endpoint, headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + self._dashboards_api_endpoint, + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] @@ -471,22 +542,28 @@ # dboard = None - for db in j['dashboards']: - if db['name'] == templatename: + for db in j["dashboards"]: + if db["name"] == templatename: dboard = db break if dboard is None: - self.lasterr = 'can\'t find dashboard ' + templatename + ' to use as a template' + self.lasterr = ( + "can't find dashboard " + templatename + " to use as a template" + ) return [False, self.lasterr] # # Create the dashboard # - return self.create_dashboard_from_template(newdashname, dboard, filter, shared, public) + return self.create_dashboard_from_template( + newdashname, dboard, filter, shared, public + ) - def create_dashboard_from_file(self, dashboard_name, filename, filter, shared=False, public=False): - ''' + def create_dashboard_from_file( + self, dashboard_name, filename, filter, shared=False, public=False + ): + """ **Description** Create a new dashboard using a dashboard template saved to disk. 
See :func:`~SdcClient.save_dashboard_to_file` to use the file to create a dashboard (useful to create and restore backups). @@ -508,7 +585,7 @@ **Example** `examples/dashboard_save_load.py `_ - ''' + """ # # Load the Dashboard # @@ -518,21 +595,18 @@ # # Handle old files # - if 'dashboard' not in loaded_object: - loaded_object = { - 'version': 'v1', - 'dashboard': loaded_object - } + if "dashboard" not in loaded_object: + loaded_object = {"version": "v1", "dashboard": loaded_object} - dashboard = loaded_object['dashboard'] + dashboard = loaded_object["dashboard"] - if loaded_object['version'] != self._dashboards_api_version: + if loaded_object["version"] != self._dashboards_api_version: # # Convert the dashboard (if possible) # - conversion_result, dashboard = convert_dashboard_between_versions(dashboard, - loaded_object['version'], - self._dashboards_api_version) + conversion_result, dashboard = convert_dashboard_between_versions( + dashboard, loaded_object["version"], self._dashboards_api_version + ) if not conversion_result: return conversion_result, dashboard @@ -540,10 +614,12 @@ # # Create the new dashboard # - return self.create_dashboard_from_template(dashboard_name, dashboard, filter, shared, public) + return self.create_dashboard_from_template( + dashboard_name, dashboard, filter, shared, public + ) def save_dashboard_to_file(self, dashboard, filename): - ''' + """ **Description** Save a dashboard to disk. See :func:`~SdcClient.create_dashboard_from_file` to use the file to create a dashboard (useful to create and restore backups). @@ -557,15 +633,14 @@ **Example** `examples/dashboard_save_load.py `_ - ''' - with open(filename, 'w') as outf: - json.dump({ - 'version': self._dashboards_api_version, - 'dashboard': dashboard - }, outf) + """ + with open(filename, "w") as outf: + json.dump( + {"version": self._dashboards_api_version, "dashboard": dashboard}, outf + ) def delete_dashboard(self, dashboard): - '''**Description** + """**Description** Deletes a dashboard. 
**Arguments** @@ -576,12 +651,15 @@ def delete_dashboard(self, dashboard): **Example** `examples/delete_dashboard.py `_ - ''' - if 'id' not in dashboard: + """ + if "id" not in dashboard: return [False, "Invalid dashboard format"] - res = self.http.delete(self.url + self._dashboards_api_endpoint + '/' + str(dashboard['id']), headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.delete( + self.url + self._dashboards_api_endpoint + "/" + str(dashboard["id"]), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] diff --git a/sdcclient/monitor/_dashboards_v3.py b/sdcclient/monitor/_dashboards_v3.py index b690f4d3..e964970d 100644 --- a/sdcclient/monitor/_dashboards_v3.py +++ b/sdcclient/monitor/_dashboards_v3.py @@ -2,24 +2,41 @@ import json from sdcclient._common import _SdcCommon -from sdcclient.monitor.dashboard_converters import convert_dashboard_between_versions, \ - convert_scope_string_to_expression +from sdcclient.monitor.dashboard_converters import ( + convert_dashboard_between_versions, + convert_scope_string_to_expression, +) PANEL_VISUALIZATION_TIMECHART = "advancedTimechart" PANEL_VISUALIZATION_NUMBER = "advancedNumber" class DashboardsClientV3(_SdcCommon): - def __init__(self, token="", sdc_url='https://app.sysdigcloud.com', ssl_verify=True, custom_headers=None): - super(DashboardsClientV3, self).__init__(token, sdc_url, ssl_verify, custom_headers) + def __init__( + self, + token="", + sdc_url="https://app.sysdigcloud.com", + ssl_verify=True, + custom_headers=None, + ): + super(DashboardsClientV3, self).__init__( + token, sdc_url, ssl_verify, custom_headers + ) self.product = "SDC" - self._dashboards_api_version = 'v3' - self._dashboards_api_endpoint = '/api/{}/dashboards'.format(self._dashboards_api_version) - self._default_dashboards_api_endpoint = '/api/{}/dashboards/templates'.format(self._dashboards_api_version) + self._dashboards_api_version = "v3" + self._dashboards_api_endpoint = "/api/{}/dashboards".format( + self._dashboards_api_version + ) + self._default_dashboards_api_endpoint = "/api/{}/dashboards/templates".format( + self._dashboards_api_version + ) def get_views_list(self): - res = self.http.get(self.url + self._default_dashboards_api_endpoint, headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.get( + self.url + self._default_dashboards_api_endpoint, + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] @@ -29,20 +46,23 @@ def get_view(self, name): if gvres[0] is False: return gvres - vlist = gvres[1]['dashboardTemplates'] + vlist = gvres[1]["dashboardTemplates"] id = None for v in vlist: - if v['name'] == name: - id = v['dashboardId'] + if v["name"] == name: + id = v["dashboardId"] break if not id: - return [False, 'view ' + name + ' not found'] + return [False, "view " + name + " not found"] - res = self.http.get(self.url + self._default_dashboards_api_endpoint + '/' + id, headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.get( + self.url + self._default_dashboards_api_endpoint + "/" + id, + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def get_dashboards(self, light=True): @@ -69,16 +89,17 @@ def get_dashboards(self, light=True): >>> for dashboard in res["dashboards"]: >>> print(dashboard["name"]) """ - params = { - "light": "true" if light else "false" - } - res = self.http.get(self.url + self._dashboards_api_endpoint, params=params, - 
headers=self.hdrs, - verify=self.ssl_verify) + params = {"light": "true" if light else "false"} + res = self.http.get( + self.url + self._dashboards_api_endpoint, + params=params, + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def update_dashboard(self, dashboard_data): - '''**Description** + """**Description** Updates the dashboard with the provided data. Please note that the dictionary will require a valid ID and version field to work as expected. **Success Return Value** @@ -86,13 +107,17 @@ **Example** `examples/dashboard_basic_crud.py `_ - ''' - res = self.http.put(self.url + self._dashboards_api_endpoint + "/" + str(dashboard_data['id']), - headers=self.hdrs, verify=self.ssl_verify, data=json.dumps({'dashboard': dashboard_data})) + """ + res = self.http.put( + self.url + self._dashboards_api_endpoint + "/" + str(dashboard_data["id"]), + headers=self.hdrs, + verify=self.ssl_verify, + data=json.dumps({"dashboard": dashboard_data}), + ) return self._request_result(res) def find_dashboard_by(self, name=None): - '''**Description** + """**Description** Finds dashboards with the specified name. You can then delete the dashboard (with :func:`~SdcClient.delete_dashboard`) or edit panels (with :func:`~SdcClient.add_dashboard_panel` and :func:`~SdcClient.remove_dashboard_panel`) **Arguments** @@ -103,35 +128,41 @@ **Example** `examples/dashboard.py `_ - ''' + """ res = self.get_dashboards() if res[0] is False: return res else: + def filter_fn(configuration): - return configuration['name'] == name + return configuration["name"] == name def create_item(configuration): - return {'dashboard': configuration} + return {"dashboard": configuration} - dashboards = list(map(create_item, list(filter(filter_fn, res[1]['dashboards'])))) + dashboards = list( + map(create_item, list(filter(filter_fn, res[1]["dashboards"]))) + ) return [True, dashboards] def create_dashboard_with_configuration(self, configuration): # Remove id and version properties if already set configuration_clone = copy.deepcopy(configuration) - if 'id' in configuration_clone: - del configuration_clone['id'] - if 'version' in configuration_clone: - del configuration_clone['version'] - - res = self.http.post(self.url + self._dashboards_api_endpoint, headers=self.hdrs, - data=json.dumps({'dashboard': configuration_clone}), - verify=self.ssl_verify) + if "id" in configuration_clone: + del configuration_clone["id"] + if "version" in configuration_clone: + del configuration_clone["version"] + + res = self.http.post( + self.url + self._dashboards_api_endpoint, + headers=self.hdrs, + data=json.dumps({"dashboard": configuration_clone}), + verify=self.ssl_verify, + ) return self._request_result(res) def create_dashboard(self, name): - ''' + """ **Description** Creates an empty dashboard. You can then add panels by using ``add_dashboard_panel``. 
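A sketch of that flow against the v3 client; the query string below is a guess at a PromQL-style expression and is not pinned down by this diff: ok, res = v3.create_dashboard("Host CPU")  # `v3` is an authenticated DashboardsClientV3 — if ok: dashboard = res["dashboard"]  # assumes the API echoes the created object back — ok, res = v3.add_dashboard_panel(dashboard, "CPU used", "advancedTimechart", "avg(avg_over_time(sysdig_host_cpu_used_percent[10s]))")  # "advancedTimechart" is PANEL_VISUALIZATION_TIMECHART above; the query is invented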
@@ -143,24 +174,25 @@ def create_dashboard(self, name): **Example** `examples/dashboard.py `_ - ''' + """ dashboard_configuration = { - 'name': name, - 'schema': 3, - 'widgets': [], - 'eventsOverlaySettings': { - 'filterNotificationsUserInputFilter': '' - }, - 'layout': [], - 'panels': [], + "name": name, + "schema": 3, + "widgets": [], + "eventsOverlaySettings": {"filterNotificationsUserInputFilter": ""}, + "layout": [], + "panels": [], } # # Create the new dashboard # - res = self.http.post(self.url + self._dashboards_api_endpoint, headers=self.hdrs, - data=json.dumps({'dashboard': dashboard_configuration}), - verify=self.ssl_verify) + res = self.http.post( + self.url + self._dashboards_api_endpoint, + headers=self.hdrs, + data=json.dumps({"dashboard": dashboard_configuration}), + verify=self.ssl_verify, + ) return self._request_result(res) # TODO COVER @@ -178,18 +210,18 @@ def add_dashboard_panel(self, dashboard, panel_name, visualization, query): "displayInfo": { "displayName": "", "timeSeriesDisplayNameTemplate": "", - "type": "lines" + "type": "lines", }, "format": { "unit": "%", "inputFormat": "0-100", "displayFormat": "auto", "decimals": None, - "yAxis": "auto" + "yAxis": "auto", }, - "query": query + "query": query, } - ] + ], } new_layout = { "panelId": new_panel_id, @@ -202,9 +234,7 @@ def add_dashboard_panel(self, dashboard, panel_name, visualization, query): if visualization == PANEL_VISUALIZATION_TIMECHART: new_panel["axesConfiguration"] = { - "bottom": { - "enabled": True - }, + "bottom": {"enabled": True}, "left": { "enabled": True, "displayName": None, @@ -215,7 +245,7 @@ def add_dashboard_panel(self, dashboard, panel_name, visualization, query): "maxValue": None, "minInputFormat": "0-100", "maxInputFormat": "0-100", - "scale": "linear" + "scale": "linear", }, "right": { "enabled": True, @@ -227,14 +257,14 @@ def add_dashboard_panel(self, dashboard, panel_name, visualization, query): "maxValue": None, "minInputFormat": "1", "maxInputFormat": "1", - "scale": "linear" - } + "scale": "linear", + }, } new_panel["legendConfiguration"] = { "enabled": True, "position": "right", "layout": "table", - "showCurrent": True + "showCurrent": True, } if visualization == PANEL_VISUALIZATION_NUMBER: new_panel["numberThresholds"] = { @@ -242,7 +272,7 @@ def add_dashboard_panel(self, dashboard, panel_name, visualization, query): "base": { "severity": "none", "displayText": "", - } + }, } dboard["panels"].append(new_panel) @@ -253,78 +283,96 @@ def add_dashboard_panel(self, dashboard, panel_name, visualization, query): # TODO COVER def remove_dashboard_panel(self, dashboard, panel_id): dboard = copy.deepcopy(dashboard) - dboard["panels"] = [panel for panel in dboard["panels"] if panel["id"] != panel_id] - dboard["layout"] = [layout for layout in dboard["layout"] if layout["panelId"] != panel_id] + dboard["panels"] = [ + panel for panel in dboard["panels"] if panel["id"] != panel_id + ] + dboard["layout"] = [ + layout for layout in dboard["layout"] if layout["panelId"] != panel_id + ] return self.update_dashboard(dboard) - def create_dashboard_from_template(self, dashboard_name, template, scope=None, shared=False, public=False): + def create_dashboard_from_template( + self, dashboard_name, template, scope=None, shared=False, public=False + ): if scope is not None: if not isinstance(scope, list) and not isinstance(scope, str): - return [False, 'Invalid scope format: Expected a list, a string or None'] + return [ + False, + "Invalid scope format: Expected a list, a string or None", + ] else: 
scope = []
 #
 # Clean up the dashboard we retrieved so it's ready to be pushed
 #
- template['id'] = None
- template['version'] = None
- template['schema'] = 3
- template['name'] = dashboard_name
- template['shared'] = shared
- template['public'] = public
- template['publicToken'] = None
+ template["id"] = None
+ template["version"] = None
+ template["schema"] = 3
+ template["name"] = dashboard_name
+ template["shared"] = shared
+ template["public"] = public
+ template["publicToken"] = None
 # default dashboards don't have eventsOverlaySettings property
 # make sure to add the default set if the template doesn't include it
- if 'eventsOverlaySettings' not in template or not template['eventsOverlaySettings']:
- template['eventsOverlaySettings'] = {
- 'filterNotificationsUserInputFilter': ''
+ if (
+ "eventsOverlaySettings" not in template
+ or not template["eventsOverlaySettings"]
+ ):
+ template["eventsOverlaySettings"] = {
+ "filterNotificationsUserInputFilter": ""
 }
 # set dashboard scope to the specific parameter
- template['scopeExpressionList'] = []
+ template["scopeExpressionList"] = []
 if isinstance(scope, list):
 for s in scope:
 ok, converted_scope = convert_scope_string_to_expression(s)
 if not ok:
 return ok, converted_scope
- template['scopeExpressionList'].append(converted_scope[0])
+ template["scopeExpressionList"].append(converted_scope[0])
 elif isinstance(scope, str):
 ok, converted_scope = convert_scope_string_to_expression(scope)
 if not ok:
 return ok, converted_scope
- template['scopeExpressionList'] = converted_scope
+ template["scopeExpressionList"] = converted_scope
 # NOTE: Individual panels might override the dashboard scope, the override will NOT be reset
- if 'widgets' in template and template['widgets'] is not None:
- for chart in template['widgets']:
- if 'overrideScope' not in chart:
- chart['overrideScope'] = False
+ if "widgets" in template and template["widgets"] is not None:
+ for chart in template["widgets"]:
+ if "overrideScope" not in chart:
+ chart["overrideScope"] = False
- if not chart['overrideScope']:
+ if not chart["overrideScope"]:
 # patch frontend bug to hide scope override warning even when it's not really overridden
- chart['scope'] = scope
+ chart["scope"] = scope
- if chart['showAs'] != 'map':
+ if chart["showAs"] != "map":
 # if chart scope is equal to dashboard scope, set it as non override
- chart_scope = chart['scope'] if 'scope' in chart else None
- chart['overrideScope'] = chart_scope != scope
+ chart_scope = chart["scope"] if "scope" in chart else None
+ chart["overrideScope"] = chart_scope != scope
 else:
 # topology panels must override the scope
- chart['overrideScope'] = True
+ chart["overrideScope"] = True
 #
 # Create the new dashboard
 #
- res = self.http.post(self.url + self._dashboards_api_endpoint, headers=self.hdrs,
- data=json.dumps({'dashboard': template}), verify=self.ssl_verify)
+ res = self.http.post(
+ self.url + self._dashboards_api_endpoint,
+ headers=self.hdrs,
+ data=json.dumps({"dashboard": template}),
+ verify=self.ssl_verify,
+ )
 return self._request_result(res)
- def create_dashboard_from_file(self, dashboard_name, filename, filter=None, shared=False, public=False):
- '''
+ def create_dashboard_from_file(
+ self, dashboard_name, filename, filter=None, shared=False, public=False
+ ):
+ """
 **Description**
 Create a new dashboard using a dashboard template saved to disk. See :func:`~SdcClient.save_dashboard_to_file` to use the file to create a dashboard (useful to create and restore backups).
@@ -346,7 +394,7 @@ def create_dashboard_from_file(self, dashboard_name, filename, filter=None, shar
 **Example**
 `examples/dashboard_save_load.py `_
- '''
+ """
 #
 # Load the Dashboard
 #
@@ -356,21 +404,21 @@ def create_dashboard_from_file(self, dashboard_name, filename, filter=None, shar
 #
 # Handle old files
 #
- if 'dashboard' not in loaded_object:
+ if "dashboard" not in loaded_object:
 loaded_object = {
- 'version': f'v{loaded_object["schema"]}',
- 'dashboard': loaded_object
+ "version": f"v{loaded_object['schema']}",
+ "dashboard": loaded_object,
 }
- dashboard = loaded_object['dashboard']
+ dashboard = loaded_object["dashboard"]
- if loaded_object['version'] != self._dashboards_api_version:
+ if loaded_object["version"] != self._dashboards_api_version:
 #
 # Convert the dashboard (if possible)
 #
- conversion_result, dashboard = convert_dashboard_between_versions(dashboard,
- loaded_object['version'],
- self._dashboards_api_version)
+ conversion_result, dashboard = convert_dashboard_between_versions(
+ dashboard, loaded_object["version"], self._dashboards_api_version
+ )
 if not conversion_result:
 return conversion_result, dashboard
@@ -378,10 +426,12 @@ def create_dashboard_from_file(self, dashboard_name, filename, filter=None, shar
 #
 # Create the new dashboard
 #
- return self.create_dashboard_from_template(dashboard_name, dashboard, filter, shared, public)
+ return self.create_dashboard_from_template(
+ dashboard_name, dashboard, filter, shared, public
+ )
 def get_dashboard(self, dashboard_id):
- '''**Description**
+ """**Description**
 Return a dashboard with the passed-in ID. This includes the dashboards created by the user and the ones shared with them by other users.
 **Success Return Value**
@@ -389,13 +439,18 @@ def get_dashboard(self, dashboard_id):
 **Example**
 `examples/dashboard_basic_crud.py `_
- '''
- res = self.http.get(self.url + self._dashboards_api_endpoint + "/" + str(dashboard_id), headers=self.hdrs,
- verify=self.ssl_verify)
+ """
+ res = self.http.get(
+ self.url + self._dashboards_api_endpoint + "/" + str(dashboard_id),
+ headers=self.hdrs,
+ verify=self.ssl_verify,
+ )
 return self._request_result(res)
- def create_dashboard_from_dashboard(self, newdashname, templatename, filter=None, shared=False, public=False):
- '''**Description**
+ def create_dashboard_from_dashboard(
+ self, newdashname, templatename, filter=None, shared=False, public=False
+ ):
+ """**Description**
 Create a new dashboard using one of the existing dashboards as a template. You will be able to define the scope of the new dashboard.
**Arguments**
@@ -410,12 +465,16 @@ def create_dashboard_from_dashboard(self, newdashname, templatename, filter=None
 **Example**
 `examples/create_dashboard.py `_
- '''
+ """
 #
 # Get the list of dashboards from the server
 #
- dashboard = self.http.get(self.url + self._dashboards_api_endpoint, params={"light": "true"}, headers=self.hdrs,
- verify=self.ssl_verify)
+ dashboard = self.http.get(
+ self.url + self._dashboards_api_endpoint,
+ params={"light": "true"},
+ headers=self.hdrs,
+ verify=self.ssl_verify,
+ )
 if not self._checkResponse(dashboard):
 return [False, self.lasterr]
@@ -426,13 +485,15 @@ def create_dashboard_from_dashboard(self, newdashname, templatename, filter=None
 #
 dboard = None
- for db in j['dashboards']:
- if db['name'] == templatename:
+ for db in j["dashboards"]:
+ if db["name"] == templatename:
 dboard = db
 break
 if dboard is None:
- self.lasterr = 'can\'t find dashboard ' + templatename + ' to use as a template'
+ self.lasterr = (
+ "can't find dashboard " + templatename + " to use as a template"
+ )
 return [False, self.lasterr]
 ok, dboard = self.get_dashboard(dboard["id"])
@@ -441,12 +502,18 @@ def create_dashboard_from_dashboard(self, newdashname, templatename, filter=None
 #
 # Create the dashboard
 #
- return self.create_dashboard_from_template(newdashname, dboard["dashboard"], filter, shared, public)
+ return self.create_dashboard_from_template(
+ newdashname, dboard["dashboard"], filter, shared, public
+ )
 def favorite_dashboard(self, dashboard_id, favorite):
 data = {"dashboard": {"favorite": favorite}}
- res = self.http.patch(self.url + self._dashboards_api_endpoint + "/" + str(dashboard_id), json=data,
- headers=self.hdrs, verify=self.ssl_verify)
+ res = self.http.patch(
+ self.url + self._dashboards_api_endpoint + "/" + str(dashboard_id),
+ json=data,
+ headers=self.hdrs,
+ verify=self.ssl_verify,
+ )
 return self._request_result(res)
 def share_dashboard_with_all_teams(self, dashboard, mode="r"):
@@ -478,19 +545,23 @@ def share_dashboard_with_team(self, dashboard, team_id, mode="r"):
 if dboard["sharingSettings"] is None:
 dboard["sharingSettings"] = []
- dboard["sharingSettings"].append({
- "member": {
- "type": "TEAM",
- "id": team_id,
- },
- "role": role,
- })
+ dboard["sharingSettings"].append(
+ {
+ "member": {
+ "type": "TEAM",
+ "id": team_id,
+ },
+ "role": role,
+ }
+ )
 dboard["shared"] = True
 return self.update_dashboard(dboard)
- def create_dashboard_from_view(self, newdashname, viewname, filter, shared=False, public=False):
- '''**Description**
+ def create_dashboard_from_view(
+ self, newdashname, viewname, filter, shared=False, public=False
+ ):
+ """**Description**
 Create a new dashboard using one of the Sysdig Monitor views as a template. You will be able to define the scope of the new dashboard.
**Arguments**
@@ -505,7 +576,7 @@ def create_dashboard_from_view(self, newdashname, viewname, filter, shared=False
 **Example**
 `examples/create_dashboard.py `_
- '''
+ """
 #
 # Find our template view
 #
@@ -513,18 +584,23 @@ def create_dashboard_from_view(self, newdashname, viewname, filter, shared=False
 if gvres[0] is False:
 return gvres
- view = gvres[1]['dashboard']
+ view = gvres[1]["dashboard"]
- view['timeMode'] = {'mode': 1}
- view['time'] = {'last': 2 * 60 * 60 * 1000000, 'sampling': 2 * 60 * 60 * 1000000}
+ view["timeMode"] = {"mode": 1}
+ view["time"] = {
+ "last": 2 * 60 * 60 * 1000000,
+ "sampling": 2 * 60 * 60 * 1000000,
+ }
 #
 # Create the new dashboard
 #
- return self.create_dashboard_from_template(newdashname, view, filter, shared, public)
+ return self.create_dashboard_from_template(
+ newdashname, view, filter, shared, public
+ )
 def save_dashboard_to_file(self, dashboard, filename):
- '''
+ """
 **Description**
 Save a dashboard to disk. See :func:`~SdcClient.create_dashboard_from_file` to use the file to create a dashboard (useful to create and restore backups).
@@ -538,15 +614,14 @@ def save_dashboard_to_file(self, dashboard, filename):
 **Example**
 `examples/dashboard_save_load.py `_
- '''
- with open(filename, 'w') as outf:
- json.dump({
- 'version': self._dashboards_api_version,
- 'dashboard': dashboard
- }, outf)
+ """
+ with open(filename, "w") as outf:
+ json.dump(
+ {"version": self._dashboards_api_version, "dashboard": dashboard}, outf
+ )
 def delete_dashboard(self, dashboard):
- '''**Description**
+ """**Description**
 Deletes a dashboard.
 **Arguments**
@@ -557,12 +632,15 @@ def delete_dashboard(self, dashboard):
 **Example**
 `examples/delete_dashboard.py `_
- '''
- if 'id' not in dashboard:
+ """
+ if "id" not in dashboard:
 return [False, "Invalid dashboard format"]
- res = self.http.delete(self.url + self._dashboards_api_endpoint + '/' + str(dashboard['id']), headers=self.hdrs,
- verify=self.ssl_verify)
+ res = self.http.delete(
+ self.url + self._dashboards_api_endpoint + "/" + str(dashboard["id"]),
+ headers=self.hdrs,
+ verify=self.ssl_verify,
+ )
 if not self._checkResponse(res):
 return [False, self.lasterr]
diff --git a/sdcclient/monitor/_events_v1.py b/sdcclient/monitor/_events_v1.py
index 7419122c..a538bff9 100644
--- a/sdcclient/monitor/_events_v1.py
+++ b/sdcclient/monitor/_events_v1.py
@@ -4,12 +4,18 @@
 class EventsClientV1(_SdcCommon):
- def __init__(self, token="", sdc_url='https://app.sysdigcloud.com', ssl_verify=True, custom_headers=None):
+ def __init__(
+ self,
+ token="",
+ sdc_url="https://app.sysdigcloud.com",
+ ssl_verify=True,
+ custom_headers=None,
+ ):
 super().__init__(token, sdc_url, ssl_verify, custom_headers)
 self.product = "SDC"
 def get_events(self, from_s=None, to_s=None, last_s=None):
- '''**Description**
+ """**Description**
 Returns the list of Sysdig Monitor events.
**Arguments**
@@ -25,7 +31,7 @@ def get_events(self, from_s=None, to_s=None, last_s=None):
 **Example**
 `examples/list_events.py `_
- '''
+ """
 options = {
 "from": from_s,
@@ -33,7 +39,12 @@ def get_events(self, from_s=None, to_s=None, last_s=None):
 "last": last_s,
 }
 params = {k: v for k, v in options.items() if v is not None}
- res = self.http.get(self.url + '/api/events/', headers=self.hdrs, params=params, verify=self.ssl_verify)
+ res = self.http.get(
+ self.url + "/api/events/",
+ headers=self.hdrs,
+ params=params,
+ verify=self.ssl_verify,
+ )
 return self._request_result(res)
 def get_event(self, id):
@@ -53,12 +64,14 @@ def get_event(self, id):
 >>> if ok:
 >>> print(res["event"])
 """
- url = f'{self.url}/api/events/{id}'
+ url = f"{self.url}/api/events/{id}"
 res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify)
 return self._request_result(res)
- def post_event(self, name, description=None, severity=None, event_filter=None, tags=None):
- '''**Description**
+ def post_event(
+ self, name, description=None, severity=None, event_filter=None, tags=None
+ ):
+ """**Description**
 Send an event to Sysdig Monitor. The events you post are available in the Events tab in the Sysdig Monitor UI and can be overlaid on charts.
 **Arguments**
@@ -74,23 +87,25 @@ def post_event(self, name, description=None, severity=None, event_filter=None, t
 **Examples**
 - `examples/post_event_simple.py `_
 - `examples/post_event.py `_
- '''
+ """
 options = {
- 'name': name,
- 'description': description,
- 'severity': severity,
- 'filter': event_filter,
- 'tags': tags
+ "name": name,
+ "description": description,
+ "severity": severity,
+ "filter": event_filter,
+ "tags": tags,
 }
- edata = {
- 'event': {k: v for k, v in options.items() if v is not None}
- }
- res = self.http.post(self.url + '/api/events/', headers=self.hdrs, data=json.dumps(edata),
- verify=self.ssl_verify)
+ edata = {"event": {k: v for k, v in options.items() if v is not None}}
+ res = self.http.post(
+ self.url + "/api/events/",
+ headers=self.hdrs,
+ data=json.dumps(edata),
+ verify=self.ssl_verify,
+ )
 return self._request_result(res)
 def delete_event(self, event):
- '''**Description**
+ """**Description**
 Deletes an event.
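Taken together, ``post_event``, ``get_events`` and ``delete_event`` cover the full v1 events round trip. A minimal sketch under the SDK's ``[ok, result]`` convention — the token is a placeholder, and the ``"events"`` response key is an assumption inferred from the docstrings rather than confirmed by this patch:

from sdcclient.monitor import EventsClientV1

client = EventsClientV1(token="<your-api-token>")  # placeholder token

# Post a custom event, list the last hour of events, then clean up.
ok, res = client.post_event("deployment", description="rolled out v2")
ok, res = client.get_events(last_s=3600)
if ok:
    for event in res["events"]:  # assumed response key
        print(event["name"])
    if res["events"]:
        # delete_event expects the full event dict, since it reads event["id"]
        client.delete_event(res["events"][0])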
**Arguments** @@ -101,11 +116,15 @@ def delete_event(self, event): **Example** `examples/delete_event.py `_ - ''' - if 'id' not in event: + """ + if "id" not in event: return [False, "Invalid event format"] - res = self.http.delete(self.url + '/api/events/' + str(event['id']), headers=self.hdrs, verify=self.ssl_verify) + res = self.http.delete( + self.url + "/api/events/" + str(event["id"]), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, None] diff --git a/sdcclient/monitor/_events_v2.py b/sdcclient/monitor/_events_v2.py index ecc27fa9..381ef297 100644 --- a/sdcclient/monitor/_events_v2.py +++ b/sdcclient/monitor/_events_v2.py @@ -5,13 +5,28 @@ class EventsClientV2(_SdcCommon): - def __init__(self, token="", sdc_url='https://app.sysdigcloud.com', ssl_verify=True, custom_headers=None): + def __init__( + self, + token="", + sdc_url="https://app.sysdigcloud.com", + ssl_verify=True, + custom_headers=None, + ): super().__init__(token, sdc_url, ssl_verify, custom_headers) self.product = "SDC" - def get_events(self, name=None, category=None, direction='before', status=None, limit=100, pivot=None, from_s=None, - to_s=None): - '''**Description** + def get_events( + self, + name=None, + category=None, + direction="before", + status=None, + limit=100, + pivot=None, + from_s=None, + to_s=None, + ): + """**Description** Returns the list of Sysdig Monitor events. **Arguments** @@ -29,8 +44,8 @@ def get_events(self, name=None, category=None, direction='before', status=None, **Example** `examples/list_events.py `_ - ''' - valid_categories = ['alert', 'custom', 'docker', 'containerd', 'kubernetes'] + """ + valid_categories = ["alert", "custom", "docker", "containerd", "kubernetes"] if category is None: category = valid_categories @@ -48,7 +63,12 @@ def get_events(self, name=None, category=None, direction='before', status=None, return False, "Invalid status '{}'".format(s) if direction not in ["before", "after"]: - return False, "Invalid direction '{}', must be either 'before' or 'after'".format(direction) + return ( + False, + "Invalid direction '{}', must be either 'before' or 'after'".format( + direction + ), + ) if from_s is not None and isinstance(from_s, datetime): from_s = int(from_s.timestamp() * 1000) @@ -56,27 +76,35 @@ def get_events(self, name=None, category=None, direction='before', status=None, to_s = int(to_s.timestamp() * 1000) if to_s is None and from_s is not None or from_s is None and to_s is not None: - return False, "only one of 'from_s' or 'to_s' has been specified, both are required when filtering by time" + return ( + False, + "only one of 'from_s' or 'to_s' has been specified, both are required when filtering by time", + ) if to_s is not None and from_s is not None: if int(to_s) < int(from_s): return False, "'from_s' must be lower than 'to_s'" options = { - 'alertStatus': status, - 'category': ','.join(category), - 'dir': direction, - 'feed': 'true', - 'include_pivot': 'true', - 'include_total': 'true', - 'limit': str(limit), - 'pivot': pivot, - 'filter': name, - 'from': from_s, - 'to': to_s, + "alertStatus": status, + "category": ",".join(category), + "dir": direction, + "feed": "true", + "include_pivot": "true", + "include_total": "true", + "limit": str(limit), + "pivot": pivot, + "filter": name, + "from": from_s, + "to": to_s, } params = {k: v for k, v in options.items() if v is not None} - res = self.http.get(self.url + '/api/v2/events/', headers=self.hdrs, params=params, verify=self.ssl_verify) + 
res = self.http.get(
+ self.url + "/api/v2/events/",
+ headers=self.hdrs,
+ params=params,
+ verify=self.ssl_verify,
+ )
 return self._request_result(res)
 def get_event(self, id):
@@ -97,12 +125,12 @@ def get_event(self, id):
 >>> print(res["event"])
 """
- url = f'{self.url}/api/v2/events/{id}'
+ url = f"{self.url}/api/v2/events/{id}"
 res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify)
 return self._request_result(res)
 def delete_event(self, event):
- '''**Description**
+ """**Description**
 Deletes an event.
 **Arguments**
@@ -113,18 +141,23 @@ def delete_event(self, event):
 **Example**
 `examples/delete_event.py `_
- '''
- if 'id' not in event:
+ """
+ if "id" not in event:
 return [False, "Invalid event format"]
- res = self.http.delete(self.url + '/api/v2/events/' + str(event['id']), headers=self.hdrs,
- verify=self.ssl_verify)
+ res = self.http.delete(
+ self.url + "/api/v2/events/" + str(event["id"]),
+ headers=self.hdrs,
+ verify=self.ssl_verify,
+ )
 if not self._checkResponse(res):
 return [False, self.lasterr]
 return [True, None]
- def post_event(self, name, description=None, severity=None, event_filter=None, tags=None):
- '''**Description**
+ def post_event(
+ self, name, description=None, severity=None, event_filter=None, tags=None
+ ):
+ """**Description**
 Send an event to Sysdig Monitor. The events you post are available in the Events tab in the Sysdig Monitor UI and can be overlaid on charts.
 **Arguments**
@@ -140,17 +173,19 @@ def post_event(self, name, description=None, severity=None, event_filter=None, t
 **Examples**
 - `examples/post_event_simple.py `_
 - `examples/post_event.py `_
- '''
+ """
 options = {
- 'name': name,
- 'description': description,
- 'severity': severity,
- 'scope': event_filter,
- 'tags': tags
- }
- edata = {
- 'event': {k: v for k, v in options.items() if v is not None}
+ "name": name,
+ "description": description,
+ "severity": severity,
+ "scope": event_filter,
+ "tags": tags,
 }
- res = self.http.post(self.url + '/api/v2/events/', headers=self.hdrs, data=json.dumps(edata),
- verify=self.ssl_verify)
+ edata = {"event": {k: v for k, v in options.items() if v is not None}}
+ res = self.http.post(
+ self.url + "/api/v2/events/",
+ headers=self.hdrs,
+ data=json.dumps(edata),
+ verify=self.ssl_verify,
+ )
 return self._request_result(res)
diff --git a/sdcclient/monitor/dashboard_converters/_dashboard_scope.py b/sdcclient/monitor/dashboard_converters/_dashboard_scope.py
index f35e2703..de91c662 100644
--- a/sdcclient/monitor/dashboard_converters/_dashboard_scope.py
+++ b/sdcclient/monitor/dashboard_converters/_dashboard_scope.py
@@ -64,10 +64,9 @@ def flatten(S):
 grammar = tatsu.compile(_SCOPE_GRAMMAR)
 scope_list = []
- scope_expressions = scope.strip(' \t\n\r').split(' and ')
+ scope_expressions = scope.strip(" \t\n\r").split(" and ")
 for scope in scope_expressions:
-
 operand, parsed_operator, value = grammar.parse(scope)
 operator_match = {
@@ -85,19 +84,25 @@ def flatten(S):
 value = flatten(value)
 if len(value) > 1:
 value = list(value[1:-1]) # Remove '[' and ']'
- value = [elem for elem in value if elem != ','] # Remove ','
+ value = [elem for elem in value if elem != ","] # Remove ','
 else:
 value = [value]
- operator = "" if parsed_operator not in operator_match else operator_match[parsed_operator]
-
- scope_list.append({
- 'displayName': "",
- "isVariable": False,
- 'operand': operand,
- 'operator': operator,
- 'value': value
- })
+ operator = (
+ ""
+ if parsed_operator not in operator_match
+ else operator_match[parsed_operator]
+ )
+
+
scope_list.append( + { + "displayName": "", + "isVariable": False, + "operand": operand, + "operator": operator, + "value": value, + } + ) return [True, scope_list] except Exception as ex: return [False, f"invalid scope: {scope}, {ex.message}"] diff --git a/sdcclient/monitor/dashboard_converters/_dashboard_versions.py b/sdcclient/monitor/dashboard_converters/_dashboard_versions.py index 370479a9..39c0ab4c 100644 --- a/sdcclient/monitor/dashboard_converters/_dashboard_versions.py +++ b/sdcclient/monitor/dashboard_converters/_dashboard_versions.py @@ -1,6 +1,8 @@ import copy -from sdcclient.monitor.dashboard_converters._dashboard_scope import convert_scope_string_to_expression +from sdcclient.monitor.dashboard_converters._dashboard_scope import ( + convert_scope_string_to_expression, +) def _convert_dashboard_v1_to_v2(dashboard): @@ -56,58 +58,67 @@ def convert_scope(prop_name, old_dashboard, new_dashboard): if scope_conversion[0]: if scope_conversion[1]: - new_dashboard['scopeExpressionList'] = scope_conversion[1] + new_dashboard["scopeExpressionList"] = scope_conversion[1] else: # the property can be either `null` or a non-empty array - new_dashboard['scopeExpressionList'] = None + new_dashboard["scopeExpressionList"] = None else: - raise SyntaxError('scope not supported by the current grammar') + raise SyntaxError("scope not supported by the current grammar") def convert_events_filter(prop_name, old_dashboard, new_dashboard): - rename_to('eventsOverlaySettings')(prop_name, old_dashboard, new_dashboard) - - if 'showNotificationsDoNotFilterSameMetrics' in new_dashboard['eventsOverlaySettings']: - del new_dashboard['eventsOverlaySettings']['showNotificationsDoNotFilterSameMetrics'] - if 'showNotificationsDoNotFilterSameScope' in new_dashboard['eventsOverlaySettings']: - del new_dashboard['eventsOverlaySettings']['showNotificationsDoNotFilterSameScope'] + rename_to("eventsOverlaySettings")(prop_name, old_dashboard, new_dashboard) + + if ( + "showNotificationsDoNotFilterSameMetrics" + in new_dashboard["eventsOverlaySettings"] + ): + del new_dashboard["eventsOverlaySettings"][ + "showNotificationsDoNotFilterSameMetrics" + ] + if ( + "showNotificationsDoNotFilterSameScope" + in new_dashboard["eventsOverlaySettings"] + ): + del new_dashboard["eventsOverlaySettings"][ + "showNotificationsDoNotFilterSameScope" + ] def convert_items(prop_name, old_dashboard, new_dashboard): def convert_color_coding(prop_name, old_widget, new_widget): best_value = None worst_value = None - for item in old_widget[prop_name]['thresholds']: - if item['color'] == 'best': - best_value = item['max'] if not item['max'] else item['min'] - elif item['color'] == 'worst': - worst_value = item['min'] if not item['min'] else item['max'] + for item in old_widget[prop_name]["thresholds"]: + if item["color"] == "best": + best_value = item["max"] if not item["max"] else item["min"] + elif item["color"] == "worst": + worst_value = item["min"] if not item["min"] else item["max"] if best_value is not None and worst_value is not None: - new_widget[prop_name] = { - 'best': best_value, - 'worst': worst_value - } + new_widget[prop_name] = {"best": best_value, "worst": worst_value} def convert_display_options(prop_name, old_widget, new_widget): keep_as_is(prop_name, old_widget, new_widget) - if 'yAxisScaleFactor' in new_widget[prop_name]: - del new_widget[prop_name]['yAxisScaleFactor'] + if "yAxisScaleFactor" in new_widget[prop_name]: + del new_widget[prop_name]["yAxisScaleFactor"] def convert_group(prop_name, old_widget, new_widget): - 
group_by_metrics = old_widget[prop_name]['configuration']['groups'][0]['groupBy'] + group_by_metrics = old_widget[prop_name]["configuration"]["groups"][0][ + "groupBy" + ] migrated = [] for metric in group_by_metrics: - migrated.append({'id': metric['metric']}) + migrated.append({"id": metric["metric"]}) - new_widget['groupingLabelIds'] = migrated + new_widget["groupingLabelIds"] = migrated def convert_override_filter(prop_name, old_widget, new_widget): - if old_widget['showAs'] == 'map': + if old_widget["showAs"] == "map": # override scope always true if scope is set - new_widget['overrideScope'] = True + new_widget["overrideScope"] = True else: - new_widget['overrideScope'] = old_widget[prop_name] + new_widget["overrideScope"] = old_widget[prop_name] def convert_name(prop_name, old_widget, new_widget): # @@ -116,12 +127,12 @@ def convert_name(prop_name, old_widget, new_widget): unique_id = 1 name = old_widget[prop_name] - for widget in old_dashboard['items']: + for widget in old_dashboard["items"]: if widget == old_widget: break if old_widget[prop_name] == widget[prop_name]: - old_widget[prop_name] = '{} ({})'.format(name, unique_id) + old_widget[prop_name] = "{} ({})".format(name, unique_id) unique_id += 1 keep_as_is(prop_name, old_widget, new_widget) @@ -130,14 +141,14 @@ def convert_metrics(prop_name, old_widget, new_widget): def convert_property_name(prop_name, old_metric, new_metric): keep_as_is(prop_name, old_metric, new_metric) - if old_metric['metricId'] == 'timestamp': - return 'k0' + if old_metric["metricId"] == "timestamp": + return "k0" metric_migrations = { - 'metricId': rename_to('id'), - 'aggregation': rename_to('timeAggregation'), - 'groupAggregation': rename_to('groupAggregation'), - 'propertyName': convert_property_name + "metricId": rename_to("id"), + "aggregation": rename_to("timeAggregation"), + "groupAggregation": rename_to("groupAggregation"), + "propertyName": convert_property_name, } migrated_metrics = [] @@ -155,52 +166,57 @@ def convert_property_name(prop_name, old_metric, new_metric): # other keys: k* (from 0 or 1, depending on timestamp) # values: v* (from 0) sorted_metrics = [] - timestamp_key = [m for m in migrated_metrics - if m['id'] == 'timestamp' and - 'timeAggregation' not in m or - not (m['timeAggregation'] is not None) - ] - no_timestamp_keys = [m for m in migrated_metrics - if m['id'] != 'timestamp' and - 'timeAggregation' not in m or - not (m['timeAggregation'] is not None) - ] - values = [m for m in migrated_metrics - if 'timeAggregation' in m and - m['timeAggregation'] is not None - ] + timestamp_key = [ + m + for m in migrated_metrics + if m["id"] == "timestamp" + and "timeAggregation" not in m + or not (m["timeAggregation"] is not None) + ] + no_timestamp_keys = [ + m + for m in migrated_metrics + if m["id"] != "timestamp" + and "timeAggregation" not in m + or not (m["timeAggregation"] is not None) + ] + values = [ + m + for m in migrated_metrics + if "timeAggregation" in m and m["timeAggregation"] is not None + ] if timestamp_key: - timestamp_key[0]['propertyName'] = 'k0' + timestamp_key[0]["propertyName"] = "k0" sorted_metrics.append(timestamp_key[0]) k_offset = 1 if timestamp_key else 0 for i in range(0, len(no_timestamp_keys)): - no_timestamp_keys[i]['propertyName'] = 'k{}'.format(i + k_offset) + no_timestamp_keys[i]["propertyName"] = "k{}".format(i + k_offset) sorted_metrics.append(no_timestamp_keys[i]) for i in range(0, len(values)): - values[i]['propertyName'] = 'v{}'.format(i) + values[i]["propertyName"] = "v{}".format(i) 
sorted_metrics.append(values[i]) - new_widget['metrics'] = sorted_metrics + new_widget["metrics"] = sorted_metrics widget_migrations = { - 'colorCoding': when_set(convert_color_coding), - 'compareToConfig': when_set(keep_as_is), - 'customDisplayOptions': with_default(convert_display_options, {}), - 'gridConfiguration': keep_as_is, - 'group': when_set(convert_group), - 'hasTransparentBackground': when_set(rename_to('transparentBackground')), - 'limitToScope': when_set(keep_as_is), - 'isPanelTitleVisible': when_set(rename_to('panelTitleVisible')), - 'markdownSource': when_set(keep_as_is), - 'metrics': with_default(convert_metrics, []), - 'name': with_default(convert_name, 'Panel'), - 'overrideFilter': convert_override_filter, - 'paging': drop_it, - 'scope': with_default(keep_as_is, None), - 'showAs': keep_as_is, - 'showAsType': drop_it, - 'sorting': drop_it, - 'textpanelTooltip': when_set(keep_as_is), + "colorCoding": when_set(convert_color_coding), + "compareToConfig": when_set(keep_as_is), + "customDisplayOptions": with_default(convert_display_options, {}), + "gridConfiguration": keep_as_is, + "group": when_set(convert_group), + "hasTransparentBackground": when_set(rename_to("transparentBackground")), + "limitToScope": when_set(keep_as_is), + "isPanelTitleVisible": when_set(rename_to("panelTitleVisible")), + "markdownSource": when_set(keep_as_is), + "metrics": with_default(convert_metrics, []), + "name": with_default(convert_name, "Panel"), + "overrideFilter": convert_override_filter, + "paging": drop_it, + "scope": with_default(keep_as_is, None), + "showAs": keep_as_is, + "showAsType": drop_it, + "sorting": drop_it, + "textpanelTooltip": when_set(keep_as_is), } migrated_widgets = [] @@ -212,30 +228,30 @@ def convert_property_name(prop_name, old_metric, new_metric): migrated_widgets.append(migrated_widget) - new_dashboard['widgets'] = migrated_widgets + new_dashboard["widgets"] = migrated_widgets return migrated migrations = { - 'autoCreated': keep_as_is, - 'createdOn': keep_as_is, - 'eventsFilter': with_default(convert_events_filter, { - 'filterNotificationsUserInputFilter': '' - }), - 'filterExpression': convert_scope, - 'scopeExpressionList': ignore, # scope will be generated from 'filterExpression' - 'id': keep_as_is, - 'isPublic': rename_to('public'), - 'isShared': rename_to('shared'), - 'items': convert_items, - 'layout': drop_it, - 'modifiedOn': keep_as_is, - 'name': keep_as_is, - 'publicToken': drop_it, - 'schema': convert_schema, - 'teamId': keep_as_is, - 'username': keep_as_is, - 'version': keep_as_is, + "autoCreated": keep_as_is, + "createdOn": keep_as_is, + "eventsFilter": with_default( + convert_events_filter, {"filterNotificationsUserInputFilter": ""} + ), + "filterExpression": convert_scope, + "scopeExpressionList": ignore, # scope will be generated from 'filterExpression' + "id": keep_as_is, + "isPublic": rename_to("public"), + "isShared": rename_to("shared"), + "items": convert_items, + "layout": drop_it, + "modifiedOn": keep_as_is, + "name": keep_as_is, + "publicToken": drop_it, + "schema": convert_schema, + "teamId": keep_as_is, + "username": keep_as_is, + "version": keep_as_is, } # @@ -248,15 +264,11 @@ def convert_property_name(prop_name, old_metric, new_metric): return True, migrated -_DASHBOARD_CONVERTERS = { - 'v2': { - 'v1': _convert_dashboard_v1_to_v2 - } -} +_DASHBOARD_CONVERTERS = {"v2": {"v1": _convert_dashboard_v1_to_v2}} def convert_dashboard_between_versions(dashboard, version_from, version_to): - ''' + """ **Description** Converts a dashboard from a version 
to another version. Current conversions supported: @@ -268,15 +280,20 @@ def convert_dashboard_between_versions(dashboard, version_from, version_to): **Success Return Value** A dashboard transformed between versions. - ''' + """ converters_to = _DASHBOARD_CONVERTERS.get(version_to, None) if converters_to is None: - return False, f'unexpected error: no dashboard converters from version {version_to} are supported' + return ( + False, + f"unexpected error: no dashboard converters from version {version_to} are supported", + ) converter = converters_to.get(version_from, None) if converter is None: - return False, 'dashboard version {} cannot be converted to {}'.format(version_from, version_to) + return False, "dashboard version {} cannot be converted to {}".format( + version_from, version_to + ) try: return converter(dashboard) diff --git a/sdcclient/secure/__init__.py b/sdcclient/secure/__init__.py index 1537faf9..3946b95e 100644 --- a/sdcclient/secure/__init__.py +++ b/sdcclient/secure/__init__.py @@ -2,10 +2,25 @@ from ._falco_rules_files_old import FalcoRulesFilesClientOld from ._policy_events_old import PolicyEventsClientOld from ._policy_events_v1 import PolicyEventsClientV1 -from ._policy_v2 import (policy_action_capture, policy_action_kill, policy_action_pause, policy_action_stop, - PolicyClientV2) +from ._policy_v2 import ( + policy_action_capture, + policy_action_kill, + policy_action_pause, + policy_action_stop, + PolicyClientV2, +) from .scanning._alerts import ScanningAlertsClientV1 -__all__ = ["PolicyEventsClientOld", "PolicyEventsClientV1", "FalcoRulesFilesClientOld", - "PolicyClientV2", "policy_action_pause", "policy_action_stop", "policy_action_kill", "policy_action_capture", - "ActivityAuditClientV1", "ActivityAuditDataSource", "ScanningAlertsClientV1"] +__all__ = [ + "PolicyEventsClientOld", + "PolicyEventsClientV1", + "FalcoRulesFilesClientOld", + "PolicyClientV2", + "policy_action_pause", + "policy_action_stop", + "policy_action_kill", + "policy_action_capture", + "ActivityAuditClientV1", + "ActivityAuditDataSource", + "ScanningAlertsClientV1", +] diff --git a/sdcclient/secure/_activity_audit_v1.py b/sdcclient/secure/_activity_audit_v1.py index 2a13f5fe..00460a1e 100644 --- a/sdcclient/secure/_activity_audit_v1.py +++ b/sdcclient/secure/_activity_audit_v1.py @@ -10,16 +10,30 @@ class ActivityAuditDataSource: FILE = "fileaccess" -_seconds_to_nanoseconds = 10 ** 9 +_seconds_to_nanoseconds = 10**9 class ActivityAuditClientV1(_SdcCommon): - def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None): - super(ActivityAuditClientV1, self).__init__(token, sdc_url, ssl_verify, custom_headers) + def __init__( + self, + token="", + sdc_url="https://secure.sysdig.com", + ssl_verify=True, + custom_headers=None, + ): + super(ActivityAuditClientV1, self).__init__( + token, sdc_url, ssl_verify, custom_headers + ) self.product = "SDS" - def list_events(self, from_date=None, to_date=None, scope_filter=None, limit=0, - data_sources=None): + def list_events( + self, + from_date=None, + to_date=None, + scope_filter=None, + limit=0, + data_sources=None, + ): """ List the events in the Activity Audit. 
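A minimal usage sketch for the audit client — the token is a placeholder, only the ``FILE`` data source constant is visible in this patch, and the assumption that a successful call returns the accumulated event list follows the pagination code shown below:

from sdcclient.secure import ActivityAuditClientV1, ActivityAuditDataSource

client = ActivityAuditClientV1(token="<your-api-token>")  # placeholder token

# Fetch up to 50 file-access events; the client follows the page cursor itself.
ok, events = client.list_events(
    limit=50,
    data_sources=[ActivityAuditDataSource.FILE],
)
if ok:
    for event in events:
        print(event)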
@@ -56,7 +70,7 @@ def list_events(self, from_date=None, to_date=None, scope_filter=None, limit=0, filters = scope_filter if scope_filter else [] if data_sources: quoted_data_sources = [f'"{data_source}"' for data_source in data_sources] - data_source_filter = f'type in ({",".join(quoted_data_sources)})' + data_source_filter = f"type in ({','.join(quoted_data_sources)})" filters.append(data_source_filter) query_params = { @@ -66,8 +80,12 @@ def list_events(self, from_date=None, to_date=None, scope_filter=None, limit=0, "filter": " and ".join(filters), } - res = self.http.get(self.url + '/api/v1/activityAudit/events', headers=self.hdrs, verify=self.ssl_verify, - params=query_params) + res = self.http.get( + self.url + "/api/v1/activityAudit/events", + headers=self.hdrs, + verify=self.ssl_verify, + params=query_params, + ) ok, res = self._request_result(res) if not ok: return False, res @@ -75,23 +93,29 @@ def list_events(self, from_date=None, to_date=None, scope_filter=None, limit=0, events = [] # Pagination required by Secure API - while "page" in res and \ - "total" in res["page"] and \ - res["page"]["total"] > number_of_events_per_query: + while ( + "page" in res + and "total" in res["page"] + and res["page"]["total"] > number_of_events_per_query + ): events = events + res["data"] if 0 < limit < len(events): - events = events[0:limit - 1] + events = events[0 : limit - 1] break paginated_query_params = { "limit": number_of_events_per_query, "filter": " and ".join(filters), - "cursor": res["page"]["prev"] + "cursor": res["page"]["prev"], } - res = self.http.get(self.url + '/api/v1/activityAudit/events', headers=self.hdrs, verify=self.ssl_verify, - params=paginated_query_params) + res = self.http.get( + self.url + "/api/v1/activityAudit/events", + headers=self.hdrs, + verify=self.ssl_verify, + params=paginated_query_params, + ) ok, res = self._request_result(res) if not ok: return False, res @@ -129,8 +153,10 @@ def list_trace(self, traceable_event): if not traceable_event or not traceable_event["traceable"]: return False, "a traceable event must be provided" - endpoint = f'/api/v1/activityAudit/events/{traceable_event["type"]}/{traceable_event["id"]}/trace' - res = self.http.get(self.url + endpoint, headers=self.hdrs, verify=self.ssl_verify) + endpoint = f"/api/v1/activityAudit/events/{traceable_event['type']}/{traceable_event['id']}/trace" + res = self.http.get( + self.url + endpoint, headers=self.hdrs, verify=self.ssl_verify + ) ok, res = self._request_result(res) if not ok: return False, res diff --git a/sdcclient/secure/_falco_rules_files_old.py b/sdcclient/secure/_falco_rules_files_old.py index a6da3ac1..209e167f 100644 --- a/sdcclient/secure/_falco_rules_files_old.py +++ b/sdcclient/secure/_falco_rules_files_old.py @@ -8,14 +8,25 @@ class FalcoRulesFilesClientOld(_SdcCommon): - def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None): - super(FalcoRulesFilesClientOld, self).__init__(token, sdc_url, ssl_verify, custom_headers) + def __init__( + self, + token="", + sdc_url="https://secure.sysdig.com", + ssl_verify=True, + custom_headers=None, + ): + super(FalcoRulesFilesClientOld, self).__init__( + token, sdc_url, ssl_verify, custom_headers + ) self.product = "SDS" # TODO: Remove this one, deprecated def _get_falco_rules(self, kind): - res = self.http.get(self.url + '/api/settings/falco/{}RulesFile'.format(kind), headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/settings/falco/{}RulesFile".format(kind), 
+ headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] data = res.json() @@ -23,7 +34,7 @@ def _get_falco_rules(self, kind): # TODO: Change this one to use newestDefaultRulesFiles endpoint def get_system_falco_rules(self): - '''**Description** + """**Description** Get the system falco rules file in use for this customer. See the `Falco wiki `_ for documentation on the falco rules format. **Arguments** @@ -34,12 +45,12 @@ def get_system_falco_rules(self): **Example** `examples/get_secure_system_falco_rules.py `_ - ''' + """ return self._get_falco_rules("system") def get_user_falco_rules(self): - '''**Description** + """**Description** Get the user falco rules file in use for this customer. See the `Falco wiki `_ for documentation on the falco rules format. **Arguments** @@ -50,22 +61,27 @@ def get_user_falco_rules(self): **Example** `examples/get_secure_user_falco_rules.py `_ - ''' + """ ok, res = self._get_user_falco_rules() if not ok: return [False, res] - local_rules_file = [file - for file in res["customFalcoRulesFiles"]["files"] - if file["name"] == "falco_rules_local.yaml"] + local_rules_file = [ + file + for file in res["customFalcoRulesFiles"]["files"] + if file["name"] == "falco_rules_local.yaml" + ] if len(local_rules_file) == 0: return [False, "Expected falco_rules_local.yaml file, but no file found"] return [True, local_rules_file[0]["variants"][0]["content"]] def _get_user_falco_rules(self): - res = self.http.get(self.url + '/api/settings/falco/customRulesFiles', headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/settings/falco/customRulesFiles", + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] @@ -81,14 +97,18 @@ def _set_falco_rules(self, kind, rules_content): payload[1]["{}RulesFile".format(kind)]["content"] = rules_content # pylint: disable=unsubscriptable-object - res = self.http.put(self.url + '/api/settings/falco/{}RulesFile'.format(kind), headers=self.hdrs, - data=json.dumps(payload[1]), verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/settings/falco/{}RulesFile".format(kind), + headers=self.hdrs, + data=json.dumps(payload[1]), + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] def set_system_falco_rules(self, rules_content): - '''**Description** + """**Description** Set the system falco rules file in use for this customer. NOTE: This API endpoint can *only* be used in on-premise deployments. Generally the system falco rules file is only modified in conjunction with Sysdig support. See the `Falco wiki `_ for documentation on the falco rules format. **Arguments** @@ -100,11 +120,11 @@ def set_system_falco_rules(self, rules_content): **Example** `examples/set_secure_system_falco_rules.py `_ - ''' + """ return self._set_falco_rules("system", rules_content) def set_user_falco_rules(self, rules_content): - '''**Description** + """**Description** Set the user falco rules file in use for this customer. See the `Falco wiki `_ for documentation on the falco rules format. 
**Arguments** @@ -116,27 +136,36 @@ def set_user_falco_rules(self, rules_content): **Example** `examples/set_secure_user_falco_rules.py `_ - ''' + """ ok, res = self._get_user_falco_rules() if not ok: return res - local_rules_file = [file - for file in res["customFalcoRulesFiles"]["files"] - if file["name"] == "falco_rules_local.yaml"] + local_rules_file = [ + file + for file in res["customFalcoRulesFiles"]["files"] + if file["name"] == "falco_rules_local.yaml" + ] if len(local_rules_file) == 0: return [False, "Expected falco_rules_local.yaml file, but no file found"] local_rules_file[0]["variants"][0]["content"] = rules_content - res = self.http.put(self.url + '/api/settings/falco/customRulesFiles', headers=self.hdrs, - data=json.dumps(res), verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/settings/falco/customRulesFiles", + headers=self.hdrs, + data=json.dumps(res), + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] res_json = res.json() - return [True, res_json["customFalcoRulesFiles"]["files"][0]["variants"][0]["content"]] + return [ + True, + res_json["customFalcoRulesFiles"]["files"][0]["variants"][0]["content"], + ] # get_falco_syscall_rules() @@ -145,9 +174,11 @@ def set_user_falco_rules(self, rules_content): # Only one kind for now called "default", but might add a "custom" kind later. # TODO Remove this one def _get_falco_rules_files(self, kind): - - res = self.http.get(self.url + '/api/settings/falco/{}RulesFiles'.format(kind), headers=self.hdrs, - verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/settings/falco/{}RulesFiles".format(kind), + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] data = res.json() @@ -155,7 +186,7 @@ def _get_falco_rules_files(self, kind): return [True, data] def get_default_falco_rules_files(self): - '''**Description** + """**Description** Get the set of falco rules files from the backend. The _files programs and endpoints are a replacement for the system_file endpoints and allow for publishing multiple files instead of a single file as well as publishing multiple variants of a given file that are compatible @@ -202,7 +233,7 @@ def get_default_falco_rules_files(self): **Example** `examples/get_default_falco_rules_files.py `_ - ''' + """ res = self._get_falco_rules_files("default") @@ -226,7 +257,7 @@ def get_default_falco_rules_files(self): return [True, ret] def save_default_falco_rules_files(self, fsobj, save_dir): - '''**Description** + """**Description** Given a dict returned from get_default_falco_rules_files, save those files to a set of files below save_dir. The first level below save_dir is a directory with the tag name and an optional default_policies.yaml file, which groups rules into recommended default policies. The second level is a directory per file. 
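The getter, save, load and set methods form a backup/restore round trip, sketched below as a minimal example (the token and target directory are placeholders):

from sdcclient.secure import FalcoRulesFilesClientOld

client = FalcoRulesFilesClientOld(token="<your-api-token>")  # placeholder token

# Download the current default rules files and mirror them on disk.
ok, files = client.get_default_falco_rules_files()
if ok:
    ok, err = client.save_default_falco_rules_files(files, "./falco-backup")

# Later, reload the saved tree and push it back to the backend.
ok, loaded = client.load_default_falco_rules_files("./falco-backup")
if ok:
    ok, res = client.set_default_falco_rules_files(loaded)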
@@ -252,7 +283,7 @@ def save_default_falco_rules_files(self, fsobj, save_dir): **Example** `examples/get_default_falco_rules_files.py `_ - ''' + """ if os.path.exists(save_dir): try: if os.path.isdir(save_dir): @@ -260,16 +291,24 @@ def save_default_falco_rules_files(self, fsobj, save_dir): else: os.unlink(save_dir) except Exception as e: - return [False, "Could not remove existing save dir {}: {}".format(save_dir, str(e))] + return [ + False, + "Could not remove existing save dir {}: {}".format( + save_dir, str(e) + ), + ] prefix = os.path.join(save_dir, fsobj["tag"]) try: os.makedirs(prefix) except Exception as e: - return [False, "Could not create tag directory {}: {}".format(prefix, str(e))] + return [ + False, + "Could not create tag directory {}: {}".format(prefix, str(e)), + ] if "defaultPolicies" in fsobj: - with open(os.path.join(save_dir, "default_policies.yaml"), 'w') as outfile: + with open(os.path.join(save_dir, "default_policies.yaml"), "w") as outfile: yaml.safe_dump(fsobj["defaultPolicies"], outfile) if "files" in fsobj: @@ -278,25 +317,39 @@ def save_default_falco_rules_files(self, fsobj, save_dir): try: os.makedirs(fprefix) except Exception as e: - return [False, "Could not create file directory {}: {}".format(fprefix, str(e))] + return [ + False, + "Could not create file directory {}: {}".format( + fprefix, str(e) + ), + ] for variant in fobj["variants"]: - vprefix = os.path.join(fprefix, str(variant["requiredEngineVersion"])) + vprefix = os.path.join( + fprefix, str(variant["requiredEngineVersion"]) + ) try: os.makedirs(vprefix) except Exception as e: - return [False, "Could not create variant directory {}: {}".format(vprefix, str(e))] + return [ + False, + "Could not create variant directory {}: {}".format( + vprefix, str(e) + ), + ] cpath = os.path.join(vprefix, "content") try: with open(cpath, "w") as cfile: cfile.write(variant["content"]) except Exception as e: - return [False, "Could not write content to {}: {}".format(cfile, str(e))] + return [ + False, + "Could not write content to {}: {}".format(cfile, str(e)), + ] return [True, None] # Only One kind for now, but might add a "custom" kind later. def _set_falco_rules_files(self, kind, rules_files): - payload = self._get_falco_rules_files(kind) if not payload[0]: @@ -309,14 +362,18 @@ def _set_falco_rules_files(self, kind, rules_files): if "defaultPolicies" in rules_files: obj["defaultPolicies"] = rules_files["defaultPolicies"] - res = self.http.put(self.url + '/api/settings/falco/{}RulesFiles'.format(kind), headers=self.hdrs, - data=json.dumps(payload[1]), verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/settings/falco/{}RulesFiles".format(kind), + headers=self.hdrs, + data=json.dumps(payload[1]), + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] def set_default_falco_rules_files(self, rules_files): - '''**Description** + """**Description** Update the set of falco rules files to the provided set of files. See the `Falco wiki `_ for documentation on the falco rules format. 
The _files programs and endpoints are a replacement for the system_file endpoints and allow for publishing multiple files instead of a single file as well as publishing @@ -331,12 +388,12 @@ def set_default_falco_rules_files(self, rules_files): **Example** `examples/set_default_falco_rules_files.py `_ - ''' + """ return self._set_falco_rules_files("default", rules_files) def load_default_falco_rules_files(self, save_dir): - '''**Description** + """**Description** Given a file and directory layout as described in save_default_falco_rules_files(), load those files and return a dict representing the contents. This dict is suitable for passing to set_default_falco_rules_files(). @@ -348,7 +405,7 @@ def load_default_falco_rules_files(self, save_dir): **Example** `examples/set_default_falco_rules_files.py `_ - ''' + """ tags = os.listdir(save_dir) @@ -359,7 +416,10 @@ def load_default_falco_rules_files(self, save_dir): pass if len(tags) != 1: - return [False, "Directory {} did not contain exactly 1 entry".format(save_dir)] + return [ + False, + "Directory {} did not contain exactly 1 entry".format(save_dir), + ] tpath = os.path.join(save_dir, tags[0]) @@ -388,19 +448,32 @@ def load_default_falco_rules_files(self, save_dir): return [False, "Variant path {} is not a directory".format(vpath)] cpath = os.path.join(vpath, "content") try: - with open(cpath, 'r') as content_file: + with open(cpath, "r") as content_file: try: required_engine_version = int(os.path.basename(vpath)) if int(os.path.basename(vpath)) < 0: - return [False, "Variant directory {} must be a positive number".format(vpath)] - fobj["variants"].append({ - "requiredEngineVersion": required_engine_version, - "content": content_file.read() - }) + return [ + False, + "Variant directory {} must be a positive number".format( + vpath + ), + ] + fobj["variants"].append( + { + "requiredEngineVersion": required_engine_version, + "content": content_file.read(), + } + ) except ValueError: - return [False, "Variant directory {} must be a number".format(vpath)] + return [ + False, + "Variant directory {} must be a number".format(vpath), + ] except Exception as e: - return [False, "Could not read content at {}: {}".format(cpath, str(e))] + return [ + False, + "Could not read content at {}: {}".format(cpath, str(e)), + ] ret["files"].append(fobj) diff --git a/sdcclient/secure/_policy_events_old.py b/sdcclient/secure/_policy_events_old.py index a5a316d1..3ae6db91 100644 --- a/sdcclient/secure/_policy_events_old.py +++ b/sdcclient/secure/_policy_events_old.py @@ -6,36 +6,69 @@ class PolicyEventsClientOld(_SdcCommon): - def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None): - super(PolicyEventsClientOld, self).__init__(token, sdc_url, ssl_verify, custom_headers) + def __init__( + self, + token="", + sdc_url="https://secure.sysdig.com", + ssl_verify=True, + custom_headers=None, + ): + super(PolicyEventsClientOld, self).__init__( + token, sdc_url, ssl_verify, custom_headers + ) self.product = "SDS" def _get_policy_events_int(self, ctx): - warn("The PolicyEventsClientOld class is deprecated in favour of PolicyEventsClientV1; use it only if you have " - "an old on-premises installation", DeprecationWarning, 3) - policy_events_url = self.url + '/api/policyEvents{id}?from={frm:d}&to={to:d}&offset={offset}&limit={limit}{sampling}{aggregations}{scope}{filter}'.format( - id="/%s" % ctx["id"] if "id" in ctx else "", - frm=int(ctx['from']), - to=int(ctx['to']), - offset=ctx['offset'], - limit=ctx['limit'], - 
sampling='&sampling=%d' % int(ctx['sampling']) if "sampling" in ctx else "", - aggregations='&aggregations=%s' % json.dumps(ctx['aggregations']) if "aggregations" in ctx else "", - scope='&scopeFilter=%s' % ctx['scopeFilter'] if "scopeFilter" in ctx else "", - filter='&eventFilter=%s' % ctx['eventFilter'] if "eventFilter" in ctx else "") - - res = self.http.get(policy_events_url, headers=self.hdrs, verify=self.ssl_verify) + warn( + "The PolicyEventsClientOld class is deprecated in favour of PolicyEventsClientV1; use it only if you have " + "an old on-premises installation", + DeprecationWarning, + 3, + ) + policy_events_url = ( + self.url + + "/api/policyEvents{id}?from={frm:d}&to={to:d}&offset={offset}&limit={limit}{sampling}{aggregations}{scope}{filter}".format( + id="/%s" % ctx["id"] if "id" in ctx else "", + frm=int(ctx["from"]), + to=int(ctx["to"]), + offset=ctx["offset"], + limit=ctx["limit"], + sampling="&sampling=%d" % int(ctx["sampling"]) + if "sampling" in ctx + else "", + aggregations="&aggregations=%s" % json.dumps(ctx["aggregations"]) + if "aggregations" in ctx + else "", + scope="&scopeFilter=%s" % ctx["scopeFilter"] + if "scopeFilter" in ctx + else "", + filter="&eventFilter=%s" % ctx["eventFilter"] + if "eventFilter" in ctx + else "", + ) + ) + + res = self.http.get( + policy_events_url, headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] # Increment the offset by limit - ctx['offset'] += ctx['limit'] + ctx["offset"] += ctx["limit"] return [True, {"ctx": ctx, "data": res.json()}] - def get_policy_events_range(self, from_sec, to_sec, sampling=None, aggregations=None, scope_filter=None, - event_filter=None): - '''**Description** + def get_policy_events_range( + self, + from_sec, + to_sec, + sampling=None, + aggregations=None, + scope_filter=None, + event_filter=None, + ): + """**Description** Fetch all policy events that occurred in the time range [from_sec:to_sec]. This method is used in conjunction with :func:`~sdcclient.SdSecureClient.get_more_policy_events` to provide paginated access to policy events. @@ -56,21 +89,29 @@ def get_policy_events_range(self, from_sec, to_sec, sampling=None, aggregations= **Example** `examples/get_secure_policy_events.py `_ - ''' - options = {"from": int(from_sec) * 1000000, - "to": int(to_sec) * 1000000, - "offset": 0, - "limit": 1000, - "sampling": sampling, - "aggregations": aggregations, - "scopeFilter": scope_filter, - "eventFilter": event_filter} + """ + options = { + "from": int(from_sec) * 1000000, + "to": int(to_sec) * 1000000, + "offset": 0, + "limit": 1000, + "sampling": sampling, + "aggregations": aggregations, + "scopeFilter": scope_filter, + "eventFilter": event_filter, + } ctx = {k: v for k, v in options.items() if v is not None} return self._get_policy_events_int(ctx) - def get_policy_events_duration(self, duration_sec, sampling=None, aggregations=None, scope_filter=None, - event_filter=None): - '''**Description** + def get_policy_events_duration( + self, + duration_sec, + sampling=None, + aggregations=None, + scope_filter=None, + event_filter=None, + ): + """**Description** Fetch all policy events that occurred in the last duration_sec seconds. This method is used in conjunction with :func:`~sdcclient.SdSecureClient.get_more_policy_events` to provide paginated access to policy events. 
@@ -89,25 +130,35 @@ def get_policy_events_duration(self, duration_sec, sampling=None, aggregations=N **Example** `examples/get_secure_policy_events.py `_ - ''' + """ epoch = datetime.datetime.utcfromtimestamp(0) to_ts = (datetime.datetime.utcnow() - epoch).total_seconds() * 1000 * 1000 from_ts = to_ts - (int(duration_sec) * 1000 * 1000) - options = {"to": to_ts, - "from": from_ts, - "offset": 0, - "limit": 1000, - "sampling": sampling, - "aggregations": aggregations, - "scopeFilter": scope_filter, - "eventFilter": event_filter} + options = { + "to": to_ts, + "from": from_ts, + "offset": 0, + "limit": 1000, + "sampling": sampling, + "aggregations": aggregations, + "scopeFilter": scope_filter, + "eventFilter": event_filter, + } ctx = {k: v for k, v in options.items() if v is not None} return self._get_policy_events_int(ctx) - def get_policy_events_id_range(self, id, from_sec, to_sec, sampling=None, aggregations=None, scope_filter=None, - event_filter=None): - '''**Description** + def get_policy_events_id_range( + self, + id, + from_sec, + to_sec, + sampling=None, + aggregations=None, + scope_filter=None, + event_filter=None, + ): + """**Description** Fetch all policy events with id that occurred in the time range [from_sec:to_sec]. This method is used in conjunction with :func:`~sdcclient.SdSecureClient.get_more_policy_events` to provide paginated access to policy events. @@ -128,23 +179,32 @@ def get_policy_events_id_range(self, id, from_sec, to_sec, sampling=None, aggreg **Example** `examples/get_secure_policy_events.py `_ - ''' - - options = {"id": id, - "from": int(from_sec) * 1000000, - "to": int(to_sec) * 1000000, - "offset": 0, - "limit": 1000, - "sampling": sampling, - "aggregations": aggregations, - "scopeFilter": scope_filter, - "eventFilter": event_filter} + """ + + options = { + "id": id, + "from": int(from_sec) * 1000000, + "to": int(to_sec) * 1000000, + "offset": 0, + "limit": 1000, + "sampling": sampling, + "aggregations": aggregations, + "scopeFilter": scope_filter, + "eventFilter": event_filter, + } ctx = {k: v for k, v in options.items() if v is not None} return self._get_policy_events_int(ctx) - def get_policy_events_id_duration(self, id, duration_sec, sampling=None, aggregations=None, scope_filter=None, - event_filter=None): - '''**Description** + def get_policy_events_id_duration( + self, + id, + duration_sec, + sampling=None, + aggregations=None, + scope_filter=None, + event_filter=None, + ): + """**Description** Fetch all policy events with id that occurred in the last duration_sec seconds. This method is used in conjunction with :func:`~sdcclient.SdSecureClient.get_more_policy_events` to provide paginated access to policy events. 
@@ -164,25 +224,27 @@ def get_policy_events_id_duration(self, id, duration_sec, sampling=None, aggrega **Example** `examples/get_secure_policy_events.py `_ - ''' + """ epoch = datetime.datetime.utcfromtimestamp(0) to_ts = (datetime.datetime.utcnow() - epoch).total_seconds() * 1000 * 1000 from_ts = to_ts - (int(duration_sec) * 1000 * 1000) - options = {"id": id, - "to": to_ts, - "from": from_ts, - "offset": 0, - "limit": 1000, - "sampling": sampling, - "aggregations": aggregations, - "scopeFilter": scope_filter, - "eventFilter": event_filter} + options = { + "id": id, + "to": to_ts, + "from": from_ts, + "offset": 0, + "limit": 1000, + "sampling": sampling, + "aggregations": aggregations, + "scopeFilter": scope_filter, + "eventFilter": event_filter, + } ctx = {k: v for k, v in options.items() if v is not None} return self._get_policy_events_int(ctx) def get_more_policy_events(self, ctx): - '''**Description** + """**Description** Fetch additional policy events after an initial call to :func:`~sdcclient.SdSecureClient.get_policy_events_range` / :func:`~sdcclient.SdSecureClient.get_policy_events_duration` or a prior call to get_more_policy_events. @@ -208,5 +270,5 @@ def get_more_policy_events(self, ctx): **Example** `examples/get_secure_policy_events.py `_ - ''' + """ return self._get_policy_events_int(ctx) diff --git a/sdcclient/secure/_policy_events_v1.py b/sdcclient/secure/_policy_events_v1.py index 6d73cf29..98e66681 100644 --- a/sdcclient/secure/_policy_events_v1.py +++ b/sdcclient/secure/_policy_events_v1.py @@ -4,32 +4,43 @@ class PolicyEventsClientV1(_SdcCommon): - def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None): - super(PolicyEventsClientV1, self).__init__(token, sdc_url, ssl_verify, custom_headers) + def __init__( + self, + token="", + sdc_url="https://secure.sysdig.com", + ssl_verify=True, + custom_headers=None, + ): + super(PolicyEventsClientV1, self).__init__( + token, sdc_url, ssl_verify, custom_headers + ) self.product = "SDS" def _get_policy_events_int(self, ctx): limit = ctx.get("limit", 50) - policy_events_url = self.url + '/api/v1/secureEvents?limit={limit}{frm}{to}{filter}{cursor}'.format( - limit=limit, - frm=f"&from={int(ctx['from']):d}" if "from" in ctx else "", - to=f"&to={int(ctx['to']):d}" if "to" in ctx else "", - filter=f'&filter={ctx["filter"]}' if "filter" in ctx else "", - cursor=f'&cursor={ctx["cursor"]}' if "cursor" in ctx else "") - - res = self.http.get(policy_events_url, headers=self.hdrs, verify=self.ssl_verify) + policy_events_url = ( + self.url + + "/api/v1/secureEvents?limit={limit}{frm}{to}{filter}{cursor}".format( + limit=limit, + frm=f"&from={int(ctx['from']):d}" if "from" in ctx else "", + to=f"&to={int(ctx['to']):d}" if "to" in ctx else "", + filter=f"&filter={ctx['filter']}" if "filter" in ctx else "", + cursor=f"&cursor={ctx['cursor']}" if "cursor" in ctx else "", + ) + ) + + res = self.http.get( + policy_events_url, headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return [False, self.lasterr] - ctx = { - "limit": limit, - "cursor": res.json()["page"].get("prev", None) - } + ctx = {"limit": limit, "cursor": res.json()["page"].get("prev", None)} return [True, {"ctx": ctx, "data": res.json()["data"]}] def get_policy_events_range(self, from_sec, to_sec, filter=None): - '''**Description** + """**Description** Fetch all policy events that occurred in the time range [from_sec:to_sec]. 
This method is used in conjunction with :func:`~sdcclient.SdSecureClient.get_more_policy_events` to provide paginated access to policy events. @@ -47,16 +58,18 @@ def get_policy_events_range(self, from_sec, to_sec, filter=None): **Example** `examples/get_secure_policy_events.py `_ - ''' - options = {"from": int(from_sec) * 1_000_000_000, - "to": int(to_sec) * 1_000_000_000, - "limit": 999, - "filter": filter} + """ + options = { + "from": int(from_sec) * 1_000_000_000, + "to": int(to_sec) * 1_000_000_000, + "limit": 999, + "filter": filter, + } ctx = {k: v for k, v in options.items() if v is not None} return self._get_policy_events_int(ctx) def get_policy_events_duration(self, duration_sec, filter=None): - '''**Description** + """**Description** Fetch all policy events that occurred in the last duration_sec seconds. This method is used in conjunction with :func:`~sdcclient.SdSecureClient.get_more_policy_events` to provide paginated access to policy events. @@ -73,14 +86,18 @@ def get_policy_events_duration(self, duration_sec, filter=None): **Example** `examples/get_secure_policy_events.py `_ - ''' - to_sec = int((datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(0)).total_seconds()) + """ + to_sec = int( + ( + datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(0) + ).total_seconds() + ) from_sec = to_sec - (int(duration_sec)) return self.get_policy_events_range(from_sec, to_sec, filter) def get_more_policy_events(self, ctx): - '''**Description** + """**Description** Fetch additional policy events after an initial call to :func:`~sdcclient.SdSecureClient.get_policy_events_range` / :func:`~sdcclient.SdSecureClient.get_policy_events_duration` or a prior call to get_more_policy_events. @@ -113,7 +130,7 @@ def get_more_policy_events(self, ctx): **Example** `examples/get_secure_policy_events.py `_ - ''' + """ return self._get_policy_events_int(ctx) def get_policy_event(self, event_id): @@ -126,9 +143,11 @@ def get_policy_event(self, event_id): A tuple where the first parameter indicates if the request was successful, and the second parameter holds the info from the policy event or the error. 
""" - policy_events_url = f'{self.url}/api/v1/secureEvents/{event_id}' + policy_events_url = f"{self.url}/api/v1/secureEvents/{event_id}" - res = self.http.get(policy_events_url, headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + policy_events_url, headers=self.hdrs, verify=self.ssl_verify + ) if not self._checkResponse(res): return False, self.lasterr diff --git a/sdcclient/secure/_policy_v2.py b/sdcclient/secure/_policy_v2.py index ff88fade..8a8549bf 100644 --- a/sdcclient/secure/_policy_v2.py +++ b/sdcclient/secure/_policy_v2.py @@ -18,7 +18,7 @@ def policy_action_capture(file_name, secs_before=5, secs_after=15, filter=""): "filter": filter, "name": file_name, "bucketName": "", - "storageType": "S3" + "storageType": "S3", } @@ -35,7 +35,13 @@ def policy_action_kill(): class PolicyClientV2(_SdcCommon): - def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None): + def __init__( + self, + token="", + sdc_url="https://secure.sysdig.com", + ssl_verify=True, + custom_headers=None, + ): super(PolicyClientV2, self).__init__(token, sdc_url, ssl_verify, custom_headers) self.product = "SDS" @@ -54,7 +60,11 @@ def create_default_policies(self): >>> ok, res = client.create_default_policies() """ - res = self.http.post(self.url + '/api/v2/policies/default', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/v2/policies/default", + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) def delete_all_policies(self): @@ -92,7 +102,9 @@ def list_policies(self): >>> ok, res = client.list_policies() """ - res = self.http.get(self.url + '/api/v2/policies', headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/v2/policies", headers=self.hdrs, verify=self.ssl_verify + ) return self._request_result(res) def get_policy(self, name): @@ -145,11 +157,25 @@ def get_policy_id(self, id): >>> print((json.dumps(res, indent=2))) """ - res = self.http.get(self.url + '/api/v2/policies/{}'.format(id), headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + self.url + "/api/v2/policies/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) - def add_policy(self, name, description, rule_names=[], actions=[], scope=None, severity=0, enabled=True, - notification_channels=[], type="falco"): + def add_policy( + self, + name, + description, + rule_names=[], + actions=[], + scope=None, + severity=0, + enabled=True, + notification_channels=[], + type="falco", + ): """ Adds a new policy. 
@@ -188,8 +214,12 @@ def add_policy(self, name, description, rule_names=[], actions=[], scope=None, s "type": type, } - res = self.http.post(self.url + '/api/v2/policies', headers=self.hdrs, data=json.dumps(policy), - verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/v2/policies", + headers=self.hdrs, + data=json.dumps(policy), + verify=self.ssl_verify, + ) return self._request_result(res) def add_policy_json(self, policy_json): @@ -217,12 +247,26 @@ def add_policy_json(self, policy_json): except Exception as e: return [False, "policy json is not valid json: {}".format(str(e))] - res = self.http.post(self.url + '/api/v2/policies', headers=self.hdrs, data=json.dumps(policy_obj), - verify=self.ssl_verify) + res = self.http.post( + self.url + "/api/v2/policies", + headers=self.hdrs, + data=json.dumps(policy_obj), + verify=self.ssl_verify, + ) return self._request_result(res) - def update_policy(self, id, name=None, description=None, rule_names=None, actions=None, scope=None, - severity=None, enabled=None, notification_channels=None): + def update_policy( + self, + id, + name=None, + description=None, + rule_names=None, + actions=None, + scope=None, + severity=None, + enabled=None, + notification_channels=None, + ): """ Update policy with the provided values. Only the defined values will be updated. @@ -270,8 +314,12 @@ def update_policy(self, id, name=None, description=None, rule_names=None, action if notification_channels is not None: policy["notificationChannelIds"] = notification_channels - res = self.http.put(self.url + '/api/v2/policies/{}'.format(id), headers=self.hdrs, data=json.dumps(policy), - verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/v2/policies/{}".format(id), + headers=self.hdrs, + data=json.dumps(policy), + verify=self.ssl_verify, + ) return self._request_result(res) def update_policy_json(self, policy_json): @@ -302,8 +350,12 @@ def update_policy_json(self, policy_json): if "id" not in policy_obj: return [False, "Policy Json does not have an 'id' field"] - res = self.http.put(self.url + '/api/v2/policies/{}'.format(policy_obj["id"]), headers=self.hdrs, - data=json.dumps(policy_obj), verify=self.ssl_verify) + res = self.http.put( + self.url + "/api/v2/policies/{}".format(policy_obj["id"]), + headers=self.hdrs, + data=json.dumps(policy_obj), + verify=self.ssl_verify, + ) return self._request_result(res) def delete_policy_name(self, name): @@ -347,5 +399,9 @@ def delete_policy_id(self, id): >>> ok, res = client.delete_policy_id(id=123456) """ - res = self.http.delete(self.url + '/api/v2/policies/{}'.format(id), headers=self.hdrs, verify=self.ssl_verify) + res = self.http.delete( + self.url + "/api/v2/policies/{}".format(id), + headers=self.hdrs, + verify=self.ssl_verify, + ) return self._request_result(res) diff --git a/sdcclient/secure/scanning/_alerts.py b/sdcclient/secure/scanning/_alerts.py index c39f356d..999a1947 100644 --- a/sdcclient/secure/scanning/_alerts.py +++ b/sdcclient/secure/scanning/_alerts.py @@ -4,8 +4,16 @@ class ScanningAlertsClientV1(_SdcCommon): - def __init__(self, token="", sdc_url='https://secure.sysdig.com', ssl_verify=True, custom_headers=None): - super(ScanningAlertsClientV1, self).__init__(token, sdc_url, ssl_verify, custom_headers) + def __init__( + self, + token="", + sdc_url="https://secure.sysdig.com", + ssl_verify=True, + custom_headers=None, + ): + super(ScanningAlertsClientV1, self).__init__( + token, sdc_url, ssl_verify, custom_headers + ) self.product = "SDS" class RepositoryAlertTrigger: @@ -27,8 
+35,18 @@ def scan_result_change_any(alert): def cve_update(alert): alert["triggers"]["vuln_update"] = True - def add_repository_alert(self, name, registry, repository, tag, description="", triggers=None, notification_channels=None, enabled=True): - ''' + def add_repository_alert( + self, + name, + registry, + repository, + tag, + description="", + triggers=None, + notification_channels=None, + enabled=True, + ): + """ Create a new repository alert Args: @@ -59,43 +77,63 @@ def add_repository_alert(self, name, registry, repository, tag, description="", >>> if not ok: >>> print(f"error creating alert: {res}") >>> alert_id = res["alertId"] - ''' + """ if not triggers: - triggers = [ScanningAlertsClientV1.RepositoryAlertTrigger.new_image_analyzed] + triggers = [ + ScanningAlertsClientV1.RepositoryAlertTrigger.new_image_analyzed + ] alert = { - 'name': name, - 'description': description, - 'type': 'repository', - 'triggers': { - "unscanned": False, - "analysis_update": False, - "vuln_update": False, - "policy_eval": False, - "failed": False - }, - 'repositories': [{ - 'registry': registry, - 'repository': repository, - 'tag': tag, - }], - "onlyPassFail": False, - "skipEventSend": False, - 'enabled': enabled, - 'notificationChannelIds': notification_channels, + "name": name, + "description": description, + "type": "repository", + "triggers": { + "unscanned": False, + "analysis_update": False, + "vuln_update": False, + "policy_eval": False, + "failed": False, + }, + "repositories": [ + { + "registry": registry, + "repository": repository, + "tag": tag, + } + ], + "onlyPassFail": False, + "skipEventSend": False, + "enabled": enabled, + "notificationChannelIds": notification_channels, } for trigger in triggers: trigger(alert) - res = self.http.post(f"{self.url}/api/scanning/v1/alerts", headers=self.hdrs, data=json.dumps(alert), verify=self.ssl_verify) + res = self.http.post( + f"{self.url}/api/scanning/v1/alerts", + headers=self.hdrs, + data=json.dumps(alert), + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] - def update_repository_alert(self, id, name=None, registry=None, repository=None, tag=None, description=None, triggers=None, notification_channels=None, enabled=None): - ''' + def update_repository_alert( + self, + id, + name=None, + registry=None, + repository=None, + tag=None, + description=None, + triggers=None, + notification_channels=None, + enabled=None, + ): + """ Updates a repository alert. Fields that are not specified, will not be modified. 
Args: @@ -126,7 +164,7 @@ def update_repository_alert(self, id, name=None, registry=None, repository=None, >>> if not ok: >>> print(f"error updating alert: {res}") >>> alert_id = res["alertId"] - ''' + """ ok, alert = self.get_alert(id) if not ok: return False, f"unable to retrieve alert by ID {id}: {alert}" @@ -143,11 +181,11 @@ def update_repository_alert(self, id, name=None, registry=None, repository=None, alert["repositories"][0]["tag"] = tag if triggers is not None: alert["triggers"] = { - "unscanned": False, - "analysis_update": False, - "vuln_update": False, - "policy_eval": False, - "failed": False + "unscanned": False, + "analysis_update": False, + "vuln_update": False, + "policy_eval": False, + "failed": False, } alert["onlyPassFail"] = False for trigger in triggers: @@ -157,7 +195,12 @@ def update_repository_alert(self, id, name=None, registry=None, repository=None, if enabled is not None: alert["enabled"] = enabled - res = self.http.put(f"{self.url}/api/scanning/v1/alerts/{id}", headers=self.hdrs, data=json.dumps(alert), verify=self.ssl_verify) + res = self.http.put( + f"{self.url}/api/scanning/v1/alerts/{id}", + headers=self.hdrs, + data=json.dumps(alert), + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] @@ -182,8 +225,16 @@ def scan_result_change_any(alert): def cve_update(alert): alert["triggers"]["vuln_update"] = True - def add_runtime_alert(self, name, description="", scope="", triggers=None, notification_channels=None, enabled=True): - ''' + def add_runtime_alert( + self, + name, + description="", + scope="", + triggers=None, + notification_channels=None, + enabled=True, + ): + """ Create a new runtime alert Args: @@ -210,39 +261,53 @@ def add_runtime_alert(self, name, description="", scope="", triggers=None, notif >>> if not ok: >>> print(f"error creating alert: {res}") >>> alert_id = res["alertId"] - ''' + """ if not triggers: triggers = [ScanningAlertsClientV1.RuntimeAlertTrigger.unscanned_image] alert = { - 'name': name, - 'description': description, - 'type': 'runtime', - 'triggers': { - "unscanned": False, - "analysis_update": False, - "vuln_update": False, - "policy_eval": False, - "failed": False - }, - 'scope': scope, - "onlyPassFail": False, - "skipEventSend": False, - 'enabled': enabled, - 'notificationChannelIds': notification_channels, + "name": name, + "description": description, + "type": "runtime", + "triggers": { + "unscanned": False, + "analysis_update": False, + "vuln_update": False, + "policy_eval": False, + "failed": False, + }, + "scope": scope, + "onlyPassFail": False, + "skipEventSend": False, + "enabled": enabled, + "notificationChannelIds": notification_channels, } for trigger in triggers: trigger(alert) - res = self.http.post(f"{self.url}/api/scanning/v1/alerts", headers=self.hdrs, data=json.dumps(alert), verify=self.ssl_verify) + res = self.http.post( + f"{self.url}/api/scanning/v1/alerts", + headers=self.hdrs, + data=json.dumps(alert), + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] - def update_runtime_alert(self, id, name=None, description=None, scope=None, triggers=None, notification_channels=None, enabled=None): - ''' + def update_runtime_alert( + self, + id, + name=None, + description=None, + scope=None, + triggers=None, + notification_channels=None, + enabled=None, + ): + """ Updates a runtime alert. Fields that are not specified, will not be modified. 
Args: @@ -269,7 +334,7 @@ def update_runtime_alert(self, id, name=None, description=None, scope=None, trig >>> if not ok: >>> print(f"error updating alert: {res}") >>> alert_id = res["alertId"] - ''' + """ ok, alert = self.get_alert(id) if not ok: return False, f"unable to retrieve alert by ID {id}: {alert}" @@ -282,11 +347,11 @@ def update_runtime_alert(self, id, name=None, description=None, scope=None, trig alert["scope"] = scope if triggers is not None: alert["triggers"] = { - "unscanned": False, - "analysis_update": False, - "vuln_update": False, - "policy_eval": False, - "failed": False + "unscanned": False, + "analysis_update": False, + "vuln_update": False, + "policy_eval": False, + "failed": False, } alert["onlyPassFail"] = False for trigger in triggers: @@ -296,14 +361,19 @@ def update_runtime_alert(self, id, name=None, description=None, scope=None, trig if enabled is not None: alert["enabled"] = enabled - res = self.http.put(f"{self.url}/api/scanning/v1/alerts/{id}", headers=self.hdrs, data=json.dumps(alert), verify=self.ssl_verify) + res = self.http.put( + f"{self.url}/api/scanning/v1/alerts/{id}", + headers=self.hdrs, + data=json.dumps(alert), + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] def get_alert(self, alertid): - ''' + """ Retrieve the scanning alert with the given id Args: @@ -319,15 +389,19 @@ def get_alert(self, alertid): >>> if not ok: >>> print(f"error retrieving alert {alert_id}: {res}") >>> alert = res - ''' + """ - res = self.http.get(f"{self.url}/api/scanning/v1/alerts/{alertid}", headers=self.hdrs, verify=self.ssl_verify) + res = self.http.get( + f"{self.url}/api/scanning/v1/alerts/{alertid}", + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()] def list_alerts(self, limit=None, cursor=None): - ''' + """ List the current set of scanning alerts. Args: limit(int): Maximum number of alerts in the response. 
@@ -348,13 +422,13 @@ def list_alerts(self, limit=None, cursor=None): >>> # Load more alerts >>> if res["responseMetadata"] is not None: >>> ok, res = client.list_alerts(cursor=res["responseMetadata"]["next_cursor"]) - ''' + """ url = f"{self.url}/api/scanning/v1/alerts" if limit: - url += '?limit=' + str(limit) + url += "?limit=" + str(limit) if cursor: - url += '&cursor=' + cursor + url += "&cursor=" + cursor res = self.http.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): @@ -362,8 +436,10 @@ def list_alerts(self, limit=None, cursor=None): return [True, res.json()] - def delete_alert(self, policyid): # FIXME: policyid must be maintained for backwards compatibility reasons with older versions, but should be renamed to id or alert_id - ''' + def delete_alert( + self, policyid + ): # FIXME: policyid must be maintained for backwards compatibility reasons with older versions, but should be renamed to id or alert_id + """ Delete the alert with the given id Args: @@ -373,15 +449,19 @@ def delete_alert(self, policyid): # FIXME: policyid must be maintained for back >>> client = ScanningAlertsClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), >>> token=os.getenv("SDC_SECURE_TOKEN")) >>> client.delete_alert(alert_id) - ''' + """ - res = self.http.delete(f"{self.url}/api/scanning/v1/alerts/{policyid}", headers=self.hdrs, verify=self.ssl_verify) + res = self.http.delete( + f"{self.url}/api/scanning/v1/alerts/{policyid}", + headers=self.hdrs, + verify=self.ssl_verify, + ) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.text] def add_alert_object(self, object): - ''' + """ Adds alert object as raw JSON object. Args: @@ -407,8 +487,8 @@ def add_alert_object(self, object): >>> "notificationChannelIds": [] >>> } >>> client.add_alert_object(alert) - ''' - url = self.url + '/api/scanning/v1/alerts' + """ + url = self.url + "/api/scanning/v1/alerts" data = json.dumps(object) res = self.http.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): diff --git a/specs/_common/agent_spec.py b/specs/_common/agent_spec.py index e0734ab1..92763488 100644 --- a/specs/_common/agent_spec.py +++ b/specs/_common/agent_spec.py @@ -33,22 +33,18 @@ def _debug_enabled(): def _agent_configuration(): return { "files": [ - { - "filter": "host.mac = \"08:00:27:de:5b:b9\"", - "content": _mysql_app_check() - }, - { - "filter": "*", - "content": _debug_enabled() - } + {"filter": 'host.mac = "08:00:27:de:5b:b9"', "content": _mysql_app_check()}, + {"filter": "*", "content": _debug_enabled()}, ] } with description("Agent", "integration-agent") as self: with before.all: - self.client = SdcClient(sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), - token=os.getenv("SDC_MONITOR_TOKEN")) + self.client = SdcClient( + sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), + token=os.getenv("SDC_MONITOR_TOKEN"), + ) with it("is able to retrieve the agent configuration"): ok, res = self.client.get_agents_config() @@ -60,8 +56,12 @@ def _agent_configuration(): ok, res = self.client.set_agents_config(_agent_configuration()) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_key("files", contain(have_key("content", contain(_mysql_app_check()))))) - expect(res).to(have_key("files", contain(have_key("content", contain(_debug_enabled()))))) + expect(res).to( + have_key("files", contain(have_key("content", contain(_mysql_app_check())))) + ) + expect(res).to( + 
have_key("files", contain(have_key("content", contain(_debug_enabled())))) + ) with it("is able to clean up the agent configuration"): ok, res = self.client.clear_agents_config() @@ -79,13 +79,17 @@ def _agent_configuration(): ok, res = self.client.get_connected_agents() expect((ok, res)).to(be_successful_api_call) - expect(res).to(contain(have_keys( - "customer", - "machineId", - "hostName", - connected=True, - attributes=have_keys( - "hidden", - "version", + expect(res).to( + contain( + have_keys( + "customer", + "machineId", + "hostName", + connected=True, + attributes=have_keys( + "hidden", + "version", + ), + ) ) - ))) + ) diff --git a/specs/_common/team_spec.py b/specs/_common/team_spec.py index 9579686c..70642e63 100644 --- a/specs/_common/team_spec.py +++ b/specs/_common/team_spec.py @@ -6,21 +6,21 @@ from specs import be_successful_api_call from collections import defaultdict -TEAM_PREFIX_NAME = 'sysdig-sdk - ' +TEAM_PREFIX_NAME = "sysdig-sdk - " with description("Teams", "integration", "teams") as self: with before.all: self.secure_client = SdSecureClient( - sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN") + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), ) self.monitor_client = SdMonitorClient( - sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), - token=os.getenv("SDC_MONITOR_TOKEN") + sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), + token=os.getenv("SDC_MONITOR_TOKEN"), ) with before.each: - self.team_name = f'{TEAM_PREFIX_NAME}{uuid.uuid4()}' + self.team_name = f"{TEAM_PREFIX_NAME}{uuid.uuid4()}" with it("it should list all teams"): ok, teams_monitor = self.monitor_client.get_teams() @@ -34,7 +34,7 @@ def count_products(teams, count): for team in teams: - for product in team['products']: + for product in team["products"]: count[product] += 1 count_products(teams_monitor, count_monitor) @@ -49,10 +49,10 @@ def count_products(teams, count): ok, team = self.secure_client.create_team(self.team_name) expect((ok, team)).to(be_successful_api_call) - ok, teams = self.monitor_client.get_teams(product_filter='SDC') + ok, teams = self.monitor_client.get_teams(product_filter="SDC") expect((ok, teams)).to(be_successful_api_call) - secure_teams = [t for t in teams if 'SDS' in t['products']] + secure_teams = [t for t in teams if "SDS" in t["products"]] expect(len(secure_teams)).to(equal(0)) ok, res = self.secure_client.delete_team(self.team_name) @@ -62,10 +62,10 @@ def count_products(teams, count): ok, team = self.monitor_client.create_team(self.team_name) expect((ok, team)).to(be_successful_api_call) - ok, teams = self.secure_client.get_teams(product_filter='SDS') + ok, teams = self.secure_client.get_teams(product_filter="SDS") expect((ok, teams)).to(be_successful_api_call) - monitor_teams = [t for t in teams if 'SDC' in t['products']] + monitor_teams = [t for t in teams if "SDC" in t["products"]] expect(len(monitor_teams)).to(equal(0)) ok, res = self.monitor_client.delete_team(self.team_name) diff --git a/specs/_common/user_provisioning_spec.py b/specs/_common/user_provisioning_spec.py index 066ccab7..5aaf52a8 100644 --- a/specs/_common/user_provisioning_spec.py +++ b/specs/_common/user_provisioning_spec.py @@ -10,8 +10,10 @@ with description("User Provisioning", "integration") as self: with before.each: - self.client = SdcClient(sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), - token=os.getenv("SDC_MONITOR_TOKEN")) + 
self.client = SdcClient( + sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), + token=os.getenv("SDC_MONITOR_TOKEN"), + ) self.user_name = "terraform-test+user@sysdig.com" with after.each: @@ -32,7 +34,9 @@ with it("is able to provision the user with name, lastname and password"): random_password = "".join(choice(ascii_letters) for _ in range(20)) - ok, res = self.client.create_user(self.user_name, "Name", "LastName", random_password) + ok, res = self.client.create_user( + self.user_name, "Name", "LastName", random_password + ) expect((ok, res)).to(be_successful_api_call) with context("when the customer already exists"): diff --git a/specs/monitor/alerts_v1_spec.py b/specs/monitor/alerts_v1_spec.py index 99b53176..e7404998 100644 --- a/specs/monitor/alerts_v1_spec.py +++ b/specs/monitor/alerts_v1_spec.py @@ -12,8 +12,10 @@ with description("Alerts v1", "integration") as self: with before.all: - self.client = SdMonitorClient(sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), - token=os.getenv("SDC_MONITOR_TOKEN")) + self.client = SdMonitorClient( + sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), + token=os.getenv("SDC_MONITOR_TOKEN"), + ) with before.each: self.cleanup_alerts() @@ -21,7 +23,6 @@ with after.each: self.cleanup_alerts() - def cleanup_alerts(self): ok, res = self.client.get_alerts() expect((ok, res)).to(be_successful_api_call) @@ -31,27 +32,26 @@ def cleanup_alerts(self): call = self.client.delete_alert(alert) expect(call).to(be_successful_api_call) - def create_test_alert(self): ok, res = self.client.create_alert( name=_ALERT_NAME, description=_ALERT_DESCRIPTION, severity=6, for_atleast_s=60, - condition='avg(cpu.used.percent) > 80', + condition="avg(cpu.used.percent) > 80", # We want to check this metric for every process on every machine. - segmentby=['host.mac', 'proc.name'], - segment_condition='ANY', + segmentby=["host.mac", "proc.name"], + segment_condition="ANY", # if there is more than one tomcat process, this alert will fire when a single one of them crosses the # 80% threshold. 
user_filter='proc.name = "tomcat"', - enabled=False) + enabled=False, + ) if ok: self.test_alert = res["alert"] return ok, res - with it("is able to create an alert"): ok, res = self.create_test_alert() expect((ok, res)).to(be_successful_api_call) diff --git a/specs/monitor/captures_v1_spec.py b/specs/monitor/captures_v1_spec.py index c3c05cac..1027200b 100644 --- a/specs/monitor/captures_v1_spec.py +++ b/specs/monitor/captures_v1_spec.py @@ -15,13 +15,15 @@ def randomword(length): letters = string.ascii_lowercase + string.digits - return ''.join(random.choice(letters) for _ in range(length)) + return "".join(random.choice(letters) for _ in range(length)) with description("Captures v1", "integration-agent") as self: with before.all: - self.client = SdMonitorClient(sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), - token=os.getenv("SDC_MONITOR_TOKEN")) + self.client = SdMonitorClient( + sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), + token=os.getenv("SDC_MONITOR_TOKEN"), + ) self.capture_name = f"apicapture-sdk-{randomword(10)}" self.hostname = socket.gethostname() @@ -34,32 +36,40 @@ def randomword(length): expect((ok, res)).to(be_successful_api_call) with it("is able to create a capture"): - ok, res = self.client.create_sysdig_capture(hostname=self.hostname, - capture_name=self.capture_name, - duration=10) + ok, res = self.client.create_sysdig_capture( + hostname=self.hostname, capture_name=self.capture_name, duration=10 + ) expect((ok, res)).to(be_successful_api_call) with it("is able to retrieve the capture we have created"): ok, res = self.client.get_sysdig_captures() expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_key( - "dumps", contain( - have_keys( - "size", - "status", - "folder", - agent=have_key("hostName", equal(self.hostname)), - name=equal(f"{self.capture_name}.scap"), - )) - )) + expect(res).to( + have_key( + "dumps", + contain( + have_keys( + "size", + "status", + "folder", + agent=have_key("hostName", equal(self.hostname)), + name=equal(f"{self.capture_name}.scap"), + ) + ), + ) + ) # DEACTIVATED: This test is not enabled because sometimes the agent does not trigger the capture # and therefore this test fails. As it is not our duty to verify that the agent is able to create the capture, # we assume this won't be covered by the library. with _it("polls the status of the capture until it's done"): _, res = self.client.get_sysdig_captures() - capture = [capture for capture in res["dumps"] if capture["name"] == f"{self.capture_name}.scap"][0] + capture = [ + capture + for capture in res["dumps"] + if capture["name"] == f"{self.capture_name}.scap" + ][0] status = "undefined" for _ in range(300): @@ -80,7 +90,11 @@ def randomword(length): # we assume this won't be covered by the library. 
with _it("is able to download the capture"): _, res = self.client.get_sysdig_captures() - capture = [capture for capture in res["dumps"] if capture["name"] == f"{self.capture_name}.scap"][0] + capture = [ + capture + for capture in res["dumps"] + if capture["name"] == f"{self.capture_name}.scap" + ][0] call = self.client.download_sysdig_capture(capture["id"]) expect(call).to(be_successful_api_call) diff --git a/specs/monitor/dashboard_converters/dashboard_scope_spec.py b/specs/monitor/dashboard_converters/dashboard_scope_spec.py index e07aa4ad..ec2671ca 100644 --- a/specs/monitor/dashboard_converters/dashboard_scope_spec.py +++ b/specs/monitor/dashboard_converters/dashboard_scope_spec.py @@ -7,184 +7,329 @@ with it("parses correctly: agent.id is foo"): param = "agent.id is foo" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "equals", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "equals", + "value": ["foo"], + } + ], + ] + ) + ) with it("parses correctly: agent.id = foo"): param = "agent.id = foo" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "equals", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "equals", + "value": ["foo"], + } + ], + ] + ) + ) with it('parses correctly: agent.id = "foo"'): param = 'agent.id = "foo"' res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "equals", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "equals", + "value": ["foo"], + } + ], + ] + ) + ) with it('parses correctly: cluster.id-number = "foo-bar"'): param = 'cluster.id-number = "foo-bar"' res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "cluster.id-number", - "operator": "equals", - "value": ["foo-bar"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "cluster.id-number", + "operator": "equals", + "value": ["foo-bar"], + } + ], + ] + ) + ) with it("parses correctly: agent.id = 'foo'"): param = "agent.id = 'foo'" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "equals", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "equals", + "value": ["foo"], + } + ], + ] + ) + ) with it("parses correctly: agent.id is not foo"): param = "agent.id is not foo" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "notEquals", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "notEquals", + "value": ["foo"], + } + ], + ] + ) + ) with it("parses correctly: agent.id in 
foo"): param = "agent.id in foo" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "in", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "in", + "value": ["foo"], + } + ], + ] + ) + ) with it("parses correctly: agent.id in [foo]"): param = "agent.id in [foo]" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "in", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "in", + "value": ["foo"], + } + ], + ] + ) + ) with it("parses correctly: agent.id in [foo, bar]"): param = "agent.id in [foo, bar]" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "in", - "value": ["foo", "bar"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "in", + "value": ["foo", "bar"], + } + ], + ] + ) + ) with it("parses correctly: agent.id in [foo, bar, baz]"): param = "agent.id in [foo, bar, baz]" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "in", - "value": ["foo", "bar", "baz"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "in", + "value": ["foo", "bar", "baz"], + } + ], + ] + ) + ) with it("parses correctly: agent.id in [foo, bar, baz] and agent.name is 'foobar'"): param = "agent.id in [foo, bar, baz] and agent.name is 'foobar'" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "in", - "value": ["foo", "bar", "baz"] - }, { - "displayName": "", - "isVariable": False, - "operand": "agent.name", - "operator": "equals", - "value": ["foobar"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "in", + "value": ["foo", "bar", "baz"], + }, + { + "displayName": "", + "isVariable": False, + "operand": "agent.name", + "operator": "equals", + "value": ["foobar"], + }, + ], + ] + ) + ) with it("parses correctly: agent.id not in foo"): param = "agent.id not in foo" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "notIn", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "notIn", + "value": ["foo"], + } + ], + ] + ) + ) with it("parses correctly: agent.id not in [foo, bar, baz]"): param = "agent.id not in [foo, bar, baz]" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "notIn", - "value": ["foo", "bar", "baz"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "notIn", + 
"value": ["foo", "bar", "baz"], + } + ], + ] + ) + ) with it("parses correctly: agent.id contains foo"): param = "agent.id contains foo" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "contains", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "contains", + "value": ["foo"], + } + ], + ] + ) + ) with it("parses correctly: agent.id does not contain foo"): param = "agent.id does not contain foo" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "notContains", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "notContains", + "value": ["foo"], + } + ], + ] + ) + ) with it("parses correctly: agent.id starts with foo"): param = "agent.id starts with foo" res = convert_scope_string_to_expression(param) - expect(res).to(equal([True, [{ - "displayName": "", - "isVariable": False, - "operand": "agent.id", - "operator": "startsWith", - "value": ["foo"] - }]])) + expect(res).to( + equal( + [ + True, + [ + { + "displayName": "", + "isVariable": False, + "operand": "agent.id", + "operator": "startsWith", + "value": ["foo"], + } + ], + ] + ) + ) with it("returns ok, but empty if scope is None"): res = convert_scope_string_to_expression(None) diff --git a/specs/monitor/dashboards_v2_spec.py b/specs/monitor/dashboards_v2_spec.py index a75466db..47f5113e 100644 --- a/specs/monitor/dashboards_v2_spec.py +++ b/specs/monitor/dashboards_v2_spec.py @@ -13,8 +13,10 @@ with description("Dashboards v2", "integration") as self: with before.all: - self.client = DashboardsClientV2(sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), - token=os.getenv("SDC_MONITOR_TOKEN")) + self.client = DashboardsClientV2( + sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), + token=os.getenv("SDC_MONITOR_TOKEN"), + ) with before.each: self.cleanup_test_dashboards() @@ -22,7 +24,6 @@ with after.each: self.cleanup_test_dashboards() - def cleanup_test_dashboards(self): ok, res = self.client.get_dashboards() expect((ok, res)).to(be_successful_api_call) @@ -32,7 +33,6 @@ def cleanup_test_dashboards(self): call = self.client.delete_dashboard(dashboard) expect(call).to(be_successful_api_call) - def create_test_dashboard(self): ok, res = self.client.create_dashboard(name=_DASHBOARD_NAME) if ok: @@ -40,7 +40,6 @@ def create_test_dashboard(self): return ok, res - with it("is able to create a dashboard with just a name"): ok, res = self.client.create_dashboard(name=_DASHBOARD_NAME) expect((ok, res)).to(be_successful_api_call) @@ -54,16 +53,19 @@ def create_test_dashboard(self): f.flush() f.seek(0) - ok, res = self.client.create_dashboard_from_file(dashboard_name=f"{_DASHBOARD_NAME}_2", filename=f.name, - filter=None) + ok, res = self.client.create_dashboard_from_file( + dashboard_name=f"{_DASHBOARD_NAME}_2", filename=f.name, filter=None + ) expect((ok, res)).to(be_successful_api_call) with it("is able to create a dashboard from a view"): _, res_view_list = self.client.get_views_list() - call = self.client.create_dashboard_from_view(newdashname=f"{_DASHBOARD_NAME}_2", - viewname=res_view_list["defaultDashboards"][0]["name"], - filter=None) + call = 
self.client.create_dashboard_from_view( + newdashname=f"{_DASHBOARD_NAME}_2", + viewname=res_view_list["defaultDashboards"][0]["name"], + filter=None, + ) expect(call).to(be_successful_api_call) with context("when there are existing dashbords"): @@ -78,7 +80,11 @@ def create_test_dashboard(self): with it("is able to retrieve the test dashboard by its id"): ok, res = self.client.get_dashboard(dashboard_id=self.test_dashboard["id"]) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_key("dashboard", have_keys("name", id=equal(self.test_dashboard["id"])))) + expect(res).to( + have_key( + "dashboard", have_keys("name", id=equal(self.test_dashboard["id"])) + ) + ) with context("when deleting a dashboard"): with it("is able to remove it if all the info provided is correct"): @@ -98,7 +104,9 @@ def create_test_dashboard(self): expect(ok).to(be_false) expect(res).to(equal("status code 404")) - with it("returns an error if there is not 'id' field in the provided object"): + with it( + "returns an error if there is not 'id' field in the provided object" + ): ok, res = self.client.delete_dashboard({}) expect(ok).to(be_false) @@ -106,31 +114,41 @@ def create_test_dashboard(self): with it("is able to dump the dashboard to a file"): with tempfile.NamedTemporaryFile(mode="w+") as f: - self.client.save_dashboard_to_file(dashboard=self.test_dashboard, filename=f.name) + self.client.save_dashboard_to_file( + dashboard=self.test_dashboard, filename=f.name + ) f.flush() f.seek(0) data = json.load(f) - expect(data).to(have_keys(version=equal("v2"), dashboard=equal(self.test_dashboard))) + expect(data).to( + have_keys(version=equal("v2"), dashboard=equal(self.test_dashboard)) + ) with it("is able to create a dashboard from template"): - call = self.client.create_dashboard_from_template(dashboard_name=f"{_DASHBOARD_NAME}_2", - template=self.test_dashboard, - scope='agent.id = "foo"') + call = self.client.create_dashboard_from_template( + dashboard_name=f"{_DASHBOARD_NAME}_2", + template=self.test_dashboard, + scope='agent.id = "foo"', + ) expect(call).to(be_successful_api_call) with context("when it's created with an incorrect scope"): with it("fails if the scope is not a string"): - ok, res = self.client.create_dashboard_from_template(dashboard_name=f"{_DASHBOARD_NAME}_2", - template=self.test_dashboard, - scope={}) + ok, res = self.client.create_dashboard_from_template( + dashboard_name=f"{_DASHBOARD_NAME}_2", + template=self.test_dashboard, + scope={}, + ) expect(ok).to(be_false) expect(res).to(equal("Invalid scope format: Expected a string")) with it("fails if the scope has incorrect format"): - ok, res = self.client.create_dashboard_from_template(dashboard_name=f"{_DASHBOARD_NAME}_2", - template=self.test_dashboard, - scope="foobarbaz") + ok, res = self.client.create_dashboard_from_template( + dashboard_name=f"{_DASHBOARD_NAME}_2", + template=self.test_dashboard, + scope="foobarbaz", + ) expect(ok).to(be_false) expect(res).to(start_with("invalid scope: foobarbaz")) @@ -142,15 +160,25 @@ def create_test_dashboard(self): with context("when creating a dashboard from other dashboard"): with it("creates the dashboard correctly if the template exists"): - ok, res = self.client.create_dashboard_from_dashboard(newdashname=f"{_DASHBOARD_NAME}_2", - templatename=_DASHBOARD_NAME, filter=None) + ok, res = self.client.create_dashboard_from_dashboard( + newdashname=f"{_DASHBOARD_NAME}_2", + templatename=_DASHBOARD_NAME, + filter=None, + ) expect((ok, res)).to(be_successful_api_call) with it("returns an 
error saying the dashboard does not exist"): - ok, res = self.client.create_dashboard_from_dashboard(newdashname=f"{_DASHBOARD_NAME}_2", - templatename="NonExistingDashboard", filter=None) + ok, res = self.client.create_dashboard_from_dashboard( + newdashname=f"{_DASHBOARD_NAME}_2", + templatename="NonExistingDashboard", + filter=None, + ) expect(ok).to(be_false) - expect(res).to(equal("can't find dashboard NonExistingDashboard to use as a template")) + expect(res).to( + equal( + "can't find dashboard NonExistingDashboard to use as a template" + ) + ) with it("is able to update a dashboard"): self.test_dashboard["name"] = f"{_DASHBOARD_NAME}_updated" @@ -162,6 +190,14 @@ def create_test_dashboard(self): ok, res = self.client.find_dashboard_by(name=self.test_dashboard["name"]) expect((ok, res)).to(be_successful_api_call) - expect(res).to(contain( - have_key("dashboard", have_keys(id=self.test_dashboard["id"], name=self.test_dashboard["name"]))) + expect(res).to( + contain( + have_key( + "dashboard", + have_keys( + id=self.test_dashboard["id"], + name=self.test_dashboard["name"], + ), + ) + ) ) diff --git a/specs/monitor/dashboards_v3_spec.py b/specs/monitor/dashboards_v3_spec.py index 1c2a574d..760dacf8 100644 --- a/specs/monitor/dashboards_v3_spec.py +++ b/specs/monitor/dashboards_v3_spec.py @@ -2,7 +2,18 @@ import os import tempfile -from expects import expect, have_key, have_keys, contain, equal, start_with, be_false, have_len, be_empty, not_ +from expects import ( + expect, + have_key, + have_keys, + contain, + equal, + start_with, + be_false, + have_len, + be_empty, + not_, +) from mamba import before, it, context, after, description from sdcclient import SdMonitorClient @@ -12,8 +23,10 @@ with description("Dashboards v3", "integration") as self: with before.all: - self.client = SdMonitorClient(sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), - token=os.getenv("SDC_MONITOR_TOKEN")) + self.client = SdMonitorClient( + sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), + token=os.getenv("SDC_MONITOR_TOKEN"), + ) with before.each: self.cleanup_test_dashboards() @@ -21,7 +34,6 @@ with after.each: self.cleanup_test_dashboards() - def cleanup_test_dashboards(self): ok, res = self.client.get_dashboards() expect((ok, res)).to(be_successful_api_call) @@ -31,7 +43,6 @@ def cleanup_test_dashboards(self): call = self.client.delete_dashboard(dashboard) expect(call).to(be_successful_api_call) - def create_test_dashboard(self): ok, res = self.client.create_dashboard(name=_DASHBOARD_NAME) if ok: @@ -39,7 +50,6 @@ def create_test_dashboard(self): return ok, res - with it("is able to create a dashboard with just a name"): ok, res = self.client.create_dashboard(name=_DASHBOARD_NAME) expect((ok, res)).to(be_successful_api_call) @@ -53,17 +63,20 @@ def create_test_dashboard(self): f.flush() f.seek(0) - ok, res = self.client.create_dashboard_from_file(dashboard_name=f"{_DASHBOARD_NAME}_2", filename=f.name, - filter=None) + ok, res = self.client.create_dashboard_from_file( + dashboard_name=f"{_DASHBOARD_NAME}_2", filename=f.name, filter=None + ) expect((ok, res)).to(be_successful_api_call) with it("is able to create a dashboard from a view"): ok, res_view_list = self.client.get_views_list() expect((ok, res_view_list)).to(be_successful_api_call) - call = self.client.create_dashboard_from_view(newdashname=f"{_DASHBOARD_NAME}_2", - viewname=res_view_list["dashboardTemplates"][0]["name"], - filter=None) + call = self.client.create_dashboard_from_view( + 
newdashname=f"{_DASHBOARD_NAME}_2", + viewname=res_view_list["dashboardTemplates"][0]["name"], + filter=None, + ) expect(call).to(be_successful_api_call) with context("when there are existing dashbords"): @@ -78,15 +91,29 @@ def create_test_dashboard(self): with it("is able to list all the dashboards with the full information"): ok, res = self.client.get_dashboards(light=False) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_key("dashboards", contain(have_keys("name", "id", - panels=not_(be_empty), - layout=not_(be_empty), - permissions=not_(be_empty))))) + expect(res).to( + have_key( + "dashboards", + contain( + have_keys( + "name", + "id", + panels=not_(be_empty), + layout=not_(be_empty), + permissions=not_(be_empty), + ) + ), + ) + ) with it("is able to retrieve the test dashboard by its id"): ok, res = self.client.get_dashboard(dashboard_id=self.test_dashboard["id"]) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_key("dashboard", have_keys("name", id=equal(self.test_dashboard["id"])))) + expect(res).to( + have_key( + "dashboard", have_keys("name", id=equal(self.test_dashboard["id"])) + ) + ) with context("when deleting a dashboard"): with it("is able to remove it if all the info provided is correct"): @@ -106,7 +133,9 @@ def create_test_dashboard(self): expect(ok).to(be_false) expect(res).to(equal("status code 404")) - with it("returns an error if there is not 'id' field in the provided object"): + with it( + "returns an error if there is not 'id' field in the provided object" + ): ok, res = self.client.delete_dashboard({}) expect(ok).to(be_false) @@ -114,17 +143,23 @@ def create_test_dashboard(self): with it("is able to dump the dashboard to a file"): with tempfile.NamedTemporaryFile(mode="w+") as f: - self.client.save_dashboard_to_file(dashboard=self.test_dashboard, filename=f.name) + self.client.save_dashboard_to_file( + dashboard=self.test_dashboard, filename=f.name + ) f.flush() f.seek(0) data = json.load(f) - expect(data).to(have_keys(version=equal("v3"), dashboard=equal(self.test_dashboard))) + expect(data).to( + have_keys(version=equal("v3"), dashboard=equal(self.test_dashboard)) + ) with it("is able to create a dashboard from template"): - call = self.client.create_dashboard_from_template(dashboard_name=f"{_DASHBOARD_NAME}_2", - template=self.test_dashboard, - scope='agent.id = "foo"') + call = self.client.create_dashboard_from_template( + dashboard_name=f"{_DASHBOARD_NAME}_2", + template=self.test_dashboard, + scope='agent.id = "foo"', + ) expect(call).to(be_successful_api_call) with it("is able to make it public"): @@ -149,16 +184,22 @@ def create_test_dashboard(self): with context("when it's created with an incorrect scope"): with it("fails if the scope is not a string"): - ok, res = self.client.create_dashboard_from_template(dashboard_name=f"{_DASHBOARD_NAME}_2", - template=self.test_dashboard, - scope={}) + ok, res = self.client.create_dashboard_from_template( + dashboard_name=f"{_DASHBOARD_NAME}_2", + template=self.test_dashboard, + scope={}, + ) expect(ok).to(be_false) - expect(res).to(equal("Invalid scope format: Expected a list, a string or None")) + expect(res).to( + equal("Invalid scope format: Expected a list, a string or None") + ) with it("fails if the scope has incorrect format"): - ok, res = self.client.create_dashboard_from_template(dashboard_name=f"{_DASHBOARD_NAME}_2", - template=self.test_dashboard, - scope="foobarbaz") + ok, res = self.client.create_dashboard_from_template( + dashboard_name=f"{_DASHBOARD_NAME}_2", + 
template=self.test_dashboard, + scope="foobarbaz", + ) expect(ok).to(be_false) expect(res).to(start_with("invalid scope: foobarbaz")) @@ -170,15 +211,25 @@ def create_test_dashboard(self): with context("when creating a dashboard from other dashboard"): with it("creates the dashboard correctly if the template exists"): - ok, res = self.client.create_dashboard_from_dashboard(newdashname=f"{_DASHBOARD_NAME}_2", - templatename=_DASHBOARD_NAME, filter=None) + ok, res = self.client.create_dashboard_from_dashboard( + newdashname=f"{_DASHBOARD_NAME}_2", + templatename=_DASHBOARD_NAME, + filter=None, + ) expect((ok, res)).to(be_successful_api_call) with it("returns an error saying the dashboard does not exist"): - ok, res = self.client.create_dashboard_from_dashboard(newdashname=f"{_DASHBOARD_NAME}_2", - templatename="NonExistingDashboard", filter=None) + ok, res = self.client.create_dashboard_from_dashboard( + newdashname=f"{_DASHBOARD_NAME}_2", + templatename="NonExistingDashboard", + filter=None, + ) expect(ok).to(be_false) - expect(res).to(equal("can't find dashboard NonExistingDashboard to use as a template")) + expect(res).to( + equal( + "can't find dashboard NonExistingDashboard to use as a template" + ) + ) with it("is able to update a dashboard"): self.test_dashboard["name"] = f"{_DASHBOARD_NAME}_updated" @@ -190,32 +241,50 @@ def create_test_dashboard(self): ok, res = self.client.find_dashboard_by(name=self.test_dashboard["name"]) expect((ok, res)).to(be_successful_api_call) - expect(res).to(contain( - have_key("dashboard", have_keys(id=self.test_dashboard["id"], name=self.test_dashboard["name"]))) + expect(res).to( + contain( + have_key( + "dashboard", + have_keys( + id=self.test_dashboard["id"], + name=self.test_dashboard["name"], + ), + ) + ) ) with context("when we are sharing a dashboard with all teams"): with it("shares it with view only permissions"): - ok, res = self.client.share_dashboard_with_all_teams(self.test_dashboard, "r") + ok, res = self.client.share_dashboard_with_all_teams( + self.test_dashboard, "r" + ) expect((ok, res)).to(be_successful_api_call) expect(res["dashboard"]).to(have_key("shared", True)) expect(res["dashboard"]).to(have_key("sharingSettings")) expect(res["dashboard"]["sharingSettings"]).to(have_len(1)) - expect(res["dashboard"]["sharingSettings"][0]["role"]).to(equal("ROLE_RESOURCE_READ")) + expect(res["dashboard"]["sharingSettings"][0]["role"]).to( + equal("ROLE_RESOURCE_READ") + ) with it("shares it with read write permissions"): - ok, res = self.client.share_dashboard_with_all_teams(self.test_dashboard, "w") + ok, res = self.client.share_dashboard_with_all_teams( + self.test_dashboard, "w" + ) expect((ok, res)).to(be_successful_api_call) expect(res["dashboard"]).to(have_key("shared", True)) expect(res["dashboard"]).to(have_key("sharingSettings")) expect(res["dashboard"]["sharingSettings"]).to(have_len(1)) - expect(res["dashboard"]["sharingSettings"][0]["role"]).to(equal("ROLE_RESOURCE_EDIT")) + expect(res["dashboard"]["sharingSettings"][0]["role"]).to( + equal("ROLE_RESOURCE_EDIT") + ) with context("when there is a shared dashboard"): with it("unshares it"): - _, dboard = self.client.share_dashboard_with_all_teams(self.test_dashboard, "w") + _, dboard = self.client.share_dashboard_with_all_teams( + self.test_dashboard, "w" + ) ok, res = self.client.unshare_dashboard(dboard["dashboard"]) @@ -235,31 +304,42 @@ def create_test_dashboard(self): with it("shares it with view only permissions"): _, team = self.client.get_team("Monitor Operations") - ok, res = 
self.client.share_dashboard_with_team(self.test_dashboard, team["id"], "r") + ok, res = self.client.share_dashboard_with_team( + self.test_dashboard, team["id"], "r" + ) expect((ok, res)).to(be_successful_api_call) expect(res["dashboard"]).to(have_key("shared", True)) expect(res["dashboard"]).to(have_key("sharingSettings")) expect(res["dashboard"]["sharingSettings"]).to(have_len(1)) - expect(res["dashboard"]["sharingSettings"][0]["role"]).to(equal("ROLE_RESOURCE_READ")) + expect(res["dashboard"]["sharingSettings"][0]["role"]).to( + equal("ROLE_RESOURCE_READ") + ) with it("shares it with read write permissions"): _, team = self.client.get_team("Monitor Operations") - ok, res = self.client.share_dashboard_with_team(self.test_dashboard, team["id"], "w") + ok, res = self.client.share_dashboard_with_team( + self.test_dashboard, team["id"], "w" + ) expect((ok, res)).to(be_successful_api_call) expect(res["dashboard"]).to(have_key("shared", True)) expect(res["dashboard"]).to(have_key("sharingSettings")) expect(res["dashboard"]["sharingSettings"]).to(have_len(1)) - expect(res["dashboard"]["sharingSettings"][0]["role"]).to(equal("ROLE_RESOURCE_EDIT")) + expect(res["dashboard"]["sharingSettings"][0]["role"]).to( + equal("ROLE_RESOURCE_EDIT") + ) with it("shares it with two teams, one of those with write access"): _, team = self.client.get_team("Monitor Operations") - ok_team, res_team = self.client.share_dashboard_with_team(self.test_dashboard, team["id"], "r") - ok_team2, res_team2 = self.client.share_dashboard_with_team(res_team["dashboard"], - self.team["team"]["id"], "w") + ok_team, res_team = self.client.share_dashboard_with_team( + self.test_dashboard, team["id"], "r" + ) + ok_team2, res_team2 = self.client.share_dashboard_with_team( + res_team["dashboard"], self.team["team"]["id"], "w" + ) expect((ok_team, res_team)).to(be_successful_api_call) expect((ok_team2, res_team2)).to(be_successful_api_call) @@ -267,5 +347,9 @@ def create_test_dashboard(self): expect(res_team2["dashboard"]).to(have_key("shared", True)) expect(res_team2["dashboard"]).to(have_key("sharingSettings")) expect(res_team2["dashboard"]["sharingSettings"]).to(have_len(2)) - expect(res_team2["dashboard"]["sharingSettings"]).to(contain(have_keys(role=equal("ROLE_RESOURCE_READ")))) - expect(res_team2["dashboard"]["sharingSettings"]).to(contain(have_keys(role=equal("ROLE_RESOURCE_EDIT")))) + expect(res_team2["dashboard"]["sharingSettings"]).to( + contain(have_keys(role=equal("ROLE_RESOURCE_READ"))) + ) + expect(res_team2["dashboard"]["sharingSettings"]).to( + contain(have_keys(role=equal("ROLE_RESOURCE_EDIT"))) + ) diff --git a/specs/monitor/events_v1_spec.py b/specs/monitor/events_v1_spec.py index b32c8ac5..6f093af6 100644 --- a/specs/monitor/events_v1_spec.py +++ b/specs/monitor/events_v1_spec.py @@ -9,18 +9,24 @@ with description("Events v1", "integration") as self: with before.all: - self.client = EventsClientV1(sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), - token=os.getenv("SDC_MONITOR_TOKEN")) + self.client = EventsClientV1( + sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), + token=os.getenv("SDC_MONITOR_TOKEN"), + ) self.event_name = "event_v1_test_ci" with it("is able to create a custom event"): - call = self.client.post_event(name=self.event_name, - description="This event was created in a CI pipeline for the Python SDK library") + call = self.client.post_event( + name=self.event_name, + description="This event was created in a CI pipeline for the Python SDK library", + ) 
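As a reading aid for the hunks above and below: these specs exercise the SDK's uniform (ok, result) calling convention. A minimal standalone sketch of the same post_event call outside the spec harness (the event name and description here are illustrative, not taken from the suite):

    import os

    from sdcclient.monitor import EventsClientV1

    client = EventsClientV1(
        sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"),
        token=os.getenv("SDC_MONITOR_TOKEN"),
    )

    # post_event returns a (bool, result) pair; on failure the second
    # element is the error message rather than the created event.
    ok, res = client.post_event(
        name="event_v1_example",
        description="Example event posted outside the CI specs",
    )
    if not ok:
        print(res)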
expect(call).to(be_successful_api_call) with it("is able to retrieve an event by ID"): - ok, res = self.client.post_event(name=self.event_name, - description="This event was created in a CI pipeline for the Python SDK library") + ok, res = self.client.post_event( + name=self.event_name, + description="This event was created in a CI pipeline for the Python SDK library", + ) expect((ok, res)).to(be_successful_api_call) event = res["event"] diff --git a/specs/monitor/events_v2_spec.py b/specs/monitor/events_v2_spec.py index 72bba50b..579291b6 100644 --- a/specs/monitor/events_v2_spec.py +++ b/specs/monitor/events_v2_spec.py @@ -3,7 +3,17 @@ from datetime import datetime, timedelta from time import sleep -from expects import expect, have_key, contain, have_keys, be_empty, equal, be_false, be_above_or_equal, have_len +from expects import ( + expect, + have_key, + contain, + have_keys, + be_empty, + equal, + be_false, + be_above_or_equal, + have_len, +) from mamba import it, before, context, description from sdcclient.monitor import EventsClientV2 @@ -11,30 +21,40 @@ with description("Events v2", "integration") as self: with before.all: - self.client = EventsClientV2(sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), - token=os.getenv("SDC_MONITOR_TOKEN")) + self.client = EventsClientV2( + sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), + token=os.getenv("SDC_MONITOR_TOKEN"), + ) self.event_name = "event_v2_test_ci" with it("is able to create a custom event"): - call = self.client.post_event(name=self.event_name, - description="This event was created in a CI pipeline for the Python SDK library") + call = self.client.post_event( + name=self.event_name, + description="This event was created in a CI pipeline for the Python SDK library", + ) expect(call).to(be_successful_api_call) with it("is able to create a custom event with a scope"): - call = self.client.post_event(name=self.event_name, - description="This event was created in a CI pipeline for the Python SDK library", - event_filter="host.hostName='ci'") + call = self.client.post_event( + name=self.event_name, + description="This event was created in a CI pipeline for the Python SDK library", + event_filter="host.hostName='ci'", + ) expect(call).to(be_successful_api_call) sleep(2) # sleep to guarantee the event is created ok, res = self.client.get_events() expect((ok, res)).to(be_successful_api_call) expect(res).to(have_key("events")) - expect(res["events"]).to(contain(have_key("scope", equal("host.hostName = 'ci'")))) + expect(res["events"]).to( + contain(have_key("scope", equal("host.hostName = 'ci'"))) + ) with it("is able to retrieve an event by ID"): - ok, res = self.client.post_event(name=self.event_name, - description="This event was created in a CI pipeline for the Python SDK library") + ok, res = self.client.post_event( + name=self.event_name, + description="This event was created in a CI pipeline for the Python SDK library", + ) expect((ok, res)).to(be_successful_api_call) event = res["event"] @@ -58,18 +78,18 @@ expect(res).to(have_key("events", contain(have_keys(name=self.event_name)))) with it("fails to retrieve the events with an incorrect category"): - ok, res = self.client.get_events(category=['incorrect_category']) + ok, res = self.client.get_events(category=["incorrect_category"]) expect(ok).to(be_false) expect(res).to(equal("Invalid category 'incorrect_category'")) with it("is able to retrieve events that match a status"): - ok, res = self.client.get_events(status=['triggered']) + ok, 
res = self.client.get_events(status=["triggered"]) expect((ok, res)).to(be_successful_api_call) expect(res).to(have_key("events", contain(have_keys(name=self.event_name)))) with it("fails to retrieve the events with an incorrect status"): - ok, res = self.client.get_events(status=['incorrect_status']) + ok, res = self.client.get_events(status=["incorrect_status"]) expect(ok).to(be_false) expect(res).to(equal("Invalid status 'incorrect_status'")) @@ -78,25 +98,31 @@ ok, res = self.client.get_events(direction="before") expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys('events', 'total', 'matched')) + expect(res).to(have_keys("events", "total", "matched")) with it("retrieves the events correctly specifying direction 'after'"): ok, res = self.client.get_events(direction="after") expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys('events', 'total', 'matched')) + expect(res).to(have_keys("events", "total", "matched")) with it("fails to retrieve the events with an incorrect direction"): ok, res = self.client.get_events(direction="incorrect_direction") expect(ok).to(be_false) - expect(res).to(equal("Invalid direction 'incorrect_direction', must be either 'before' or 'after'")) + expect(res).to( + equal( + "Invalid direction 'incorrect_direction', must be either 'before' or 'after'" + ) + ) with it("is able to retrieve events by name"): ok, res = self.client.get_events(name=self.event_name) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_key("events", contain(have_key("name", equal(self.event_name))))) + expect(res).to( + have_key("events", contain(have_key("name", equal(self.event_name)))) + ) with it("retrieves an empty list when the name provided is not found"): ok, res = self.client.get_events(name="RandomUnexistingEvent") @@ -118,17 +144,27 @@ expect(res).to(have_key("events", have_len(be_above_or_equal(1)))) with context("but the from and to parameters are incorrectly specified"): - with it("returns an error if any of the parameters is specified but not the other"): + with it( + "returns an error if any of the parameters is specified but not the other" + ): t = datetime.now() - timedelta(weeks=2) ok1, res1 = self.client.get_events(from_s=t) ok2, res2 = self.client.get_events(to_s=t) expect((ok1, res1)).not_to(be_successful_api_call) expect((ok2, res2)).not_to(be_successful_api_call) - expect(res1).to(equal("only one of 'from_s' or 'to_s' has been specified, " - "both are required when filtering by time")) - expect(res2).to(equal("only one of 'from_s' or 'to_s' has been specified, " - "both are required when filtering by time")) + expect(res1).to( + equal( + "only one of 'from_s' or 'to_s' has been specified, " + "both are required when filtering by time" + ) + ) + expect(res2).to( + equal( + "only one of 'from_s' or 'to_s' has been specified, " + "both are required when filtering by time" + ) + ) with it("returns an error if they are specified in the wrong order"): to_s = datetime.now() diff --git a/specs/secure/activitylog_v1_spec.py b/specs/secure/activitylog_v1_spec.py index 81fbb13a..cf68b58b 100644 --- a/specs/secure/activitylog_v1_spec.py +++ b/specs/secure/activitylog_v1_spec.py @@ -4,13 +4,18 @@ from expects import be_above, be_empty, contain, expect, have_keys, have_len from mamba import _it, before, context, description, it -from sdcclient.secure import ActivityAuditClientV1 as ActivityAuditClient, ActivityAuditDataSource +from sdcclient.secure import ( + ActivityAuditClientV1 as ActivityAuditClient, + 
ActivityAuditDataSource, +) from specs import be_successful_api_call with description("Activity Audit v1", "integration") as self: with before.all: - self.client = ActivityAuditClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = ActivityAuditClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with it("is able to list the most recent commands with the default parameters"): ok, res = self.client.list_events() @@ -35,17 +40,23 @@ with context("when listing events from a specific type"): with it("retrieves the events of this event type only"): - ok, res = self.client.list_events(data_sources=[ActivityAuditDataSource.CMD]) + ok, res = self.client.list_events( + data_sources=[ActivityAuditDataSource.CMD] + ) expect((ok, res)).to(be_successful_api_call) expect(res).to(contain(have_keys(type=ActivityAuditDataSource.CMD))) - expect(res).to_not(contain(have_keys(type=ActivityAuditDataSource.KUBE_EXEC))) + expect(res).to_not( + contain(have_keys(type=ActivityAuditDataSource.KUBE_EXEC)) + ) expect(res).to_not(contain(have_keys(type=ActivityAuditDataSource.FILE))) expect(res).to_not(contain(have_keys(type=ActivityAuditDataSource.NET))) with context("when retrieving the inner events of a traceable event"): with _it("retrieves the trace of these events"): - ok, res = self.client.list_events(data_sources=[ActivityAuditDataSource.KUBE_EXEC]) + ok, res = self.client.list_events( + data_sources=[ActivityAuditDataSource.KUBE_EXEC] + ) expect((ok, res)).to(be_successful_api_call) expect(res).to(contain(have_keys(traceable=True))) @@ -55,4 +66,6 @@ expect((ok, res)).to(be_successful_api_call) expect(res).to(contain(have_keys(type=ActivityAuditDataSource.CMD))) - expect(res).to(have_len(be_above(0))) # Not using be_empty, because we want to ensure this is a list + expect(res).to( + have_len(be_above(0)) + ) # Not using be_empty, because we want to ensure this is a list diff --git a/specs/secure/custom_rules_spec.py b/specs/secure/custom_rules_spec.py index 907f9365..f56b317e 100644 --- a/specs/secure/custom_rules_spec.py +++ b/specs/secure/custom_rules_spec.py @@ -8,27 +8,34 @@ with description("Custom Rules", "integration") as self: with before.each: - self.client = SdSecureClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = SdSecureClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with context("when the custom rules file exists"): with it("can be retrieved"): ok, res = self.client.get_user_falco_rules() expect((ok, res)).to(be_successful_api_call) - expect(res).to(start_with("####################\n# Your custom rules!\n####################\n")) + expect(res).to( + start_with( + "####################\n# Your custom rules!\n####################\n" + ) + ) with context("when the credentials are not valid"): with it("can't be retrieved"): - self.client = SdSecureClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token="foo-bar") + self.client = SdSecureClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token="foo-bar", + ) ok, res = self.client.get_user_falco_rules() expect((ok, res)).to_not(be_successful_api_call) expect(res).to(equal("Bad credentials")) - with it("can push custom rules"): _, previous_rules = self.client.get_user_falco_rules() empty_rules = 
self.empty_falco_rules() @@ -45,19 +52,21 @@ ok, res = self.client.set_user_falco_rules(self.rules_without_header()) expect((ok, res)).to(be_successful_api_call) # The endpoint automatically fills the header for the user. - expect(res).to(start_with("####################\n# Your custom rules!\n####################\n\n")) + expect(res).to( + start_with( + "####################\n# Your custom rules!\n####################\n\n" + ) + ) expect(res).to(contain(self.rules_without_header())) ok, res = self.client.set_user_falco_rules(previous_rules) expect((ok, res)).to(be_successful_api_call) expect(res).to(equal(previous_rules)) - def user_falco_rules(self): with open("fixtures/custom_rules.yaml", "r") as f: return f.read() - def empty_falco_rules(self): return """#################### # Your custom rules! @@ -74,7 +83,6 @@ def empty_falco_rules(self): # Or override any rule, macro, or list from the Default Rules """ - def rules_without_header(self): return """\ --- diff --git a/specs/secure/policy_events_v1_spec.py b/specs/secure/policy_events_v1_spec.py index ad7337aa..b1d039bb 100644 --- a/specs/secure/policy_events_v1_spec.py +++ b/specs/secure/policy_events_v1_spec.py @@ -9,8 +9,10 @@ with description("Policy Events v1", "integration") as self: with before.each: - self.client = PolicyEventsClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = PolicyEventsClientV1( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with context("when we try to retrieve policy events from the last 7 days"): with it("returns the list of all events happened"): week_in_seconds = 7 * 24 * 60 * 60 @@ -20,10 +22,25 @@ expect((ok, res)).to(be_successful_api_call) expect(res).to(have_keys("ctx", "data")) expect(res["data"]).to( - contain(have_keys("id", "timestamp", "customerId", "source", "name", "description", "cursor"))) + contain( + have_keys( + "id", + "timestamp", + "customerId", + "source", + "name", + "description", + "cursor", + ) + ) + ) with it("returns the list of all events from a range"): - to_sec = int((datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(0)).total_seconds()) + to_sec = int( + ( + datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(0) + ).total_seconds() + ) from_sec = to_sec - (7 * 24 * 60 * 60) ok, res = self.client.get_policy_events_range(from_sec, to_sec) @@ -31,12 +48,27 @@ expect((ok, res)).to(be_successful_api_call) expect(res).to(have_keys("ctx", "data")) expect(res["data"]).to( - contain(have_keys("id", "timestamp", "customerId", "source", "name", "description", "cursor"))) - - with it("returns the list of all events from the last 7 days that match a filter"): + contain( + have_keys( + "id", + "timestamp", + "customerId", + "source", + "name", + "description", + "cursor", + ) + ) + ) + + with it( + "returns the list of all events from the last 7 days that match a filter" + ): week_in_seconds = 7 * 24 * 60 * 60 - ok, res = self.client.get_policy_events_duration(week_in_seconds, filter='severity in ("4","5")') + ok, res = self.client.get_policy_events_duration( + week_in_seconds, filter='severity in ("4","5")' + ) expect((ok, res)).to(be_successful_api_call) expect(res).to(have_keys("ctx", "data")) @@ -45,7 +77,9 @@ with it("returns an empty list if the filter does not match"): week_in_seconds = 7 * 24 * 60 * 60 - ok, res = self.client.get_policy_events_duration(week_in_seconds, filter='severity in ("-1")') + ok, res = 
self.client.get_policy_events_duration( + week_in_seconds, filter='severity in ("-1")' + ) expect((ok, res)).to(be_successful_api_call) expect(res).to(have_keys("ctx", "data")) @@ -88,4 +122,13 @@ ok, res = self.client.get_policy_event(event_id) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys("name", "timestamp", "customerId", "originator", "machineId", id=event_id)) + expect(res).to( + have_keys( + "name", + "timestamp", + "customerId", + "originator", + "machineId", + id=event_id, + ) + ) diff --git a/specs/secure/policy_v1_spec.py b/specs/secure/policy_v1_spec.py index c33b47f1..3684e0a7 100644 --- a/specs/secure/policy_v1_spec.py +++ b/specs/secure/policy_v1_spec.py @@ -12,20 +12,14 @@ _POLICY_DESCRIPTION = "Detect network tools launched on the host" _POLICY_RULES_REGEX = "Launch Suspicious Network Tool on Host" _POLICY_ACTIONS = [ - { - "type": "POLICY_ACTION_STOP", - "msg": "" - }, - { - "type": "POLICY_ACTION_PAUSE", - "msg": "" - }, + {"type": "POLICY_ACTION_STOP", "msg": ""}, + {"type": "POLICY_ACTION_PAUSE", "msg": ""}, { "type": "POLICY_ACTION_CAPTURE", "beforeEventNs": 5000000000, "afterEventNs": 18000000000, - "isLimitedToContainer": True - } + "isLimitedToContainer": True, + }, ] @@ -56,14 +50,16 @@ def policy_json(): with description("Policies v1", "integration") as self: with before.all: - self.clientV1 = SdSecureClientV1(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.clientV1 = SdSecureClientV1( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with after.each: self.cleanup_policies() def cleanup_policies(self): _, res = self.clientV1.list_policies() - for policy in res['policies']: + for policy in res["policies"]: if str(policy["name"]).startswith("Test - "): ok, res = self.clientV1.delete_policy_id(policy["id"]) expect((ok, res)).to(be_successful_api_call) @@ -78,7 +74,7 @@ def cleanup_policies(self): with it("is able to change the evaluation order of policies"): ok, res = self.clientV1.get_policy_priorities() - random.shuffle(res['priorities']['policyIds']) + random.shuffle(res["priorities"]["policyIds"]) ok, res = self.clientV1.set_policy_priorities(json.dumps(res)) expect((ok, res)).to(be_successful_api_call) @@ -88,31 +84,31 @@ def cleanup_policies(self): with it("is able to get a policy by id"): ok, res = self.clientV1.list_policies() - id = res['policies'][0]['id'] + id = res["policies"][0]["id"] call = self.clientV1.get_policy_id(id) expect(call).to(be_successful_api_call) with it("is able to get a policy by name"): ok, res = self.clientV1.list_policies() - name = res['policies'][0]['name'] + name = res["policies"][0]["name"] call = self.clientV1.get_policy(name) expect(call).to(be_successful_api_call) with it("is able to update a policy from JSON"): ok, res = self.clientV1.list_policies() - policy_json = res['policies'][0] - policy_json['description'] = "Updated description" + policy_json = res["policies"][0] + policy_json["description"] = "Updated description" call = self.clientV1.update_policy(json.dumps(policy_json)) expect(call).to(be_successful_api_call) with it("is able to delete a single policy by id"): ok, res = self.clientV1.list_policies() - ok, res = self.clientV1.delete_policy_id(res['policies'][0]['id']) + ok, res = self.clientV1.delete_policy_id(res["policies"][0]["id"]) expect((ok, res)).to(be_successful_api_call) with it("is able to delete a single policy by name"): ok, res = 
self.clientV1.list_policies() - ok, res = self.clientV1.delete_policy_name(res['policies'][1]['name']) + ok, res = self.clientV1.delete_policy_name(res["policies"][1]["name"]) expect((ok, res)).to(be_successful_api_call) with it("is able to delete all policies at once"): diff --git a/specs/secure/policy_v2_spec.py b/specs/secure/policy_v2_spec.py index 4cdc9b83..45ea736e 100644 --- a/specs/secure/policy_v2_spec.py +++ b/specs/secure/policy_v2_spec.py @@ -9,9 +9,15 @@ from specs import be_successful_api_call _POLICY_NAME = "Test - Terminal shell in container" -_POLICY_DESCRIPTION = "A shell was spawned by a program in a container with an attached terminal." +_POLICY_DESCRIPTION = ( + "A shell was spawned by a program in a container with an attached terminal." +) _POLICY_RULES = ["Terminal shell in container"] -_POLICY_ACTIONS = [policy_action_capture(file_name="TerminalShellInContainer", secs_before=10, secs_after=20)] +_POLICY_ACTIONS = [ + policy_action_capture( + file_name="TerminalShellInContainer", secs_before=10, secs_after=20 + ) +] def policy_json(): @@ -29,13 +35,20 @@ def policy_json(): "createdOn": 1596902934000, "modifiedOn": 1597138586000 } -""" % (_POLICY_NAME, _POLICY_DESCRIPTION, json.dumps(_POLICY_RULES), json.dumps(_POLICY_ACTIONS)) +""" % ( + _POLICY_NAME, + _POLICY_DESCRIPTION, + json.dumps(_POLICY_RULES), + json.dumps(_POLICY_ACTIONS), + ) with description("Policies v2", "integration") as self: with before.all: - self.client = SdSecureClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = SdSecureClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with before.each: self.cleanup_policies() @@ -43,7 +56,6 @@ def policy_json(): with after.each: self.cleanup_policies() - def cleanup_policies(self): _, res = self.client.list_policies() for policy in res: @@ -51,7 +63,6 @@ def cleanup_policies(self): ok, res = self.client.delete_policy_id(policy["id"]) expect((ok, res)).to(be_successful_api_call) - with it("is able to list all existing policies"): ok, res = self.client.list_policies() expect((ok, res)).to(be_successful_api_call) @@ -61,11 +72,13 @@ def cleanup_policies(self): expect(call).to(be_successful_api_call) with it("is able to create a policy with parameters"): - ok, res = self.client.add_policy(name=_POLICY_NAME, - description=_POLICY_DESCRIPTION, - rule_names=_POLICY_RULES, - actions=_POLICY_ACTIONS, - type="falco") + ok, res = self.client.add_policy( + name=_POLICY_NAME, + description=_POLICY_DESCRIPTION, + rule_names=_POLICY_RULES, + actions=_POLICY_ACTIONS, + type="falco", + ) expect((ok, res)).to(be_successful_api_call) @@ -73,7 +86,7 @@ def cleanup_policies(self): _, policies = self.client.list_policies() for policy in policies: - ok, res = self.client.delete_policy_id(policy['id']) + ok, res = self.client.delete_policy_id(policy["id"]) expect((ok, res)).to(be_successful_api_call) with it("is able to create the default policies"): diff --git a/specs/secure/scanning/alerts_spec.py b/specs/secure/scanning/alerts_spec.py index 0f0ccf4e..bf1ad130 100644 --- a/specs/secure/scanning/alerts_spec.py +++ b/specs/secure/scanning/alerts_spec.py @@ -1,7 +1,16 @@ import os import uuid -from expects import be_empty, be_false, be_true, contain, contain_exactly, expect, have_keys, equal +from expects import ( + be_empty, + be_false, + be_true, + contain, + contain_exactly, + expect, + have_keys, + equal, +) from mamba import after, before, 
context, description, it from sdcclient import SdScanningClient @@ -9,8 +18,10 @@ with description("Scanning Alerts", "integration") as self: with before.all: - self.client = SdScanningClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = SdScanningClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with after.all: ok, res = self.client.list_alerts() @@ -29,170 +40,279 @@ "analysis_update": False, "vuln_update": False, "policy_eval": False, - "failed": False + "failed": False, }, "autoscan": False, "onlyPassFail": False, "skipEventSend": False, - "notificationChannelIds": [] + "notificationChannelIds": [], } ok, res = self.client.add_alert_object(alert) expect((ok, res)).to(be_successful_api_call) - expect(res['enabled']).to(equal(alert['enabled'])) - expect(res['type']).to(equal(alert['type'])) - expect(res['name']).to(equal(alert['name'])) - expect(res['triggers']).to(equal(alert['triggers'])) - expect(res['autoscan']).to(equal(alert['autoscan'])) - expect(res['onlyPassFail']).to(equal(alert['onlyPassFail'])) - expect(res['skipEventSend']).to(equal(alert['skipEventSend'])) - expect(res['notificationChannelIds']).to(equal(alert['notificationChannelIds'])) + expect(res["enabled"]).to(equal(alert["enabled"])) + expect(res["type"]).to(equal(alert["type"])) + expect(res["name"]).to(equal(alert["name"])) + expect(res["triggers"]).to(equal(alert["triggers"])) + expect(res["autoscan"]).to(equal(alert["autoscan"])) + expect(res["onlyPassFail"]).to(equal(alert["onlyPassFail"])) + expect(res["skipEventSend"]).to(equal(alert["skipEventSend"])) + expect(res["notificationChannelIds"]).to(equal(alert["notificationChannelIds"])) with it("lists all the scanning alerts"): ok, res = self.client.add_runtime_alert( - name="A name", - description="A description", - scope="", - triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image] + name="A name", + description="A description", + scope="", + triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image], ) expect((ok, res)).to(be_successful_api_call) ok, res = self.client.list_alerts() expect((ok, res)).to(be_successful_api_call) expect(res["alerts"]).to_not(be_empty) - expect(res["alerts"]).to(contain(have_keys("customerId", "teamId", "alertId", "enabled", "type", "name", "description", "scope", "repositories", "triggers", "autoscan", "onlyPassFail", "skipEventSend", "notificationChannelIds"))) + expect(res["alerts"]).to( + contain( + have_keys( + "customerId", + "teamId", + "alertId", + "enabled", + "type", + "name", + "description", + "scope", + "repositories", + "triggers", + "autoscan", + "onlyPassFail", + "skipEventSend", + "notificationChannelIds", + ) + ) + ) with context("when creating a runtime alert"): with it("creates an alert with unscanned image trigger"): ok, res = self.client.add_runtime_alert( + name="A name", + description="A description", + scope="", + triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image], + ) + + expect((ok, res)).to(be_successful_api_call) + expect(res).to( + have_keys( name="A name", description="A description", scope="", - triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image] + triggers=have_keys(unscanned=be_true), + ) ) - expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(unscanned=be_true))) - with it("creates an alert with scan result change trigger as 
'Pass > Fail'"): ok, res = self.client.add_runtime_alert( + name="A name", + description="A description", + scope="", + triggers=[SdScanningClient.RuntimeAlertTrigger.scan_result_change_fail], + ) + + expect((ok, res)).to(be_successful_api_call) + expect(res).to( + have_keys( name="A name", description="A description", scope="", - triggers=[SdScanningClient.RuntimeAlertTrigger.scan_result_change_fail] + triggers=have_keys(policy_eval=be_true), + onlyPassFail=be_true, + ) ) - expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(policy_eval=be_true), onlyPassFail=be_true)) - with it("creates an alert with scan result change trigger as 'Any Change'"): ok, res = self.client.add_runtime_alert( + name="A name", + description="A description", + scope="", + triggers=[SdScanningClient.RuntimeAlertTrigger.scan_result_change_any], + ) + + expect((ok, res)).to(be_successful_api_call) + expect(res).to( + have_keys( name="A name", description="A description", scope="", - triggers=[SdScanningClient.RuntimeAlertTrigger.scan_result_change_any] + triggers=have_keys(policy_eval=be_true), + onlyPassFail=be_false, + ) ) - expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(policy_eval=be_true), onlyPassFail=be_false)) - with it("creates an alert with cve update trigger"): ok, res = self.client.add_runtime_alert( + name="A name", + description="A description", + scope="", + triggers=[SdScanningClient.RuntimeAlertTrigger.cve_update], + ) + + expect((ok, res)).to(be_successful_api_call) + expect(res).to( + have_keys( name="A name", description="A description", scope="", - triggers=[SdScanningClient.RuntimeAlertTrigger.cve_update] + triggers=have_keys(vuln_update=be_true), + ) ) - expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(vuln_update=be_true))) - with it("creates an alert with multiple triggers"): ok, res = self.client.add_runtime_alert( + name="A name", + description="A description", + scope="", + triggers=[ + SdScanningClient.RuntimeAlertTrigger.unscanned_image, + SdScanningClient.RuntimeAlertTrigger.scan_result_change_fail, + SdScanningClient.RuntimeAlertTrigger.cve_update, + ], + ) + + expect((ok, res)).to(be_successful_api_call) + expect(res).to( + have_keys( name="A name", description="A description", scope="", - triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image, - SdScanningClient.RuntimeAlertTrigger.scan_result_change_fail, - SdScanningClient.RuntimeAlertTrigger.cve_update] + triggers=have_keys( + unscanned=be_true, policy_eval=be_true, vuln_update=be_true + ), + onlyPassFail=be_true, + ) ) - expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(unscanned=be_true, policy_eval=be_true, vuln_update=be_true), onlyPassFail=be_true)) - with context("when creating a repository alert"): with it("creates an alert with new image analyzed trigger"): ok, res = self.client.add_repository_alert( - name="A name", - registry="registry", - repository="repository", - tag="latest", - description="A description", - triggers=[SdScanningClient.RepositoryAlertTrigger.new_image_analyzed] + name="A name", + registry="registry", + repository="repository", + tag="latest", + description="A description", + 
triggers=[SdScanningClient.RepositoryAlertTrigger.new_image_analyzed], ) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(analysis_update=be_true))) + expect(res).to( + have_keys( + name="A name", + description="A description", + scope="", + triggers=have_keys(analysis_update=be_true), + ) + ) with it("creates an alert with scan result change trigger as 'Pass > Fail'"): ok, res = self.client.add_repository_alert( - name="A name", - registry="registry", - repository="repository", - tag="latest", - description="A description", - triggers=[SdScanningClient.RepositoryAlertTrigger.scan_result_change_fail] + name="A name", + registry="registry", + repository="repository", + tag="latest", + description="A description", + triggers=[ + SdScanningClient.RepositoryAlertTrigger.scan_result_change_fail + ], ) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(policy_eval=be_true), onlyPassFail=be_true)) + expect(res).to( + have_keys( + name="A name", + description="A description", + scope="", + triggers=have_keys(policy_eval=be_true), + onlyPassFail=be_true, + ) + ) with it("creates an alert with scan result change trigger as 'Any Change'"): ok, res = self.client.add_repository_alert( - name="A name", - registry="registry", - repository="repository", - tag="latest", - description="A description", - triggers=[SdScanningClient.RepositoryAlertTrigger.scan_result_change_any] + name="A name", + registry="registry", + repository="repository", + tag="latest", + description="A description", + triggers=[ + SdScanningClient.RepositoryAlertTrigger.scan_result_change_any + ], ) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(policy_eval=be_true), onlyPassFail=be_false)) + expect(res).to( + have_keys( + name="A name", + description="A description", + scope="", + triggers=have_keys(policy_eval=be_true), + onlyPassFail=be_false, + ) + ) with it("creates an alert with cve update trigger"): ok, res = self.client.add_repository_alert( - name="A name", - registry="registry", - repository="repository", - tag="latest", - description="A description", - triggers=[SdScanningClient.RepositoryAlertTrigger.cve_update] + name="A name", + registry="registry", + repository="repository", + tag="latest", + description="A description", + triggers=[SdScanningClient.RepositoryAlertTrigger.cve_update], ) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(vuln_update=be_true))) + expect(res).to( + have_keys( + name="A name", + description="A description", + scope="", + triggers=have_keys(vuln_update=be_true), + ) + ) with it("creates an alert with multiple triggers"): ok, res = self.client.add_repository_alert( - name="A name", - registry="registry", - repository="repository", - tag="latest", - description="A description", - triggers=[SdScanningClient.RepositoryAlertTrigger.new_image_analyzed, - SdScanningClient.RepositoryAlertTrigger.scan_result_change_fail, - SdScanningClient.RepositoryAlertTrigger.cve_update] + name="A name", + registry="registry", + repository="repository", + tag="latest", + description="A description", + triggers=[ + SdScanningClient.RepositoryAlertTrigger.new_image_analyzed, + SdScanningClient.RepositoryAlertTrigger.scan_result_change_fail, + 
SdScanningClient.RepositoryAlertTrigger.cve_update, + ], ) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(analysis_update=be_true, policy_eval=be_true, vuln_update=be_true), onlyPassFail=be_true)) + expect(res).to( + have_keys( + name="A name", + description="A description", + scope="", + triggers=have_keys( + analysis_update=be_true, + policy_eval=be_true, + vuln_update=be_true, + ), + onlyPassFail=be_true, + ) + ) with it("removes an alert correctly"): ok, res = self.client.add_runtime_alert( - name="A name", - description="A description", - scope="", - triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image] + name="A name", + description="A description", + scope="", + triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image], ) expect((ok, res)).to(be_successful_api_call) @@ -207,66 +327,106 @@ with it("retrieves an alert correctly"): ok, res = self.client.add_runtime_alert( - name="A name", - description="A description", - scope="", - triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image] + name="A name", + description="A description", + scope="", + triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image], ) expect((ok, res)).to(be_successful_api_call) alert_id = res["alertId"] ok, res = self.client.get_alert(alert_id) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(unscanned=be_true))) + expect(res).to( + have_keys( + name="A name", + description="A description", + scope="", + triggers=have_keys(unscanned=be_true), + ) + ) with it("updates a runtime alert correctly"): ok, res = self.client.add_runtime_alert( + name="A name", + description="A description", + scope="", + triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image], + ) + expect((ok, res)).to(be_successful_api_call) + expect(res).to( + have_keys( name="A name", description="A description", scope="", - triggers=[SdScanningClient.RuntimeAlertTrigger.unscanned_image] + triggers=have_keys(unscanned=be_true), + ) ) - expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", scope="", triggers=have_keys(unscanned=be_true))) alert_id = res["alertId"] ok, res = self.client.update_runtime_alert( - id=alert_id, + id=alert_id, + name="An updated name", + description="An updated description", + scope="agent.id = 'foo'", + triggers=[SdScanningClient.RuntimeAlertTrigger.scan_result_change_fail], + ) + expect((ok, res)).to(be_successful_api_call) + expect(res).to( + have_keys( name="An updated name", description="An updated description", scope="agent.id = 'foo'", - triggers=[SdScanningClient.RuntimeAlertTrigger.scan_result_change_fail] + triggers=have_keys(unscanned=be_false, policy_eval=be_true), + onlyPassFail=be_true, + ) ) - expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="An updated name", description="An updated description", scope="agent.id = 'foo'", triggers=have_keys(unscanned=be_false, policy_eval=be_true), onlyPassFail=be_true)) with it("updates a repository alert correctly"): ok, res = self.client.add_repository_alert( - name="A name", - registry="registry", - repository="repository", - tag="latest", - description="A description", - triggers=[SdScanningClient.RepositoryAlertTrigger.new_image_analyzed] + name="A name", + registry="registry", + repository="repository", + tag="latest", + description="A description", + 
triggers=[SdScanningClient.RepositoryAlertTrigger.new_image_analyzed], ) expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="A name", description="A description", - repositories=contain_exactly(have_keys(registry="registry", repository="repository", tag="latest")), - triggers=have_keys(analysis_update=be_true))) + expect(res).to( + have_keys( + name="A name", + description="A description", + repositories=contain_exactly( + have_keys( + registry="registry", repository="repository", tag="latest" + ) + ), + triggers=have_keys(analysis_update=be_true), + ) + ) alert_id = res["alertId"] ok, res = self.client.update_repository_alert( - id=alert_id, + id=alert_id, + name="An updated name", + registry="new_registry", + repository="new_repository", + tag="v1", + description="An updated description", + triggers=[SdScanningClient.RepositoryAlertTrigger.scan_result_change_fail], + ) + expect((ok, res)).to(be_successful_api_call) + expect(res).to( + have_keys( name="An updated name", - registry="new_registry", - repository="new_repository", - tag="v1", description="An updated description", - triggers=[SdScanningClient.RepositoryAlertTrigger.scan_result_change_fail] + repositories=contain_exactly( + have_keys( + registry="new_registry", repository="new_repository", tag="v1" + ) + ), + triggers=have_keys(unscanned=be_false, policy_eval=be_true), + onlyPassFail=be_true, + ) ) - expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_keys(name="An updated name", - description="An updated description", - repositories=contain_exactly(have_keys(registry="new_registry", repository="new_repository", tag="v1")), - triggers=have_keys(unscanned=be_false, policy_eval=be_true), onlyPassFail=be_true)) diff --git a/specs/secure/scanning/list_image_tags_spec.py b/specs/secure/scanning/list_image_tags_spec.py index 5c0a9631..5fcded84 100644 --- a/specs/secure/scanning/list_image_tags_spec.py +++ b/specs/secure/scanning/list_image_tags_spec.py @@ -8,13 +8,26 @@ with description("Scanning list_image_tags") as self: with before.all: - self.client = SdScanningClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = SdScanningClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with it("is able to retrieve all the image tags"): ok, res = self.client.list_image_tags() expect((ok, res)).to(be_successful_api_call) expect(res).to( - contain(have_keys("analyzed_at", "created_at", "fulltag", "imageDigest", "imageId", "parentDigest", - "tag_detected_at", "analysis_status"))) + contain( + have_keys( + "analyzed_at", + "created_at", + "fulltag", + "imageDigest", + "imageId", + "parentDigest", + "tag_detected_at", + "analysis_status", + ) + ) + ) diff --git a/specs/secure/scanning/list_images_spec.py b/specs/secure/scanning/list_images_spec.py index 7f3e33dc..24c06fc8 100644 --- a/specs/secure/scanning/list_images_spec.py +++ b/specs/secure/scanning/list_images_spec.py @@ -8,12 +8,27 @@ with description("Scanning list_images") as self: with before.all: - self.client = SdScanningClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = SdScanningClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with it("is able to list all the scanned images"): ok, res = self.client.list_images() expect((ok, res)).to(be_successful_api_call) - 
expect(res).to(contain( - have_keys("annotations", "imageDigest", "last_updated", "analysis_status", "image_content", "image_detail", - "image_status", "parentDigest", "userId", "created_at"))) + expect(res).to( + contain( + have_keys( + "annotations", + "imageDigest", + "last_updated", + "analysis_status", + "image_content", + "image_detail", + "image_status", + "parentDigest", + "userId", + "created_at", + ) + ) + ) diff --git a/specs/secure/scanning/policy_evaluation_spec.py b/specs/secure/scanning/policy_evaluation_spec.py index 7524f41a..e103f01a 100644 --- a/specs/secure/scanning/policy_evaluation_spec.py +++ b/specs/secure/scanning/policy_evaluation_spec.py @@ -9,8 +9,10 @@ with description("Policy Evaluation", "integration") as self: with before.all: - self.client = SdScanningClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = SdScanningClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) self.image_name = "quay.io/sysdig/agent:latest" with it("is able to retrieve the results for all the policies"): @@ -18,12 +20,19 @@ expect((ok, res)).to(be_successful_api_call) expect(res).to( - have_keys("image_digest", "image_id", "stop_results", - total_warn=be_above_or_equal(0), total_stop=be_above_or_equal(0), - last_evaluation=be_an(datetime), - status="pass", image_tag=self.image_name, - policy_id="*", policy_name="All policies", - warn_results=not_(be_empty)) + have_keys( + "image_digest", + "image_id", + "stop_results", + total_warn=be_above_or_equal(0), + total_stop=be_above_or_equal(0), + last_evaluation=be_an(datetime), + status="pass", + image_tag=self.image_name, + policy_id="*", + policy_name="All policies", + warn_results=not_(be_empty), + ) ) with it("is able to retrieve the results for the default policy"): @@ -32,12 +41,19 @@ expect((ok, res)).to(be_successful_api_call) expect(res).to( - have_keys("image_digest", "image_id", "stop_results", - total_warn=be_above_or_equal(0), total_stop=be_above_or_equal(0), - last_evaluation=be_an(datetime), - status="pass", image_tag=self.image_name, - policy_id="default", policy_name="DefaultPolicy", - warn_results=not_(be_empty)) + have_keys( + "image_digest", + "image_id", + "stop_results", + total_warn=be_above_or_equal(0), + total_stop=be_above_or_equal(0), + last_evaluation=be_an(datetime), + status="pass", + image_tag=self.image_name, + policy_id="default", + policy_name="DefaultPolicy", + warn_results=not_(be_empty), + ) ) with context("but the image has not been scanned yet"): @@ -45,8 +61,12 @@ ok, res = self.client.get_image_scanning_results("unknown_image") expect((ok, res)).to_not(be_successful_api_call) - expect(res).to(equal("could not retrieve image digest for the given image name, " - "ensure that the image has been scanned")) + expect(res).to( + equal( + "could not retrieve image digest for the given image name, " + "ensure that the image has been scanned" + ) + ) with context("but the provided policy id does not exist"): with it("returns an error saying that the policy id is not found"): diff --git a/specs/secure/scanning/query_image_content_spec.py b/specs/secure/scanning/query_image_content_spec.py index 4b224dd1..53e92615 100644 --- a/specs/secure/scanning/query_image_content_spec.py +++ b/specs/secure/scanning/query_image_content_spec.py @@ -8,15 +8,21 @@ with description("Query Image Content", "integration") as self: with before.each: - self.client = 
SdScanningClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = SdScanningClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) self.image_to_scan = "quay.io/sysdig/agent:latest" with it("is able to retrieve the OS contents"): ok, res = self.client.query_image_content(self.image_to_scan, "os") expect((ok, res)).to(be_successful_api_call) - expect(res["content"]).to(contain(have_keys("license", "origin", "package", "size", "type", "version"))) + expect(res["content"]).to( + contain( + have_keys("license", "origin", "package", "size", "type", "version") + ) + ) expect(res["content_type"]).to(equal("os")) with it("is able to retrieve the npm contents"): @@ -48,7 +54,19 @@ expect((ok, res)).to(be_successful_api_call) expect(res["content"]).to( - contain(have_keys("filename", "gid", "linkdest", "mode", "sha256", "size", "type", "uid"))) + contain( + have_keys( + "filename", + "gid", + "linkdest", + "mode", + "sha256", + "size", + "type", + "uid", + ) + ) + ) expect(res["content_type"]).to(equal("files")) with context("when the type is not in the supported list"): @@ -56,5 +74,8 @@ ok, res = self.client.query_image_content(self.image_to_scan, "Unknown") expect((ok, res)).not_to(be_successful_api_call) - expect(res).to(equal( - "unsupported type provided: unknown, must be one of ['os', 'files', 'npm', 'gem', 'python', 'java']")) + expect(res).to( + equal( + "unsupported type provided: unknown, must be one of ['os', 'files', 'npm', 'gem', 'python', 'java']" + ) + ) diff --git a/specs/secure/scanning/scanning_cve_report_spec.py b/specs/secure/scanning/scanning_cve_report_spec.py index 9ce1e9a5..fa52d3b3 100644 --- a/specs/secure/scanning/scanning_cve_report_spec.py +++ b/specs/secure/scanning/scanning_cve_report_spec.py @@ -8,34 +8,60 @@ with description("CVE Reports", "integration") as self: with before.all: - self.client = SdScanningClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = SdScanningClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with context("when the CSV of static can be downloaded"): with it("is able to download it for OS vulnerabilities"): - ok, csv = self.client.download_cve_report_csv(vuln_type="os", scope_type="static") + ok, csv = self.client.download_cve_report_csv( + vuln_type="os", scope_type="static" + ) expect((ok, csv)).to(be_successful_api_call) - expect(csv).to(start_with("Image Name,Tag,Package Name,Package Version,Package Path,Severity,Fixed In," - "Vulnerability ID,Links,Image Digest,Runtime Metadata")) + expect(csv).to( + start_with( + "Image Name,Tag,Package Name,Package Version,Package Path,Severity,Fixed In," + "Vulnerability ID,Links,Image Digest,Runtime Metadata" + ) + ) with it("is able to download it for non-OS vulnerabilities"): - ok, csv = self.client.download_cve_report_csv(vuln_type="non-os", scope_type="static") + ok, csv = self.client.download_cve_report_csv( + vuln_type="non-os", scope_type="static" + ) expect((ok, csv)).to(be_successful_api_call) - expect(csv).to(start_with("Image Name,Tag,Package Name,Package Version,Package Path,Severity,Fixed In," - "Vulnerability ID,Links,Image Digest,Runtime Metadata")) + expect(csv).to( + start_with( + "Image Name,Tag,Package Name,Package Version,Package Path,Severity,Fixed In," + "Vulnerability ID,Links,Image Digest,Runtime 
Metadata" + ) + ) with context("when the CSV of runtime can be downloaded"): with it("is able to download it for OS vulnerabilities"): - ok, csv = self.client.download_cve_report_csv(vuln_type="os", scope_type="runtime") + ok, csv = self.client.download_cve_report_csv( + vuln_type="os", scope_type="runtime" + ) expect((ok, csv)).to(be_successful_api_call) - expect(csv).to(start_with("Image Name,Tag,Package Name,Package Version,Package Path,Severity,Fixed In," - "Vulnerability ID,Links,Image Digest,Runtime Metadata")) + expect(csv).to( + start_with( + "Image Name,Tag,Package Name,Package Version,Package Path,Severity,Fixed In," + "Vulnerability ID,Links,Image Digest,Runtime Metadata" + ) + ) with it("is able to download it for non-OS vulnerabilities"): - ok, csv = self.client.download_cve_report_csv(vuln_type="non-os", scope_type="runtime") + ok, csv = self.client.download_cve_report_csv( + vuln_type="non-os", scope_type="runtime" + ) expect((ok, csv)).to(be_successful_api_call) - expect(csv).to(start_with("Image Name,Tag,Package Name,Package Version,Package Path,Severity,Fixed In," - "Vulnerability ID,Links,Image Digest,Runtime Metadata")) + expect(csv).to( + start_with( + "Image Name,Tag,Package Name,Package Version,Package Path,Severity,Fixed In," + "Vulnerability ID,Links,Image Digest,Runtime Metadata" + ) + ) diff --git a/specs/secure/scanning/scanning_vulnerability_exceptions_spec.py b/specs/secure/scanning/scanning_vulnerability_exceptions_spec.py index 3bf8d2dc..1a168e30 100644 --- a/specs/secure/scanning/scanning_vulnerability_exceptions_spec.py +++ b/specs/secure/scanning/scanning_vulnerability_exceptions_spec.py @@ -2,7 +2,18 @@ import os import uuid -from expects import equal, expect, contain, be_empty, have_key, be_true, have_keys, not_, be_false, be_above +from expects import ( + equal, + expect, + contain, + be_empty, + have_key, + be_true, + have_keys, + not_, + be_false, + be_above, +) from mamba import before, context, description, after, it from sdcclient import SdScanningClient @@ -10,47 +21,67 @@ with description("Scanning vulnerability exceptions", "integration") as self: with before.each: - self.client = SdScanningClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = SdScanningClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with after.each: self.clean_bundles() - def clean_bundles(self): _, res = self.client.list_vulnerability_exception_bundles() for bundle in res: if str(bundle["name"]).startswith("test_exception_bundle_"): - call = self.client.delete_vulnerability_exception_bundle(id=bundle["id"]) + call = self.client.delete_vulnerability_exception_bundle( + id=bundle["id"] + ) expect(call).to(be_successful_api_call) - with context("when we are creating a new vulnerability exception bundle"): with it("creates the bundle correctly"): exception_bundle = f"test_exception_bundle_{uuid.uuid4()}" exception_comment = "This is an example of an exception bundle" - ok, res = self.client.add_vulnerability_exception_bundle(name=exception_bundle, comment=exception_comment) + ok, res = self.client.add_vulnerability_exception_bundle( + name=exception_bundle, comment=exception_comment + ) expect((ok, res)).to(be_successful_api_call) expect(res).to( - have_keys("id", items=be_empty, policyBundleId=equal("default"), version="1_0", - comment=equal(exception_comment), name=equal(exception_bundle)) + have_keys( + "id", + items=be_empty, + 
policyBundleId=equal("default"), + version="1_0", + comment=equal(exception_comment), + name=equal(exception_bundle), + ) ) with it("creates the bundle correctly with name only and removes it correctly"): exception_bundle = f"test_exception_bundle_{uuid.uuid4()}" - ok, res = self.client.add_vulnerability_exception_bundle(name=exception_bundle) + ok, res = self.client.add_vulnerability_exception_bundle( + name=exception_bundle + ) expect((ok, res)).to(be_successful_api_call) expect(res).to( - have_keys("id", items=be_empty, policyBundleId=equal("default"), version="1_0", - comment=be_empty, name=equal(exception_bundle)) + have_keys( + "id", + items=be_empty, + policyBundleId=equal("default"), + version="1_0", + comment=be_empty, + name=equal(exception_bundle), + ) ) with context("when we are listing the vulnerability exception bundles"): with before.each: self.exception_bundle = f"test_exception_bundle_{uuid.uuid4()}" - ok, res = self.client.add_vulnerability_exception_bundle(name=self.exception_bundle) + ok, res = self.client.add_vulnerability_exception_bundle( + name=self.exception_bundle + ) expect((ok, res)).to(be_successful_api_call) self.created_exception_bundle = res["id"] @@ -58,42 +89,62 @@ def clean_bundles(self): ok, res = self.client.list_vulnerability_exception_bundles() expect((ok, res)).to(be_successful_api_call) - expect(res).to(contain( - have_keys(id=self.created_exception_bundle, items=None, policyBundleId=equal("default"), - version=equal("1_0"), comment=be_empty, name=equal(self.exception_bundle)) - )) + expect(res).to( + contain( + have_keys( + id=self.created_exception_bundle, + items=None, + policyBundleId=equal("default"), + version=equal("1_0"), + comment=be_empty, + name=equal(self.exception_bundle), + ) + ) + ) with context("when we are working with vulnerability exceptions in a bundle"): with before.each: - ok, res = self.client.add_vulnerability_exception_bundle(name=f"test_exception_bundle_{uuid.uuid4()}") + ok, res = self.client.add_vulnerability_exception_bundle( + name=f"test_exception_bundle_{uuid.uuid4()}" + ) expect((ok, res)).to(be_successful_api_call) self.created_exception_bundle = res["id"] with it("is able to add a vulnerability exception to a bundle"): exception_notes = "Microsoft Vulnerability" exception_cve = "CVE-2020-1234" - ok, res = self.client.add_vulnerability_exception(bundle=self.created_exception_bundle, - cve=exception_cve, - note=exception_notes, - expiration_date=datetime.datetime(2030, 12, 31) - .timestamp()) + ok, res = self.client.add_vulnerability_exception( + bundle=self.created_exception_bundle, + cve=exception_cve, + note=exception_notes, + expiration_date=datetime.datetime(2030, 12, 31).timestamp(), + ) expect((ok, res)).to(be_successful_api_call) expect(res).to( - have_keys("id", "description", gate=equal("vulnerabilities"), trigger_id=equal(exception_cve), - notes=equal(exception_notes), enabled=be_true) + have_keys( + "id", + "description", + gate=equal("vulnerabilities"), + trigger_id=equal(exception_cve), + notes=equal(exception_notes), + enabled=be_true, + ) ) with context("and there are existing vulnerability exceptions"): with before.each: self.created_exception_cve = "CVE-2020-1234" - ok, res = self.client.add_vulnerability_exception(bundle=self.created_exception_bundle, - cve=self.created_exception_cve) + ok, res = self.client.add_vulnerability_exception( + bundle=self.created_exception_bundle, cve=self.created_exception_cve + ) expect((ok, res)).to(be_successful_api_call) self.created_exception = res["id"] with 
it("is able to list all the vulnerability exceptions from a bundle"): - ok, res = self.client.get_vulnerability_exception_bundle(bundle=self.created_exception_bundle) + ok, res = self.client.get_vulnerability_exception_bundle( + bundle=self.created_exception_bundle + ) expect((ok, res)).to(be_successful_api_call) expect(res).to( @@ -106,75 +157,99 @@ def clean_bundles(self): trigger_id=equal(self.created_exception_cve), enabled=be_true, ) - ) + ), ) ) with it("is able to remove them"): - _, ex_before = self.client.get_vulnerability_exception_bundle(bundle=self.created_exception_bundle) - ok, res = self.client.delete_vulnerability_exception(bundle=self.created_exception_bundle, - id=self.created_exception) - _, ex_after = self.client.get_vulnerability_exception_bundle(bundle=self.created_exception_bundle) + _, ex_before = self.client.get_vulnerability_exception_bundle( + bundle=self.created_exception_bundle + ) + ok, res = self.client.delete_vulnerability_exception( + bundle=self.created_exception_bundle, id=self.created_exception + ) + _, ex_after = self.client.get_vulnerability_exception_bundle( + bundle=self.created_exception_bundle + ) expect((ok, res)).to(be_successful_api_call) expect(ex_before).to( - have_key("items", contain( - have_keys( - id=equal(self.created_exception), - gate=equal("vulnerabilities"), - trigger_id=equal(self.created_exception_cve), - enabled=be_true, - ) - )) + have_key( + "items", + contain( + have_keys( + id=equal(self.created_exception), + gate=equal("vulnerabilities"), + trigger_id=equal(self.created_exception_cve), + enabled=be_true, + ) + ), + ) ) expect(ex_after).to( - have_key("items", not_(contain( - have_keys( - id=equal(self.created_exception), - gate=equal("vulnerabilities"), - trigger_id=equal(self.created_exception_cve), - enabled=be_true, - ) - ))) + have_key( + "items", + not_( + contain( + have_keys( + id=equal(self.created_exception), + gate=equal("vulnerabilities"), + trigger_id=equal(self.created_exception_cve), + enabled=be_true, + ) + ) + ), + ) ) with it("is able to update them"): - _, ex_before = self.client.get_vulnerability_exception_bundle(bundle=self.created_exception_bundle) + _, ex_before = self.client.get_vulnerability_exception_bundle( + bundle=self.created_exception_bundle + ) - ok, res = self.client.update_vulnerability_exception(bundle=self.created_exception_bundle, - id=self.created_exception, - cve="CVE-2020-1235", - enabled=False, - note="Dummy note", - expiration_date=datetime.datetime(2030, 12, 31) - .timestamp()) + ok, res = self.client.update_vulnerability_exception( + bundle=self.created_exception_bundle, + id=self.created_exception, + cve="CVE-2020-1235", + enabled=False, + note="Dummy note", + expiration_date=datetime.datetime(2030, 12, 31).timestamp(), + ) - _, ex_after = self.client.get_vulnerability_exception_bundle(bundle=self.created_exception_bundle) + _, ex_after = self.client.get_vulnerability_exception_bundle( + bundle=self.created_exception_bundle + ) expect((ok, res)).to(be_successful_api_call) expect(ex_before).to( - have_key("items", contain( - have_keys( - id=equal(self.created_exception), - gate=equal("vulnerabilities"), - trigger_id=equal(self.created_exception_cve), - notes=equal(None), - expiration_date=equal(None), - enabled=be_true, - ) - )) + have_key( + "items", + contain( + have_keys( + id=equal(self.created_exception), + gate=equal("vulnerabilities"), + trigger_id=equal(self.created_exception_cve), + notes=equal(None), + expiration_date=equal(None), + enabled=be_true, + ) + ), + ) ) 
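Taken together, the calls exercised by this spec trace the whole vulnerability-exception lifecycle. A condensed sketch of that flow, using only methods that appear in the spec; the bundle name, CVE identifiers, and notes are illustrative:

    import datetime
    import os

    from sdcclient import SdScanningClient

    client = SdScanningClient(
        sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"),
        token=os.getenv("SDC_SECURE_TOKEN"),
    )

    # Create a bundle, attach an exception, update it, then clean up.
    _, bundle = client.add_vulnerability_exception_bundle(name="example_bundle")
    _, exc = client.add_vulnerability_exception(
        bundle=bundle["id"],
        cve="CVE-2020-1234",
        note="Example note",
        expiration_date=datetime.datetime(2030, 12, 31).timestamp(),
    )
    client.update_vulnerability_exception(
        bundle=bundle["id"],
        id=exc["id"],
        cve="CVE-2020-1235",
        enabled=False,
        note="Updated note",
        expiration_date=datetime.datetime(2030, 12, 31).timestamp(),
    )
    client.delete_vulnerability_exception(bundle=bundle["id"], id=exc["id"])
    client.delete_vulnerability_exception_bundle(id=bundle["id"])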
expect(ex_after).to( - have_key("items", contain( - have_keys( - id=equal(self.created_exception), - gate=equal("vulnerabilities"), - trigger_id=equal("CVE-2020-1235"), - notes=equal("Dummy note"), - expiration_date=be_above(0), - enabled=be_false, - ) - )) + have_key( + "items", + contain( + have_keys( + id=equal(self.created_exception), + gate=equal("vulnerabilities"), + trigger_id=equal("CVE-2020-1235"), + notes=equal("Dummy note"), + expiration_date=be_above(0), + enabled=be_false, + ) + ), + ) ) diff --git a/specs/secure/scanning/scanning_vulnerability_spec.py b/specs/secure/scanning/scanning_vulnerability_spec.py index 8d992adf..c60c55cd 100644 --- a/specs/secure/scanning/scanning_vulnerability_spec.py +++ b/specs/secure/scanning/scanning_vulnerability_spec.py @@ -8,8 +8,10 @@ with description("Scanning vulnerability details", "integration") as self: with before.each: - self.client = SdScanningClient(sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), - token=os.getenv("SDC_SECURE_TOKEN")) + self.client = SdScanningClient( + sdc_url=os.getenv("SDC_SECURE_URL", "https://secure.sysdig.com"), + token=os.getenv("SDC_SECURE_TOKEN"), + ) with context("when retrieving a simple vulnerability"): with it("retrieves the vulnerability details correctly if exists"): @@ -18,8 +20,15 @@ expect((ok, res)).to(be_successful_api_call) expect(res).to( - have_keys("description", "severity", "vendor_data", "nvd_data", "references", - "affected_packages", id=equal(vuln_id)) + have_keys( + "description", + "severity", + "vendor_data", + "nvd_data", + "references", + "affected_packages", + id=equal(vuln_id), + ) ) with it("fails if it does not exist"): diff --git a/utils/sync_pagerduty_policies.py b/utils/sync_pagerduty_policies.py index c0dc46d2..e7fe1396 100644 --- a/utils/sync_pagerduty_policies.py +++ b/utils/sync_pagerduty_policies.py @@ -15,26 +15,29 @@ # # Parse arguments # -parser = argparse.ArgumentParser(description='Synchronize PagerDuty escalation policies with Sysdig, ' - 'to make sure each escalation policy has a notification ' - 'channel enabled in Sysdig') -parser.add_argument('sysdig-token', nargs=1, help='Sysdig API token') -parser.add_argument('pagerduty-account-id', nargs=1, help='PagerDuty account ID') -parser.add_argument('pagerduty-access-key', nargs=1, help='PagerDuty API access key') +parser = argparse.ArgumentParser( + description="Synchronize PagerDuty escalation policies with Sysdig, " + "to make sure each escalation policy has a notification " + "channel enabled in Sysdig" +) +parser.add_argument("sysdig-token", nargs=1, help="Sysdig API token") +parser.add_argument("pagerduty-account-id", nargs=1, help="PagerDuty account ID") +parser.add_argument("pagerduty-access-key", nargs=1, help="PagerDuty API access key") parser.add_argument( - '--link', - action='store_true', - help='Set to creat notification channels in Sysdig and services in PagerDuty for all escalation policies' + "--link", + action="store_true", + help="Set to creat notification channels in Sysdig and services in PagerDuty for all escalation policies", ) parser.add_argument( - '--unlink', - action='store_true', - help='Set to remove notification channels connected to PagerDuty escalation policies' + "--unlink", + action="store_true", + help="Set to remove notification channels connected to PagerDuty escalation policies", ) parser.add_argument( - '--dry-run', - action='store_true', - help='Set to get a report of changes, without actually apply them') + "--dry-run", + action="store_true", + help="Set to 
get a report of changes, without actually apply them", +) args = vars(parser.parse_args()) @@ -53,22 +56,29 @@ def run(sysdig_token, pager_duty_id, pager_duty_token, link, unlink, dry_run): # ok, res = sysdig.list_notification_channels() if not ok: - print('\nUnable to fetch Sysdig notification channels') + print("\nUnable to fetch Sysdig notification channels") print(res) sys.exit(1) # # Find PagerDuty notification channels # - pager_duty_channels = [channel for channel in res['notificationChannels'] if channel['type'] == 'PAGER_DUTY'] - print('Found {} PagerDuty notification {} configured in Sysdig'.format( - len(pager_duty_channels), pluralize('channel', len(pager_duty_channels)))) + pager_duty_channels = [ + channel + for channel in res["notificationChannels"] + if channel["type"] == "PAGER_DUTY" + ] + print( + "Found {} PagerDuty notification {} configured in Sysdig".format( + len(pager_duty_channels), pluralize("channel", len(pager_duty_channels)) + ) + ) # print(json.dumps(pager_duty_channels, sort_keys=True, indent=4)) # Build map of notification channel -> integration key def get_integration_map(acc, channel): - acc[channel['options']['serviceKey']] = channel + acc[channel["options"]["serviceKey"]] = channel return acc integration_keys = reduce(get_integration_map, pager_duty_channels, {}) @@ -76,52 +86,62 @@ def get_integration_map(acc, channel): # # Get list of PagerDuty escalation policies # - escalation_policies = pager_duty.get( - '/escalation_policies')['escalation_policies'] - print('Found {} PagerDuty escalation {}'.format( - len(escalation_policies), - pluralize('policy', len(escalation_policies), 'policies'))) + escalation_policies = pager_duty.get("/escalation_policies")["escalation_policies"] + print( + "Found {} PagerDuty escalation {}".format( + len(escalation_policies), + pluralize("policy", len(escalation_policies), "policies"), + ) + ) escalation_policies_map = {} for escalation_policy in escalation_policies: - escalation_policies_map[escalation_policy['id']] = escalation_policy + escalation_policies_map[escalation_policy["id"]] = escalation_policy # print(json.dumps(escalation_policies, sort_keys=True, indent=4)) # # Get list of PagerDuty services # - services = pager_duty.get('/services', {'include[]': ['integrations']})['services'] - print('Found {} PagerDuty {}'.format( - len(services), pluralize('service', len(services)))) + services = pager_duty.get("/services", {"include[]": ["integrations"]})["services"] + print( + "Found {} PagerDuty {}".format( + len(services), pluralize("service", len(services)) + ) + ) # print(json.dumps(services, sort_keys=True, indent=4)) # # Get Sysdig vendor configuration # - sysdig_vendor = pager_duty.get('/vendors', {'query': 'sysdig', 'limit': 1, - 'offset': 0, 'total': 'false'})['vendors'][0] + sysdig_vendor = pager_duty.get( + "/vendors", {"query": "sysdig", "limit": 1, "offset": 0, "total": "false"} + )["vendors"][0] # # Get integration details # for service in services: - for integration in service['integrations']: - integration['details'] = pager_duty.get( - '/services/{}/integrations/{}'.format(service['id'], integration['id']))['integration'] + for integration in service["integrations"]: + integration["details"] = pager_duty.get( + "/services/{}/integrations/{}".format(service["id"], integration["id"]) + )["integration"] # # Find integrations with Sysdig # service_integration_keys = {} for service in services: - service['sysdig_integrations'] = [integration for integration in service['integrations'] - if - 'vendor' in 
integration and integration['vendor'] and integration['vendor'][ - 'id'] == sysdig_vendor['id']] - - for integration in service['sysdig_integrations']: - service_integration_keys[integration['integration_key']] = { - 'service': service, - 'integration': integration + service["sysdig_integrations"] = [ + integration + for integration in service["integrations"] + if "vendor" in integration + and integration["vendor"] + and integration["vendor"]["id"] == sysdig_vendor["id"] + ] + + for integration in service["sysdig_integrations"]: + service_integration_keys[integration["integration_key"]] = { + "service": service, + "integration": integration, } # @@ -134,176 +154,232 @@ def get_integration_map(acc, channel): # delete all PagerDuty notification channels in Sysdig # for channel in pager_duty_channels: - actions.append({ - 'info': 'Sysdig: Delete channel "{}" ({})'.format(channel['name'], channel['id']), - 'fn': actions_factory.delete_notification_channel(channel) - }) + actions.append( + { + "info": 'Sysdig: Delete channel "{}" ({})'.format( + channel["name"], channel["id"] + ), + "fn": actions_factory.delete_notification_channel(channel), + } + ) # # delete integration with Sysdig # for service in services: - if service['sysdig_integrations']: - if len(service['sysdig_integrations']) == len(service['integrations']): + if service["sysdig_integrations"]: + if len(service["sysdig_integrations"]) == len(service["integrations"]): # # service connected to Sysdig only: delete service # - actions.append({ - 'info': 'PagerDuty: Delete service "{}" ({})'.format(service['name'], service['id']), - 'fn': actions_factory.delete_service(service['id']) - }) + actions.append( + { + "info": 'PagerDuty: Delete service "{}" ({})'.format( + service["name"], service["id"] + ), + "fn": actions_factory.delete_service(service["id"]), + } + ) else: # # service with some integrations with Sysdig: delete individual integrations # - for integration in service['sysdig_integrations']: + for integration in service["sysdig_integrations"]: actions.append( { - 'info': 'PagerDuty: Delete integration "{}" ({}) in service "{}" ({})'.format( - integration['name'], - integration['id'], - service['name'], - service['id']), - 'fn': actions_factory.delete_integration( - service['id'], - integration['id'])}) + "info": 'PagerDuty: Delete integration "{}" ({}) in service "{}" ({})'.format( + integration["name"], + integration["id"], + service["name"], + service["id"], + ), + "fn": actions_factory.delete_integration( + service["id"], integration["id"] + ), + } + ) if link: # # delete all PagerDuty notification channels in Sysdig that do NOT have an integration in PagerDuty # for channel in pager_duty_channels: - if channel['options']['serviceKey'] not in service_integration_keys: - actions.append({ - 'info': 'Remove notification channel "{}" not connected to any integration'.format(channel['name']), - 'fn': actions_factory.delete_notification_channel(channel) - }) + if channel["options"]["serviceKey"] not in service_integration_keys: + actions.append( + { + "info": 'Remove notification channel "{}" not connected to any integration'.format( + channel["name"] + ), + "fn": actions_factory.delete_notification_channel(channel), + } + ) for policy in escalation_policies: - service_name = '{} (Sysdig)'.format(policy['name']) - - policy_services = [service for service in services if service['escalation_policy']['id'] == policy['id']] - sysdig_services = [service for service in policy_services if service['sysdig_integrations']] + service_name = 
"{} (Sysdig)".format(policy["name"]) + + policy_services = [ + service + for service in services + if service["escalation_policy"]["id"] == policy["id"] + ] + sysdig_services = [ + service for service in policy_services if service["sysdig_integrations"] + ] disconnected_services = [] for service in sysdig_services: - for integration in service['integrations']: - if integration['vendor'] and \ - integration['vendor']['id'] == sysdig_vendor['id'] and \ - integration['integration_key'] not in integration_keys: - disconnected_services.append({ - 'service': service, - 'integration': integration - }) + for integration in service["integrations"]: + if ( + integration["vendor"] + and integration["vendor"]["id"] == sysdig_vendor["id"] + and integration["integration_key"] not in integration_keys + ): + disconnected_services.append( + {"service": service, "integration": integration} + ) if not sysdig_services: # # create service and integration in PagerDuty, and notification channel in Sysdig # - actions.append({'info': 'Create service, integration, and notification channel for policy "{}"'.format( - policy['name']), 'fn': actions_factory.create_all(policy, sysdig_vendor)}) + actions.append( + { + "info": 'Create service, integration, and notification channel for policy "{}"'.format( + policy["name"] + ), + "fn": actions_factory.create_all(policy, sysdig_vendor), + } + ) elif disconnected_services: # # create notification channel to disconnected integration # actions.append( { - 'info': 'Restore notification channel for disconnected service "{}" for policy "{}"'.format( - disconnected_services[0]['service']['name'], - policy['name']), - 'fn': actions_factory.create_notification_channel( + "info": 'Restore notification channel for disconnected service "{}" for policy "{}"'.format( + disconnected_services[0]["service"]["name"], policy["name"] + ), + "fn": actions_factory.create_notification_channel( policy, - disconnected_services[0]['service'], - disconnected_services[0]['integration'])}) + disconnected_services[0]["service"], + disconnected_services[0]["integration"], + ), + } + ) else: for service in sysdig_services: - for integration in service['integrations']: - if integration['vendor'] and \ - integration['vendor']['id'] == sysdig_vendor['id'] and \ - integration['integration_key'] in integration_keys: - channel = integration_keys[integration['integration_key']] - if channel['name'] != policy['name']: + for integration in service["integrations"]: + if ( + integration["vendor"] + and integration["vendor"]["id"] == sysdig_vendor["id"] + and integration["integration_key"] in integration_keys + ): + channel = integration_keys[integration["integration_key"]] + if channel["name"] != policy["name"]: # # rename channel to match new policy name # - actions.append({ - 'info': 'Rename notification channel "{}" to policy name "{}"'.format( - channel['name'], policy['name']), - 'fn': actions_factory.rename_notification_channel(channel, policy['name'], - service_name) - }) - elif channel['options']['serviceName'] != service_name: + actions.append( + { + "info": 'Rename notification channel "{}" to policy name "{}"'.format( + channel["name"], policy["name"] + ), + "fn": actions_factory.rename_notification_channel( + channel, policy["name"], service_name + ), + } + ) + elif channel["options"]["serviceName"] != service_name: # # rename channel service to service name # - actions.append({ - 'info': 'Rename channel service "{}" to service name "{}"'.format(service['name'], - service_name), - 'fn': 
actions_factory.rename_notification_channel(channel, policy['name'], - service_name) - }) - - if len(service['integrations']) == 1 and service['name'] != service_name: + actions.append( + { + "info": 'Rename channel service "{}" to service name "{}"'.format( + service["name"], service_name + ), + "fn": actions_factory.rename_notification_channel( + channel, policy["name"], service_name + ), + } + ) + + if ( + len(service["integrations"]) == 1 + and service["name"] != service_name + ): # # rename service to match new policy name # - actions.append({ - 'info': 'Rename service "{}" to "{}"'.format(service['name'], service_name), - 'fn': actions_factory.rename_service(service, service_name) - }) + actions.append( + { + "info": 'Rename service "{}" to "{}"'.format( + service["name"], service_name + ), + "fn": actions_factory.rename_service( + service, service_name + ), + } + ) if actions: # # Run action, or just print the task in dry mode # - print('') - print('Action items:') + print("") + print("Action items:") for action in actions: if dry_run: - print('\t* {}'.format(action['info'])) + print("\t* {}".format(action["info"])) else: - print('\t* {}...'.format(action['info'])) - action['fn']() - print('\t Done!') + print("\t* {}...".format(action["info"])) + action["fn"]() + print("\t Done!") if dry_run: - print('\nTo apply changes, execute the same command without "--dry-run" parameter:\npython {}'.format( - ' '.join([arg for arg in sys.argv if arg != '--dry-run']))) + print( + '\nTo apply changes, execute the same command without "--dry-run" parameter:\npython {}'.format( + " ".join([arg for arg in sys.argv if arg != "--dry-run"]) + ) + ) else: if unlink: - print('All escalation policies have been disconnected from Sysdig!') + print("All escalation policies have been disconnected from Sysdig!") if link: - print('All escalation policies are already connected to Sysdig!') + print("All escalation policies are already connected to Sysdig!") -class PagerDutyAPI(): +class PagerDutyAPI: def __init__(self, token): - self._base_url = 'https://api.pagerduty.com' + self._base_url = "https://api.pagerduty.com" self._token = token def get(self, endpoint, params=None): - return self._base_request('get', endpoint, params=params) + return self._base_request("get", endpoint, params=params) def post(self, endpoint, data=None): - return self._base_request('post', endpoint, data=data) + return self._base_request("post", endpoint, data=data) def put(self, endpoint, data=None): - return self._base_request('put', endpoint, data=data) + return self._base_request("put", endpoint, data=data) def delete(self, endpoint, params=None): - return self._base_request('delete', endpoint, params=params) + return self._base_request("delete", endpoint, params=params) def _base_request(self, method, endpoint, params=None, data=None): url = self._get_url(endpoint) request_data = json.dumps(data) if data else None - response = getattr(requests, method)(url, params=params, data=request_data, headers=self._get_headers()) + response = getattr(requests, method)( + url, params=params, data=request_data, headers=self._get_headers() + ) return self._handle_response(response, url) def _handle_response(self, response, url): if response.status_code >= 300: - error = 'PagerDuty API request {} {} failed: {}, {}'.format( - response.request.method, url, response.status_code, response.content) + error = "PagerDuty API request {} {} failed: {}, {}".format( + response.request.method, url, response.status_code, response.content + ) print(error) raise 
Exception(error) @@ -316,17 +392,17 @@ def _parse_response(self, response): return response.json() def _get_url(self, endpoint): - return '{}{}'.format(self._base_url, endpoint) + return "{}{}".format(self._base_url, endpoint) def _get_headers(self): return { - 'Accept': 'application/vnd.pagerduty+json;version=2', - 'Content-Type': 'application/json', - 'Authorization': 'Token token={}'.format(self._token) + "Accept": "application/vnd.pagerduty+json;version=2", + "Content-Type": "application/json", + "Authorization": "Token token={}".format(self._token), } -class ActionFactory(): +class ActionFactory: def __init__(self, sysdig, pager_duty, pager_duty_id): self._sysdig = sysdig self._pager_duty = pager_duty @@ -334,13 +410,15 @@ def __init__(self, sysdig, pager_duty, pager_duty_id): def delete_service(self, service_id): def fn(): - self._pager_duty.delete('/services/{}'.format(service_id)) + self._pager_duty.delete("/services/{}".format(service_id)) return fn def delete_integration(self, service_id, integration_id): def fn(): - self._pager_duty.delete('/services/{}/integrations/{}'.format(service_id, integration_id)) + self._pager_duty.delete( + "/services/{}/integrations/{}".format(service_id, integration_id) + ) return fn @@ -352,114 +430,115 @@ def fn(): def create_all(self, policy, sysdig_vendor): def fn(): - new_service = self._pager_duty.post('/services', { - 'service': { - 'type': 'service', - 'name': '{} (Sysdig)'.format(policy['name']), - 'auto_resolve_timeout': None, - 'acknowledgement_timeout': None, - 'status': 'active', - 'escalation_policy': { - 'id': policy['id'], - 'type': 'escalation_policy_reference' - }, - 'incident_urgency_rule': { - 'type': 'use_support_hours', - 'during_support_hours': { - 'type': 'constant', - 'urgency': 'high' + new_service = self._pager_duty.post( + "/services", + { + "service": { + "type": "service", + "name": "{} (Sysdig)".format(policy["name"]), + "auto_resolve_timeout": None, + "acknowledgement_timeout": None, + "status": "active", + "escalation_policy": { + "id": policy["id"], + "type": "escalation_policy_reference", }, - 'outside_support_hours': { - 'type': 'constant', - 'urgency': 'low' - } - }, - 'support_hours': { - 'type': 'fixed_time_per_day', - 'time_zone': 'America/Lima', - 'start_time': '09:00:00', - 'end_time': '17:00:00', - 'days_of_week': [ - 1, - 2, - 3, - 4, - 5 - ] - }, - 'scheduled_actions': [ - { - 'type': 'urgency_change', - 'at': { - 'type': 'named_time', - 'name': 'support_hours_start' + "incident_urgency_rule": { + "type": "use_support_hours", + "during_support_hours": { + "type": "constant", + "urgency": "high", }, - 'to_urgency': 'high' - } - ], - 'alert_creation': 'create_alerts_and_incidents', - 'alert_grouping': 'time', - 'alert_grouping_timeout': 2 - } - })['service'] - - new_integration = self._pager_duty.post('/services/{}/integrations'.format(new_service['id']), { - 'integration': { - 'type': 'integration_inbound_integration', - 'name': 'Sysdig', - 'vendor': { - 'id': sysdig_vendor['id'], - 'type': 'vendor' - }, - 'service': { - 'id': new_service['id'], - 'summary': new_service['summary'], - 'type': new_service['type'], - 'self': new_service['self'], - 'html_url': new_service['html_url'], + "outside_support_hours": { + "type": "constant", + "urgency": "low", + }, + }, + "support_hours": { + "type": "fixed_time_per_day", + "time_zone": "America/Lima", + "start_time": "09:00:00", + "end_time": "17:00:00", + "days_of_week": [1, 2, 3, 4, 5], + }, + "scheduled_actions": [ + { + "type": "urgency_change", + "at": { + 
"type": "named_time", + "name": "support_hours_start", + }, + "to_urgency": "high", + } + ], + "alert_creation": "create_alerts_and_incidents", + "alert_grouping": "time", + "alert_grouping_timeout": 2, } + }, + )["service"] + + new_integration = self._pager_duty.post( + "/services/{}/integrations".format(new_service["id"]), + { + "integration": { + "type": "integration_inbound_integration", + "name": "Sysdig", + "vendor": {"id": sysdig_vendor["id"], "type": "vendor"}, + "service": { + "id": new_service["id"], + "summary": new_service["summary"], + "type": new_service["type"], + "self": new_service["self"], + "html_url": new_service["html_url"], + }, + } + }, + )["integration"] + + self._sysdig.create_notification_channel( + { + "type": "PAGER_DUTY", + "enabled": True, + "sendTestNotification": False, + "name": policy["name"], + "options": { + "account": self._pager_duty_id, + "serviceKey": new_integration["integration_key"], + "serviceName": new_service["name"], + "notifyOnOk": True, + "notifyOnResolve": True, + }, } - })['integration'] - - self._sysdig.create_notification_channel({ - 'type': 'PAGER_DUTY', - 'enabled': True, - 'sendTestNotification': False, - 'name': policy['name'], - 'options': { - 'account': self._pager_duty_id, - 'serviceKey': new_integration['integration_key'], - 'serviceName': new_service['name'], - 'notifyOnOk': True, - 'notifyOnResolve': True - } - }) + ) return fn def create_notification_channel(self, policy, service, integration): def fn(): - self._sysdig.create_notification_channel({ - "type": "PAGER_DUTY", - "enabled": True, - "sendTestNotification": False, - "name": policy['name'], - "options": { - "account": self._pager_duty_id, - "serviceKey": integration['integration_key'], - "serviceName": service['name'], - "notifyOnOk": True, - "notifyOnResolve": True + self._sysdig.create_notification_channel( + { + "type": "PAGER_DUTY", + "enabled": True, + "sendTestNotification": False, + "name": policy["name"], + "options": { + "account": self._pager_duty_id, + "serviceKey": integration["integration_key"], + "serviceName": service["name"], + "notifyOnOk": True, + "notifyOnResolve": True, + }, } - }) + ) return fn def rename_notification_channel(self, channel, channel_name, service_name): def fn(): new_channel = copy.deepcopy(channel) - new_channel['name'] = channel_name - new_channel['options']['serviceName'] = service_name + new_channel["name"] = channel_name + new_channel["options"]["serviceName"] = service_name self._sysdig.update_notification_channel(new_channel) return fn @@ -467,8 +546,8 @@ def fn(): def rename_service(self, service, service_name): def fn(): new_service = copy.deepcopy(service) - new_service['name'] = service_name - self._pager_duty.put('/services/{}'.format(service['id']), new_service) + new_service["name"] = service_name + self._pager_duty.put("/services/{}".format(service["id"]), new_service) return fn @@ -478,13 +557,19 @@ def pluralize(term, count, plural=None): return term else: if plural is None: - return '{}s'.format(term) + return "{}s".format(term) else: return plural # let's get started! 
-print('') - -run(args['sysdig-token'][0], args['pagerduty-account-id'][0], - args['pagerduty-access-key'][0], args['link'], args['unlink'], args['dry_run']) +print("") + +run( + args["sysdig-token"][0], + args["pagerduty-account-id"][0], + args["pagerduty-access-key"][0], + args["link"], + args["unlink"], + args["dry_run"], +) From 70396c3a5f7bb9fe0a2d08fc65715da9f8e2b025 Mon Sep 17 00:00:00 2001 From: Fede Barcelona Date: Fri, 2 May 2025 12:05:39 +0200 Subject: [PATCH 05/13] lint: add pre-commit config to verify with linter and formatter --- .pre-commit-config.yaml | 9 +++++++++ flake.nix | 5 +++++ 2 files changed, 14 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..e6d4ad1b --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,9 @@ +repos: +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.11.8 + hooks: + # Run the linter. + - id: ruff + args: [ --fix ] + # Run the formatter. + - id: ruff-format diff --git a/flake.nix b/flake.nix index 4ea96628..6a5c3374 100644 --- a/flake.nix +++ b/flake.nix @@ -22,7 +22,12 @@ python310 # Minimum supported python version in this project uv ruff + pre-commit ]; + + shellHook = '' + pre-commit install + ''; }; formatter = pkgs.nixfmt-rfc-style; From 1a12eb9f1c1ea79993eecc6f7176737f9f5adb98 Mon Sep 17 00:00:00 2001 From: Fede Barcelona Date: Fri, 2 May 2025 13:25:26 +0200 Subject: [PATCH 06/13] ci: update github actions to use uv instead of poetry --- .github/workflows/ci-master-scheduled.yml | 36 ++++++------------ .github/workflows/ci-pull-request.yml | 45 ++++++++--------------- .github/workflows/codeql-analysis.yml | 6 +-- .github/workflows/release.yml | 22 ++++++----- .pre-commit-config.yaml | 6 +++ Makefile | 8 ++-- README.md | 4 +- docs/quickstart/install.rst | 6 +-- 8 files changed, 59 insertions(+), 74 deletions(-) diff --git a/.github/workflows/ci-master-scheduled.yml b/.github/workflows/ci-master-scheduled.yml index f3b2247d..565945a4 100644 --- a/.github/workflows/ci-master-scheduled.yml +++ b/.github/workflows/ci-master-scheduled.yml @@ -22,33 +22,21 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - name: Install uv + uses: astral-sh/setup-uv@v5 with: - python-version: ${{ matrix.python_version }} - - - name: Install Poetry - run: python -m pip install poetry poetry-dynamic-versioning + enable-cache: true + cache-dependency-glob: "uv.lock" - - uses: actions/cache@v3 - name: Cache Poetry dependencies + - uses: actions/setup-python@v5 with: - path: | - ~/.cache - ~/.local/share/virtualenvs/ - key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} - restore-keys: | - ${{ runner.os }}-poetry- - - - name: Get dependencies - run: poetry install + python-version: ${{ matrix.python_version }} - name: Lint - continue-on-error: true - run: | - # stop the build if there are Python syntax errors or undefined names - poetry run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - poetry run flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + uses: astral-sh/ruff-action@v3 + + - name: Check format + run: ruff format --check - name: Travis Test - Start agent id: start_agent @@ -61,7 +49,7 @@ jobs: - name: Travis Test - Install dependencies run: | - poetry build + uv build python -m pip install $(find dist -iname "*.whl" | head -1) - name: Travis Test - Secure APIs @@ -75,7 +63,7 @@ jobs: SDC_SECURE_TOKEN: ${{ secrets.STAGING_SECURE_API_TOKEN }} SDC_MONITOR_URL: "https://app-staging.sysdigcloud.com" SDC_SECURE_URL: "https://secure-staging.sysdig.com" - run: poetry run mamba -f documentation + run: uv run mamba -f documentation - name: Travis Test - Stop agent run: ./test/stop_agent.sh diff --git a/.github/workflows/ci-pull-request.yml b/.github/workflows/ci-pull-request.yml index a0159573..a1f55ead 100644 --- a/.github/workflows/ci-pull-request.yml +++ b/.github/workflows/ci-pull-request.yml @@ -25,32 +25,21 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - name: Install uv + uses: astral-sh/setup-uv@v5 with: - python-version: ${{ matrix.python_version }} - - - name: Install Poetry - run: python -m pip install poetry poetry-dynamic-versioning + enable-cache: true + cache-dependency-glob: "uv.lock" - - uses: actions/cache@v3 - name: Cache Poetry dependencies + - uses: actions/setup-python@v5 with: - path: | - ~/.cache - ~/.local/share/virtualenvs/ - key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} - restore-keys: | - ${{ runner.os }}-poetry- - - - name: Get dependencies - run: poetry install + python-version: ${{ matrix.python_version }} - name: Lint - run: | - # stop the build if there are Python syntax errors or undefined names - poetry run flake8 - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - poetry run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + uses: astral-sh/ruff-action@v3 + + - name: Check format + run: ruff format --check - name: Test in staging env: @@ -58,7 +47,7 @@ jobs: SDC_SECURE_TOKEN: ${{ secrets.STAGING_SECURE_API_TOKEN }} SDC_MONITOR_URL: "https://app-staging.sysdigcloud.com" SDC_SECURE_URL: "https://secure-staging.sysdig.com" - run: poetry run mamba -f documentation -t integration + run: uv run mamba -f documentation -t integration test-release: runs-on: ubuntu-latest @@ -81,14 +70,12 @@ jobs: run: git-chglog -c .github/git-chglog/config.yml -o RELEASE_CHANGELOG.md $(git describe --tags $(git rev-list --tags --max-count=1)) - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: 3.8 + python-version: 3.10 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install poetry poetry-dynamic-versioning + - name: Install uv + uses: astral-sh/setup-uv@v5 - name: Build - run: poetry build + run: uv build diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index ae4544c3..0c36e567 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -43,7 +43,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -54,7 +54,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 
# If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
-      uses: github/codeql-action/autobuild@v2
+      uses: github/codeql-action/autobuild@v3
 
     # ℹ️ Command-line programs to run using the OS shell.
     # 📚 https://git.io/JvXDl
@@ -68,4 +68,4 @@
     #   make release
 
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v2
+      uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 361be321..68108823 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -47,15 +47,19 @@ jobs:
     steps:
     - uses: actions/checkout@v4
 
-    - name: Set up Python
-      uses: actions/setup-python@v4
+    - name: Install uv
+      uses: astral-sh/setup-uv@v5
       with:
-        python-version: 3.8
+        enable-cache: true
+        cache-dependency-glob: "uv.lock"
 
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install poetry poetry-dynamic-versioning
+    - name: Set up Python
+      uses: actions/setup-python@v5
+      with:
+        python-version: 3.10
 
-    - name: Build and publish
-      run: poetry publish --build -u ${{ secrets.PYPI_USER }} -p ${{ secrets.PYPI_PASSWORD }}
+    - name: Build
+      run: uv build
+
+    - name: Publish
+      run: uv publish -u ${{ secrets.PYPI_USER }} -p ${{ secrets.PYPI_PASSWORD }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e6d4ad1b..746756e3 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,3 +7,9 @@ repos:
       args: [ --fix ]
     # Run the formatter.
     - id: ruff-format
+
+- repo: https://github.com/rhysd/actionlint
+  rev: v1.7.7
+  hooks:
+    - id: actionlint
+
diff --git a/Makefile b/Makefile
index b5a97bcf..f9612a32 100644
--- a/Makefile
+++ b/Makefile
@@ -2,15 +2,15 @@
 
 .PHONY: test
 test:
-	poetry run mamba -f documentation
+	uv run mamba -f documentation
 
 .coverage:
-	poetry run coverage run $(shell poetry run which mamba) -f documentation || true
+	uv run coverage run $(shell uv run which mamba) -f documentation || true
 
 cover: .coverage
-	poetry run coverage report --include 'sdcclient/*'
+	uv run coverage report --include 'sdcclient/*'
 
 .PHONY: cover-html
 cover-html: .coverage
-	poetry run coverage html -d coverage --include 'sdcclient/*'
+	uv run coverage html -d coverage --include 'sdcclient/*'
diff --git a/README.md b/README.md
index 2fd46bb8..182e2ebf 100644
--- a/README.md
+++ b/README.md
@@ -18,11 +18,11 @@ Installation
 
 #### Manual (development only)
 
-This method requires [Poetry](https://python-poetry.org/) installed
+This method requires [uv](https://docs.astral.sh/uv/) installed
 
     git clone https://github.com/sysdiglabs/sysdig-sdk-python.git
     cd python-sdc-client
-    poetry install
+    uv build
 
 Quick start
 -----------
diff --git a/docs/quickstart/install.rst b/docs/quickstart/install.rst
index 78e8304c..1bce2266 100644
--- a/docs/quickstart/install.rst
+++ b/docs/quickstart/install.rst
@@ -11,12 +11,12 @@ Automatic with PyPI
 
 Manual (development only)
 -------------------------
 
-This method requires `Poetry`_ installed.
+This method requires `uv`_ installed.
 
-.. _Poetry: https://python-poetry.org/
+.. 
_uv: https://docs.astral.sh/uv/ :: $ git clone https://github.com/sysdiglabs/sysdig-sdk-python.git $ cd python-sdc-client - $ poetry install + $ uv build From 84ae754662c85ce836dc06c3767c170c3ddb3bc0 Mon Sep 17 00:00:00 2001 From: Fede Barcelona Date: Thu, 8 May 2025 10:49:43 +0200 Subject: [PATCH 07/13] fix(ci): change yaml 3.10 numeric to string -- duh --- .github/workflows/ci-pull-request.yml | 2 +- .github/workflows/release.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-pull-request.yml b/.github/workflows/ci-pull-request.yml index a1f55ead..3b6dc7dd 100644 --- a/.github/workflows/ci-pull-request.yml +++ b/.github/workflows/ci-pull-request.yml @@ -72,7 +72,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.10 + python-version: "3.10" - name: Install uv uses: astral-sh/setup-uv@v5 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 68108823..9040e25b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -56,7 +56,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.10 + python-version: "3.10" - name: Build run: uv build From 7665b35fb6903bf7552447f9d427b6ebffb6596d Mon Sep 17 00:00:00 2001 From: Fede Barcelona Date: Thu, 8 May 2025 11:49:33 +0200 Subject: [PATCH 08/13] chore: add lsp as dependencies to work with --- flake.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/flake.nix b/flake.nix index 6a5c3374..4ad9a14a 100644 --- a/flake.nix +++ b/flake.nix @@ -23,6 +23,10 @@ uv ruff pre-commit + + # LSPs + python3Packages.python-lsp-server + basedpyright ]; shellHook = '' From e35801511f252741e29e6eb01bc8fe07a1abc585 Mon Sep 17 00:00:00 2001 From: Fede Barcelona Date: Thu, 8 May 2025 11:50:38 +0200 Subject: [PATCH 09/13] feat!: remove support for dashboards v1 and v2 --- examples/dashboard_backup_v1_restore_v2.py | 62 -- sdcclient/monitor/__init__.py | 2 - sdcclient/monitor/_dashboards_v2.py | 666 ------------------ sdcclient/monitor/_dashboards_v3.py | 16 +- .../_dashboard_versions.py | 300 -------- specs/monitor/dashboards_v2_spec.py | 203 ------ 6 files changed, 2 insertions(+), 1247 deletions(-) delete mode 100755 examples/dashboard_backup_v1_restore_v2.py delete mode 100644 sdcclient/monitor/_dashboards_v2.py delete mode 100644 specs/monitor/dashboards_v2_spec.py diff --git a/examples/dashboard_backup_v1_restore_v2.py b/examples/dashboard_backup_v1_restore_v2.py deleted file mode 100755 index 7b31a538..00000000 --- a/examples/dashboard_backup_v1_restore_v2.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python -# -# Save the first user dashboard to file and then use create_dashboard_from_file() -# to apply the stored dasboard again with a different filter. 
-# -import sys - -from sdcclient import SdMonitorClient -from sdcclient import SdMonitorClientV1 - -# -# Parse arguments -# -if len(sys.argv) != 5: - print( - f"usage: {sys.argv[0]} " - ) - print("You can find your token at https://app.sysdigcloud.com/#/settings/user") - sys.exit(1) - -sdc_v1_url = sys.argv[1] -sdc_v1_token = sys.argv[2] -sdc_v2_url = sys.argv[3] -sdc_v2_token = sys.argv[4] - -# -# Instantiate the SDC client -# -sdclient_v2 = SdMonitorClient(sdc_v2_token, sdc_url=sdc_v2_url) -sdclient_v1 = SdMonitorClientV1(sdc_v1_token, sdc_url=sdc_v1_url) - -# -# Serialize the first user dashboard to disk -# -ok, res = sdclient_v1.get_dashboards() - -if not ok: - print(res) - sys.exit(1) - -for dashboard in res["dashboards"]: - file_name = "{}.json".format(dashboard["id"]) - print(("Saving v1 dashboard {} to file {}...".format(dashboard["name"], file_name))) - sdclient_v1.save_dashboard_to_file(dashboard, file_name) - - print("Importing dashboard to v2...") - ok, res = sdclient_v2.create_dashboard_from_file( - "import of {}".format(dashboard["name"]), - file_name, - None, - shared=dashboard["isShared"], - public=dashboard["isPublic"], - ) - - if ok: - print(("Dashboard {} imported!".format(dashboard["name"]))) - sdclient_v2.delete_dashboard(res["dashboard"]) - else: - print(("Dashboard {} import failed:".format(dashboard["name"]))) - print(res) - - print("\n") diff --git a/sdcclient/monitor/__init__.py b/sdcclient/monitor/__init__.py index 7401f020..81c30728 100644 --- a/sdcclient/monitor/__init__.py +++ b/sdcclient/monitor/__init__.py @@ -1,11 +1,9 @@ -from ._dashboards_v2 import DashboardsClientV2 from ._dashboards_v3 import DashboardsClientV3 from ._events_v1 import EventsClientV1 from ._events_v2 import EventsClientV2 __all__ = [ "DashboardsClientV3", - "DashboardsClientV2", "EventsClientV1", "EventsClientV2", ] diff --git a/sdcclient/monitor/_dashboards_v2.py b/sdcclient/monitor/_dashboards_v2.py deleted file mode 100644 index 948c1cd6..00000000 --- a/sdcclient/monitor/_dashboards_v2.py +++ /dev/null @@ -1,666 +0,0 @@ -import copy -import json - -from sdcclient._common import _SdcCommon -from sdcclient.monitor.dashboard_converters import convert_dashboard_between_versions -from sdcclient.monitor.dashboard_converters._dashboard_scope import ( - convert_scope_string_to_expression, -) - - -class DashboardsClientV2(_SdcCommon): - def __init__( - self, - token="", - sdc_url="https://app.sysdigcloud.com", - ssl_verify=True, - custom_headers=None, - ): - super(DashboardsClientV2, self).__init__( - token, sdc_url, ssl_verify, custom_headers - ) - self.product = "SDC" - self._dashboards_api_version = "v2" - self._dashboards_api_endpoint = "/api/{}/dashboards".format( - self._dashboards_api_version - ) - self._default_dashboards_api_endpoint = "/api/{}/defaultDashboards".format( - self._dashboards_api_version - ) - - def get_views_list(self): - res = self.http.get( - self.url + self._default_dashboards_api_endpoint, - headers=self.hdrs, - verify=self.ssl_verify, - ) - if not self._checkResponse(res): - return [False, self.lasterr] - return [True, res.json()] - - def get_view(self, name): - gvres = self.get_views_list() - if gvres[0] is False: - return gvres - - vlist = gvres[1]["defaultDashboards"] - - id = None - - for v in vlist: - if v["name"] == name: - id = v["id"] - break - - if not id: - return [False, "view " + name + " not found"] - - res = self.http.get( - self.url + self._default_dashboards_api_endpoint + "/" + id, - headers=self.hdrs, - verify=self.ssl_verify, - ) - return 
self._request_result(res) - - def get_dashboards(self): - """**Description** - Return the list of dashboards available under the given user account. This includes the dashboards created by the user and the ones shared with her by other users. - - **Success Return Value** - A dictionary containing the list of available sampling intervals. - - **Example** - `examples/list_dashboards.py `_ - """ - res = self.http.get( - self.url + self._dashboards_api_endpoint, - headers=self.hdrs, - verify=self.ssl_verify, - ) - return self._request_result(res) - - def update_dashboard(self, dashboard_data): - """**Description** - Updates dashboard with provided in data. Please note that the dictionary will require a valid ID and version field to work as expected. - - **Success Return Value** - A dictionary containing the updated dashboard data. - - **Example** - `examples/dashboard_basic_crud.py `_ - """ - res = self.http.put( - self.url + self._dashboards_api_endpoint + "/" + str(dashboard_data["id"]), - headers=self.hdrs, - verify=self.ssl_verify, - data=json.dumps({"dashboard": dashboard_data}), - ) - return self._request_result(res) - - def find_dashboard_by(self, name=None): - """**Description** - Finds dashboards with the specified name. You can then delete the dashboard (with :func:`~SdcClient.delete_dashboard`) or edit panels (with :func:`~SdcClient.add_dashboard_panel` and :func:`~SdcClient.remove_dashboard_panel`) - - **Arguments** - - **name**: the name of the dashboards to find. - - **Success Return Value** - A list of dictionaries of dashboards matching the specified name. - - **Example** - `examples/dashboard.py `_ - """ - res = self.get_dashboards() - if res[0] is False: - return res - else: - - def filter_fn(configuration): - return configuration["name"] == name - - def create_item(configuration): - return {"dashboard": configuration} - - dashboards = list( - map(create_item, list(filter(filter_fn, res[1]["dashboards"]))) - ) - return [True, dashboards] - - def create_dashboard_with_configuration(self, configuration): - # Remove id and version properties if already set - configuration_clone = copy.deepcopy(configuration) - if "id" in configuration_clone: - del configuration_clone["id"] - if "version" in configuration_clone: - del configuration_clone["version"] - - res = self.http.post( - self.url + self._dashboards_api_endpoint, - headers=self.hdrs, - data=json.dumps({"dashboard": configuration_clone}), - verify=self.ssl_verify, - ) - return self._request_result(res) - - def create_dashboard(self, name): - """ - **Description** - Creates an empty dashboard. You can then add panels by using ``add_dashboard_panel``. - - **Arguments** - - **name**: the name of the dashboard that will be created. - - **Success Return Value** - A dictionary showing the details of the new dashboard. - - **Example** - `examples/dashboard.py `_ - """ - dashboard_configuration = { - "name": name, - "schema": 2, - "widgets": [], - "eventsOverlaySettings": {"filterNotificationsUserInputFilter": ""}, - } - - # - # Create the new dashboard - # - res = self.http.post( - self.url + self._dashboards_api_endpoint, - headers=self.hdrs, - data=json.dumps({"dashboard": dashboard_configuration}), - verify=self.ssl_verify, - ) - return self._request_result(res) - - # TODO COVER - def add_dashboard_panel( - self, - dashboard, - name, - panel_type, - metrics, - scope=None, - sort_direction="desc", - limit=None, - layout=None, - ): - """**Description** - Adds a panel to the dashboard. 
A panel can be a time series, or a top chart (i.e. bar chart), or a number panel. - - **Arguments** - - **dashboard**: dashboard to edit - - **name**: name of the new panel - - **panel_type**: type of the new panel. Valid values are: ``timeSeries``, ``top``, ``number`` - - **metrics**: a list of dictionaries, specifying the metrics to show in the panel, and optionally, if there is only one metric, a grouping key to segment that metric by. A metric is any of the entries that can be found in the *Metrics* section of the Explore page in Sysdig Monitor. Metric entries require an *aggregations* section specifying how to aggregate the metric across time and groups of containers/hosts. A grouping key is any of the entries that can be found in the *Show* or *Segment By* sections of the Explore page in Sysdig Monitor. Refer to the examples section below for ready to use code snippets. Note, certain panels allow certain combinations of metrics and grouping keys: - - ``timeSeries``: 1 or more metrics OR 1 metric + 1 grouping key - - ``top``: 1 or more metrics OR 1 metric + 1 grouping key - - ``number``: 1 metric only - - **scope**: filter to apply to the panel; must be based on metadata available in Sysdig Monitor; Example: *kubernetes.namespace.name='production' and container.image='nginx'*. - - **sort_direction**: Data sorting; The parameter is optional and it's a string identifying the sorting direction (it can be ``desc`` or ``asc``) - - **limit**: This parameter sets the limit on the number of lines/bars shown in a ``timeSeries`` or ``top`` panel. In the case of more entities being available than the limit, the top entities according to the sort will be shown. The default value is 10 for ``top`` panels (for ``timeSeries`` the default is defined by Sysdig Monitor itself). Note that increasing the limit above 10 is not officially supported and may cause performance and rendering issues - - **layout**: Size and position of the panel. The dashboard layout is defined by a grid of 12 columns, each row height is equal to the column height. For example, say you want to show 2 panels at the top: one panel might be 6 x 3 (half the width, 3 rows height) located in row 1 and column 1 (top-left corner of the viewport), the second panel might be 6 x 3 located in row 1 and position 7. The location is specified by a dictionary of ``row`` (row position), ``col`` (column position), ``size_x`` (width), ``size_y`` (height). - - **Success Return Value** - A dictionary showing the details of the edited dashboard. - - **Example** - `examples/dashboard.py `_ - """ - panel_configuration = { - "name": name, - "showAs": None, - "metrics": [], - "gridConfiguration": {"col": 1, "row": 1, "size_x": 12, "size_y": 6}, - "customDisplayOptions": {}, - } - - if panel_type == "timeSeries": - # - # In case of a time series, the current dashboard implementation - # requires the timestamp to be explicitly specified as "key". 
- # However, this function uses the same abstraction of the data API - # that doesn't require to specify a timestamp key (you only need to - # specify time window and sampling) - # - metrics = copy.copy(metrics) - metrics.insert(0, {"id": "timestamp"}) - - # - # Convert list of metrics to format used by Sysdig Monitor - # - property_names = {} - k_count = 0 - v_count = 0 - for i, metric in enumerate(metrics): - property_name = "v" if "aggregations" in metric else "k" - - if property_name == "k": - i = k_count - k_count += 1 - else: - i = v_count - v_count += 1 - property_names[metric["id"]] = property_name + str(i) - - panel_configuration["metrics"].append( - { - "id": metric["id"], - "timeAggregation": metric["aggregations"]["time"] - if "aggregations" in metric - else None, - "groupAggregation": metric["aggregations"]["group"] - if "aggregations" in metric - else None, - "propertyName": property_name + str(i), - } - ) - - panel_configuration["scope"] = scope - # if chart scope is equal to dashboard scope, set it as non override - panel_configuration["overrideScope"] = ( - "scope" in dashboard and dashboard["scope"] != scope - ) or ("scope" not in dashboard and scope is not None) - - if "custom_display_options" not in panel_configuration: - panel_configuration["custom_display_options"] = { - "valueLimit": {"count": 10, "direction": "desc"}, - "histogram": {"numberOfBuckets": 10}, - "yAxisScale": "linear", - "yAxisLeftDomain": {"from": 0, "to": None}, - "yAxisRightDomain": {"from": 0, "to": None}, - "xAxis": {"from": 0, "to": None}, - } - # - # Configure panel type - # - if panel_type == "timeSeries": - panel_configuration["showAs"] = "timeSeries" - - if limit is not None: - panel_configuration["custom_display_options"]["valueLimit"] = { - "count": limit, - "direction": "desc", - } - - elif panel_type == "number": - panel_configuration["showAs"] = "summary" - elif panel_type == "top": - panel_configuration["showAs"] = "top" - - if limit is not None: - panel_configuration["custom_display_options"]["valueLimit"] = { - "count": limit, - "direction": sort_direction, - } - - # - # Configure layout - # - if layout is not None: - panel_configuration["gridConfiguration"] = layout - - # - # Clone existing dashboard... - # - dashboard_configuration = copy.deepcopy(dashboard) - - # - # ... and add the new panel - # - dashboard_configuration["widgets"].append(panel_configuration) - - # - # Update dashboard - # - res = self.http.put( - self.url + self._dashboards_api_endpoint + "/" + str(dashboard["id"]), - headers=self.hdrs, - data=json.dumps({"dashboard": dashboard_configuration}), - verify=self.ssl_verify, - ) - return self._request_result(res) - - # TODO COVER - def remove_dashboard_panel(self, dashboard, panel_name): - """**Description** - Removes a panel from the dashboard. The panel to remove is identified by the specified ``name``. - - **Arguments** - - **name**: name of the panel to find and remove - - **Success Return Value** - A dictionary showing the details of the edited dashboard. - - **Example** - `examples/dashboard.py `_ - """ - # - # Clone existing dashboard... - # - dashboard_configuration = copy.deepcopy(dashboard) - - # - # ... find the panel - # - def filter_fn(panel): - return panel["name"] == panel_name - - panels = list(filter(filter_fn, dashboard_configuration["widgets"])) - - if len(panels) > 0: - # - # ... 
and remove it - # - for panel in panels: - dashboard_configuration["widgets"].remove(panel) - - # - # Update dashboard - # - res = self.http.put( - self.url + self._dashboards_api_endpoint + "/" + str(dashboard["id"]), - headers=self.hdrs, - data=json.dumps({"dashboard": dashboard_configuration}), - verify=self.ssl_verify, - ) - return self._request_result(res) - else: - return [False, "Not found"] - - def create_dashboard_from_template( - self, dashboard_name, template, scope, shared=False, public=False - ): - if scope is not None: - if not isinstance(scope, str): - return [False, "Invalid scope format: Expected a string"] - - # - # Clean up the dashboard we retireved so it's ready to be pushed - # - template["id"] = None - template["version"] = None - template["schema"] = 2 - template["name"] = dashboard_name - template["shared"] = shared - template["public"] = public - template["publicToken"] = None - - # default dashboards don't have eventsOverlaySettings property - # make sure to add the default set if the template doesn't include it - if ( - "eventsOverlaySettings" not in template - or not template["eventsOverlaySettings"] - ): - template["eventsOverlaySettings"] = { - "filterNotificationsUserInputFilter": "" - } - - # set dashboard scope to the specific parameter - scopeOk, scopeRes = convert_scope_string_to_expression(scope) - if not scopeOk: - return scopeOk, scopeRes - if scopeRes: - template["scopeExpressionList"] = list( - map( - lambda ex: { - "operand": ex["operand"], - "operator": ex["operator"], - "value": ex["value"], - "displayName": "", - "variable": False, - }, - scopeRes, - ) - ) - else: - template["scopeExpressionList"] = None - - # NOTE: Individual panels might override the dashboard scope, the override will NOT be reset - if "widgets" in template and template["widgets"] is not None: - for chart in template["widgets"]: - if "overrideScope" not in chart: - chart["overrideScope"] = False - - if not chart["overrideScope"]: - # patch frontend bug to hide scope override warning even when it's not really overridden - chart["scope"] = scope - - if chart["showAs"] != "map": - # if chart scope is equal to dashboard scope, set it as non override - chart_scope = chart["scope"] if "scope" in chart else None - chart["overrideScope"] = chart_scope != scope - else: - # topology panels must override the scope - chart["overrideScope"] = True - - # - # Create the new dashboard - # - res = self.http.post( - self.url + self._dashboards_api_endpoint, - headers=self.hdrs, - data=json.dumps({"dashboard": template}), - verify=self.ssl_verify, - ) - - return self._request_result(res) - - def create_dashboard_from_view( - self, newdashname, viewname, filter, shared=False, public=False - ): - """**Description** - Create a new dasboard using one of the Sysdig Monitor views as a template. You will be able to define the scope of the new dashboard. - - **Arguments** - - **newdashname**: the name of the dashboard that will be created. - - **viewname**: the name of the view to use as the template for the new dashboard. This corresponds to the name that the view has in the Explore page. - - **filter**: a boolean expression combining Sysdig Monitor segmentation criteria that defines what the new dasboard will be applied to. For example: *kubernetes.namespace.name='production' and container.image='nginx'*. - - **shared**: if set to True, the new dashboard will be a shared one. - - **public**: if set to True, the new dashboard will be shared with public token. 
- - **Success Return Value** - A dictionary showing the details of the new dashboard. - - **Example** - `examples/create_dashboard.py `_ - """ - # - # Find our template view - # - ok, gvres = self.get_view(viewname) - if not ok: - return ok, gvres - - view = gvres["defaultDashboard"] - - view["timeMode"] = {"mode": 1} - view["time"] = { - "last": 2 * 60 * 60 * 1000000, - "sampling": 2 * 60 * 60 * 1000000, - } - - # - # Create the new dashboard - # - return self.create_dashboard_from_template( - newdashname, view, filter, shared, public - ) - - def get_dashboard(self, dashboard_id): - """**Description** - Return a dashboard with the pased in ID. This includes the dashboards created by the user and the ones shared with them by other users. - - **Success Return Value** - A dictionary containing the requested dashboard data. - - **Example** - `examples/dashboard_basic_crud.py `_ - """ - res = self.http.get( - self.url + self._dashboards_api_endpoint + "/" + str(dashboard_id), - headers=self.hdrs, - verify=self.ssl_verify, - ) - return self._request_result(res) - - def create_dashboard_from_dashboard( - self, newdashname, templatename, filter, shared=False, public=False - ): - """**Description** - Create a new dasboard using one of the existing dashboards as a template. You will be able to define the scope of the new dasboard. - - **Arguments** - - **newdashname**: the name of the dashboard that will be created. - - **viewname**: the name of the dasboard to use as the template, as it appears in the Sysdig Monitor dashboard page. - - **filter**: a boolean expression combining Sysdig Monitor segmentation criteria defines what the new dasboard will be applied to. For example: *kubernetes.namespace.name='production' and container.image='nginx'*. - - **shared**: if set to True, the new dashboard will be a shared one. - - **public**: if set to True, the new dashboard will be shared with public token. - - **Success Return Value** - A dictionary showing the details of the new dashboard. - - **Example** - `examples/create_dashboard.py `_ - """ - # - # Get the list of dashboards from the server - # - res = self.http.get( - self.url + self._dashboards_api_endpoint, - headers=self.hdrs, - verify=self.ssl_verify, - ) - if not self._checkResponse(res): - return [False, self.lasterr] - - j = res.json() - - # - # Find our template dashboard - # - dboard = None - - for db in j["dashboards"]: - if db["name"] == templatename: - dboard = db - break - - if dboard is None: - self.lasterr = ( - "can't find dashboard " + templatename + " to use as a template" - ) - return [False, self.lasterr] - - # - # Create the dashboard - # - return self.create_dashboard_from_template( - newdashname, dboard, filter, shared, public - ) - - def create_dashboard_from_file( - self, dashboard_name, filename, filter, shared=False, public=False - ): - """ - **Description** - Create a new dasboard using a dashboard template saved to disk. See :func:`~SdcClient.save_dashboard_to_file` to use the file to create a dashboard (usefl to create and restore backups). - - The file can contain the following JSON formats: - 1. dashboard object in the format of an array element returned by :func:`~SdcClient.get_dashboards` - 2. JSON object with the following properties: - * version: dashboards API version (e.g. 'v2') - * dashboard: dashboard object in the format of an array element returned by :func:`~SdcClient.get_dashboards` - - **Arguments** - - **dashboard_name**: the name of the dashboard that will be created. 
-
-    def create_dashboard_from_file(
-        self, dashboard_name, filename, filter, shared=False, public=False
-    ):
-        """
-        **Description**
-            Create a new dashboard using a dashboard template saved to disk. See :func:`~SdcClient.save_dashboard_to_file` to generate the file from an existing dashboard (useful to create and restore backups).
-
-            The file can contain the following JSON formats:
-            1. dashboard object in the format of an array element returned by :func:`~SdcClient.get_dashboards`
-            2. JSON object with the following properties:
-                * version: dashboards API version (e.g. 'v2')
-                * dashboard: dashboard object in the format of an array element returned by :func:`~SdcClient.get_dashboards`
-
-        **Arguments**
-            - **dashboard_name**: the name of the dashboard that will be created.
-            - **filename**: name of a file containing a JSON object
-            - **filter**: a boolean expression combining Sysdig Monitor segmentation criteria that defines what the new dashboard will be applied to. For example: *kubernetes.namespace.name='production' and container.image='nginx'*.
-            - **shared**: if set to True, the new dashboard will be a shared one.
-            - **public**: if set to True, the new dashboard will be shared with a public token.
-
-        **Success Return Value**
-            A dictionary showing the details of the new dashboard.
-
-        **Example**
-            `examples/dashboard_save_load.py `_
-        """
-        #
-        # Load the Dashboard
-        #
-        with open(filename) as data_file:
-            loaded_object = json.load(data_file)
-
-        #
-        # Handle old files
-        #
-        if "dashboard" not in loaded_object:
-            loaded_object = {"version": "v1", "dashboard": loaded_object}
-
-        dashboard = loaded_object["dashboard"]
-
-        if loaded_object["version"] != self._dashboards_api_version:
-            #
-            # Convert the dashboard (if possible)
-            #
-            conversion_result, dashboard = convert_dashboard_between_versions(
-                dashboard, loaded_object["version"], self._dashboards_api_version
-            )
-
-            if not conversion_result:
-                return conversion_result, dashboard
-
-        #
-        # Create the new dashboard
-        #
-        return self.create_dashboard_from_template(
-            dashboard_name, dashboard, filter, shared, public
-        )
-
-    def save_dashboard_to_file(self, dashboard, filename):
-        """
-        **Description**
-            Save a dashboard to disk. See :func:`~SdcClient.create_dashboard_from_file` to use the file to create a dashboard (useful to create and restore backups).
-
-            The file will contain a JSON object with the following properties:
-            * version: dashboards API version (e.g. 'v2')
-            * dashboard: dashboard object in the format of an array element returned by :func:`~SdcClient.get_dashboards`
-
-        **Arguments**
-            - **dashboard**: dashboard object in the format of an array element returned by :func:`~SdcClient.get_dashboards`
-            - **filename**: name of a file that will contain a JSON object
-
-        **Example**
-            `examples/dashboard_save_load.py `_
-        """
-        with open(filename, "w") as outf:
-            json.dump(
-                {"version": self._dashboards_api_version, "dashboard": dashboard}, outf
-            )
-
-    def delete_dashboard(self, dashboard):
-        """**Description**
-            Deletes a dashboard.
-
-        **Arguments**
-            - **dashboard**: the dashboard object as returned by :func:`~SdcClient.get_dashboards`.
-
-        **Success Return Value**
-            `None`.
-
-        **Example**
-            `examples/delete_dashboard.py `_
-        """
-        if "id" not in dashboard:
-            return [False, "Invalid dashboard format"]
-
-        res = self.http.delete(
-            self.url + self._dashboards_api_endpoint + "/" + str(dashboard["id"]),
-            headers=self.hdrs,
-            verify=self.ssl_verify,
-        )
-        if not self._checkResponse(res):
-            return [False, self.lasterr]
-
-        return [True, None]
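For reference, the on-disk format written by save_dashboard_to_file and read back by create_dashboard_from_file is the small wrapper object described in the docstrings above. A minimal sketch of inspecting such a dump (the file name is hypothetical):

    import json

    with open("dashboard-backup.json") as f:
        backup = json.load(f)

    # Wrapper written by save_dashboard_to_file:
    #   version   - dashboards API version the dump came from (e.g. 'v2')
    #   dashboard - the dashboard object itself
    assert "version" in backup and "dashboard" in backup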
diff --git a/sdcclient/monitor/_dashboards_v3.py b/sdcclient/monitor/_dashboards_v3.py
index e964970d..66112db6 100644
--- a/sdcclient/monitor/_dashboards_v3.py
+++ b/sdcclient/monitor/_dashboards_v3.py
@@ -3,7 +3,6 @@
 from sdcclient._common import _SdcCommon
 from sdcclient.monitor.dashboard_converters import (
-    convert_dashboard_between_versions,
     convert_scope_string_to_expression,
 )
@@ -379,7 +378,7 @@ def create_dashboard_from_file(
             The file can contain the following JSON formats:
             1. dashboard object in the format of an array element returned by :func:`~SdcClient.get_dashboards`
             2. JSON object with the following properties:
-                * version: dashboards API version (e.g. 'v2')
+                * version: dashboards API version (e.g. 'v3')
                 * dashboard: dashboard object in the format of an array element returned by :func:`~SdcClient.get_dashboards`
 
         **Arguments**
@@ -412,17 +411,6 @@ def create_dashboard_from_file(
 
         dashboard = loaded_object["dashboard"]
 
-        if loaded_object["version"] != self._dashboards_api_version:
-            #
-            # Convert the dashboard (if possible)
-            #
-            conversion_result, dashboard = convert_dashboard_between_versions(
-                dashboard, loaded_object["version"], self._dashboards_api_version
-            )
-
-            if not conversion_result:
-                return conversion_result, dashboard
-
         #
         # Create the new dashboard
         #
@@ -605,7 +593,7 @@ def save_dashboard_to_file(self, dashboard, filename):
             Save a dashboard to disk. See :func:`~SdcClient.create_dashboard_from_file` to use the file to create a dashboard (useful to create and restore backups).
 
             The file will contain a JSON object with the following properties:
-                * version: dashboards API version (e.g. 'v2')
+                * version: dashboards API version (e.g. 'v3')
                 * dashboard: dashboard object in the format of an array element returned by :func:`~SdcClient.get_dashboards`
 
         **Arguments**
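Since this change drops the automatic version conversion, a v3 client now hands old v1/v2 dumps to the API unconverted. Callers that keep old backups around may want to guard explicitly; a sketch under that assumption (the helper name is hypothetical):

    import json

    def load_dashboard_backup(client, name, path):
        # Refuse pre-v3 dumps up front instead of letting the API reject them.
        with open(path) as f:
            version = json.load(f).get("version", "v1")
        if version != "v3":
            raise ValueError(f"{path} is a {version} dump; re-export it with a v3 client")
        return client.create_dashboard_from_file(name, path, filter=None)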
diff --git a/sdcclient/monitor/dashboard_converters/_dashboard_versions.py b/sdcclient/monitor/dashboard_converters/_dashboard_versions.py
index 39c0ab4c..8b137891 100644
--- a/sdcclient/monitor/dashboard_converters/_dashboard_versions.py
+++ b/sdcclient/monitor/dashboard_converters/_dashboard_versions.py
@@ -1,301 +1 @@
-import copy
-
-from sdcclient.monitor.dashboard_converters._dashboard_scope import (
-    convert_scope_string_to_expression,
-)
-
-
-def _convert_dashboard_v1_to_v2(dashboard):
-    #
-    # Migrations
-    #
-    # Each converter function will take:
-    # 1. name of the v1 dashboard property
-    # 2. v1 dashboard configuration
-    # 3. v2 dashboard configuration
-    #
-    # Each converter will apply changes to the v2 dashboard configuration according to the v1 one
-    #
-    def when_set(converter):
-        def fn(prop_name, old_obj, new_obj):
-            if prop_name in old_obj and old_obj[prop_name] is not None:
-                converter(prop_name, old_obj, new_obj)
-
-        return fn
-
-    def with_default(converter, default=None):
-        def fn(prop_name, old_obj, new_obj):
-            if prop_name not in old_obj:
-                old_obj[prop_name] = default
-
-            converter(prop_name, old_obj, new_obj)
-
-        return fn
-
-    def keep_as_is(prop_name, old_obj, new_obj):
-        new_obj[prop_name] = old_obj[prop_name]
-
-    def drop_it(prop_name=None, old_obj=None, new_obj=None):
-        pass
-
-    def ignore(prop_name=None, old_obj=None, new_obj=None):
-        pass
-
-    def rename_to(new_prop_name):
-        def rename(prop_name, old_obj, new_obj):
-            new_obj[new_prop_name] = old_obj[prop_name]
-
-        return rename
-
-    def convert_schema(prop_name, old_dashboard, new_dashboard):
-        new_dashboard[prop_name] = 2
-
-    def convert_scope(prop_name, old_dashboard, new_dashboard):
-        # # TODO!
-
-        scope = old_dashboard[prop_name]
-        scope_conversion = convert_scope_string_to_expression(scope)
-
-        if scope_conversion[0]:
-            if scope_conversion[1]:
-                new_dashboard["scopeExpressionList"] = scope_conversion[1]
-            else:
-                # the property can be either `null` or a non-empty array
-                new_dashboard["scopeExpressionList"] = None
-        else:
-            raise SyntaxError("scope not supported by the current grammar")
-
-    def convert_events_filter(prop_name, old_dashboard, new_dashboard):
-        rename_to("eventsOverlaySettings")(prop_name, old_dashboard, new_dashboard)
-
-        if (
-            "showNotificationsDoNotFilterSameMetrics"
-            in new_dashboard["eventsOverlaySettings"]
-        ):
-            del new_dashboard["eventsOverlaySettings"][
-                "showNotificationsDoNotFilterSameMetrics"
-            ]
-        if (
-            "showNotificationsDoNotFilterSameScope"
-            in new_dashboard["eventsOverlaySettings"]
-        ):
-            del new_dashboard["eventsOverlaySettings"][
-                "showNotificationsDoNotFilterSameScope"
-            ]
-
-    def convert_items(prop_name, old_dashboard, new_dashboard):
-        def convert_color_coding(prop_name, old_widget, new_widget):
-            best_value = None
-            worst_value = None
-            for item in old_widget[prop_name]["thresholds"]:
-                if item["color"] == "best":
-                    best_value = item["max"] if not item["max"] else item["min"]
-                elif item["color"] == "worst":
-                    worst_value = item["min"] if not item["min"] else item["max"]
-
-            if best_value is not None and worst_value is not None:
-                new_widget[prop_name] = {"best": best_value, "worst": worst_value}
-
-        def convert_display_options(prop_name, old_widget, new_widget):
-            keep_as_is(prop_name, old_widget, new_widget)
-
-            if "yAxisScaleFactor" in new_widget[prop_name]:
-                del new_widget[prop_name]["yAxisScaleFactor"]
-
-        def convert_group(prop_name, old_widget, new_widget):
-            group_by_metrics = old_widget[prop_name]["configuration"]["groups"][0][
-                "groupBy"
-            ]
-
-            migrated = []
-            for metric in group_by_metrics:
-                migrated.append({"id": metric["metric"]})
-
-            new_widget["groupingLabelIds"] = migrated
-
-        def convert_override_filter(prop_name, old_widget, new_widget):
-            if old_widget["showAs"] == "map":
-                # override scope is always true if the scope is set
-                new_widget["overrideScope"] = True
-            else:
-                new_widget["overrideScope"] = old_widget[prop_name]
-
-        def convert_name(prop_name, old_widget, new_widget):
-            #
-            # enforce a unique name (on the old dashboard, before migration)
-            #
-            unique_id = 1
-            name = old_widget[prop_name]
-
-            for widget in old_dashboard["items"]:
-                if widget == old_widget:
-                    break
-
-                if old_widget[prop_name] == widget[prop_name]:
-                    old_widget[prop_name] = "{} ({})".format(name, unique_id)
-                    unique_id += 1
-
-            keep_as_is(prop_name, old_widget, new_widget)
-
-        def convert_metrics(prop_name, old_widget, new_widget):
-            def convert_property_name(prop_name, old_metric, new_metric):
-                keep_as_is(prop_name, old_metric, new_metric)
-
-                if old_metric["metricId"] == "timestamp":
-                    return "k0"
-
-            metric_migrations = {
-                "metricId": rename_to("id"),
-                "aggregation": rename_to("timeAggregation"),
-                "groupAggregation": rename_to("groupAggregation"),
-                "propertyName": convert_property_name,
-            }
-
-            migrated_metrics = []
-            for old_metric in old_widget[prop_name]:
-                migrated_metric = {}
-
-                for key in metric_migrations.keys():
-                    if key in old_metric:
-                        metric_migrations[key](key, old_metric, migrated_metric)
-
-                migrated_metrics.append(migrated_metric)
-
-            # Property name convention:
-            # timestamp: k0 (if present)
-            # other keys: k* (from 0 or 1, depending on timestamp)
-            # values: v* (from 0)
-            sorted_metrics = []
-            timestamp_key = [
-                m
-                for m in migrated_metrics
-                if m["id"] == "timestamp"
-                and "timeAggregation" not in m
-                or not (m["timeAggregation"] is not None)
-            ]
-            no_timestamp_keys = [
-                m
-                for m in migrated_metrics
-                if m["id"] != "timestamp"
-                and "timeAggregation" not in m
-                or not (m["timeAggregation"] is not None)
-            ]
-            values = [
-                m
-                for m in migrated_metrics
-                if "timeAggregation" in m and m["timeAggregation"] is not None
-            ]
-            if timestamp_key:
-                timestamp_key[0]["propertyName"] = "k0"
-                sorted_metrics.append(timestamp_key[0])
-            k_offset = 1 if timestamp_key else 0
-            for i in range(0, len(no_timestamp_keys)):
-                no_timestamp_keys[i]["propertyName"] = "k{}".format(i + k_offset)
-                sorted_metrics.append(no_timestamp_keys[i])
-            for i in range(0, len(values)):
-                values[i]["propertyName"] = "v{}".format(i)
-                sorted_metrics.append(values[i])
-
-            new_widget["metrics"] = sorted_metrics
-
-        widget_migrations = {
-            "colorCoding": when_set(convert_color_coding),
-            "compareToConfig": when_set(keep_as_is),
-            "customDisplayOptions": with_default(convert_display_options, {}),
-            "gridConfiguration": keep_as_is,
-            "group": when_set(convert_group),
-            "hasTransparentBackground": when_set(rename_to("transparentBackground")),
-            "limitToScope": when_set(keep_as_is),
-            "isPanelTitleVisible": when_set(rename_to("panelTitleVisible")),
-            "markdownSource": when_set(keep_as_is),
-            "metrics": with_default(convert_metrics, []),
-            "name": with_default(convert_name, "Panel"),
-            "overrideFilter": convert_override_filter,
-            "paging": drop_it,
-            "scope": with_default(keep_as_is, None),
-            "showAs": keep_as_is,
-            "showAsType": drop_it,
-            "sorting": drop_it,
-            "textpanelTooltip": when_set(keep_as_is),
-        }
-
-        migrated_widgets = []
-        for old_widget in old_dashboard[prop_name]:
-            migrated_widget = {}
-
-            for key in widget_migrations.keys():
-                widget_migrations[key](key, old_widget, migrated_widget)
-
-            migrated_widgets.append(migrated_widget)
-
-        new_dashboard["widgets"] = migrated_widgets
-
-        return migrated
-
-    migrations = {
-        "autoCreated": keep_as_is,
-        "createdOn": keep_as_is,
-        "eventsFilter": with_default(
-            convert_events_filter, {"filterNotificationsUserInputFilter": ""}
-        ),
-        "filterExpression": convert_scope,
-        "scopeExpressionList": ignore,  # scope will be generated from 'filterExpression'
-        "id": keep_as_is,
-        "isPublic": rename_to("public"),
-        "isShared": rename_to("shared"),
-        "items": convert_items,
-        "layout": drop_it,
-        "modifiedOn": keep_as_is,
-        "name": keep_as_is,
-        "publicToken": drop_it,
-        "schema": convert_schema,
-        "teamId": keep_as_is,
-        "username": keep_as_is,
-        "version": keep_as_is,
-    }
-
-    #
-    # Apply migrations
-    #
-    migrated = {}
-    for key in migrations.keys():
-        migrations[key](key, copy.deepcopy(dashboard), migrated)
-
-    return True, migrated
-
-
-_DASHBOARD_CONVERTERS = {"v2": {"v1": _convert_dashboard_v1_to_v2}}
-
-
-def convert_dashboard_between_versions(dashboard, version_from, version_to):
-    """
-    **Description**
-        Converts a dashboard from one version to another.
-        Conversions currently supported:
-        - v1 -> v2
-
-    **Arguments**
-        - **version_from**: the version of the original dashboard to convert from
-        - **version_to**: the target version to convert the dashboard to
-
-    **Success Return Value**
-        A dashboard transformed between versions.
-    """
-    converters_to = _DASHBOARD_CONVERTERS.get(version_to, None)
-    if converters_to is None:
-        return (
-            False,
-            f"unexpected error: no dashboard converters from version {version_to} are supported",
-        )
-
-    converter = converters_to.get(version_from, None)
-
-    if converter is None:
-        return False, "dashboard version {} cannot be converted to {}".format(
-            version_from, version_to
-        )
-
-    try:
-        return converter(dashboard)
-    except Exception as err:
-        return False, str(err)
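The module deleted above dispatched conversions through a nested dict keyed first by target version, then by source version. Anyone vendoring this logic before upgrading can keep the same shape; a minimal sketch of the pattern, with a hypothetical v2-to-v3 converter that the SDK never shipped:

    _CONVERTERS = {
        "v2": {"v1": _convert_dashboard_v1_to_v2},
        # "v3": {"v2": convert_v2_to_v3},  # hypothetical, not provided by the SDK
    }

    def convert(dashboard, version_from, version_to):
        # Look up target version first, then source version, as the removed module did.
        converter = _CONVERTERS.get(version_to, {}).get(version_from)
        if converter is None:
            return False, f"dashboard version {version_from} cannot be converted to {version_to}"
        return converter(dashboard)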
- """ - converters_to = _DASHBOARD_CONVERTERS.get(version_to, None) - if converters_to is None: - return ( - False, - f"unexpected error: no dashboard converters from version {version_to} are supported", - ) - - converter = converters_to.get(version_from, None) - - if converter is None: - return False, "dashboard version {} cannot be converted to {}".format( - version_from, version_to - ) - - try: - return converter(dashboard) - except Exception as err: - return False, str(err) diff --git a/specs/monitor/dashboards_v2_spec.py b/specs/monitor/dashboards_v2_spec.py deleted file mode 100644 index 47f5113e..00000000 --- a/specs/monitor/dashboards_v2_spec.py +++ /dev/null @@ -1,203 +0,0 @@ -import json -import os -import tempfile - -from expects import expect, have_key, have_keys, contain, equal, start_with -from expects.matchers.built_in import be_false -from mamba import before, it, context, after, description - -from sdcclient.monitor import DashboardsClientV2 -from specs import be_successful_api_call - -_DASHBOARD_NAME = "test_dashboard_ci" - -with description("Dashboards v2", "integration") as self: - with before.all: - self.client = DashboardsClientV2( - sdc_url=os.getenv("SDC_MONITOR_URL", "https://app.sysdigcloud.com"), - token=os.getenv("SDC_MONITOR_TOKEN"), - ) - - with before.each: - self.cleanup_test_dashboards() - - with after.each: - self.cleanup_test_dashboards() - - def cleanup_test_dashboards(self): - ok, res = self.client.get_dashboards() - expect((ok, res)).to(be_successful_api_call) - - for dashboard in res["dashboards"]: - if str(dashboard["name"]).startswith(_DASHBOARD_NAME): - call = self.client.delete_dashboard(dashboard) - expect(call).to(be_successful_api_call) - - def create_test_dashboard(self): - ok, res = self.client.create_dashboard(name=_DASHBOARD_NAME) - if ok: - self.test_dashboard = res["dashboard"] - - return ok, res - - with it("is able to create a dashboard with just a name"): - ok, res = self.client.create_dashboard(name=_DASHBOARD_NAME) - expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_key("dashboard")) - - with it("is able to create a dashboard from a file"): - self.create_test_dashboard() - with tempfile.NamedTemporaryFile(mode="w+") as f: - # Write the info to the temp file - json.dump({"dashboard": self.test_dashboard, "version": "v2"}, f) - f.flush() - f.seek(0) - - ok, res = self.client.create_dashboard_from_file( - dashboard_name=f"{_DASHBOARD_NAME}_2", filename=f.name, filter=None - ) - expect((ok, res)).to(be_successful_api_call) - - with it("is able to create a dashboard from a view"): - _, res_view_list = self.client.get_views_list() - - call = self.client.create_dashboard_from_view( - newdashname=f"{_DASHBOARD_NAME}_2", - viewname=res_view_list["defaultDashboards"][0]["name"], - filter=None, - ) - expect(call).to(be_successful_api_call) - - with context("when there are existing dashbords"): - with before.each: - self.create_test_dashboard() - - with it("is able to list all the dashboards"): - ok, res = self.client.get_dashboards() - expect((ok, res)).to(be_successful_api_call) - expect(res).to(have_key("dashboards", contain(have_keys("name", "id")))) - - with it("is able to retrieve the test dashboard by its id"): - ok, res = self.client.get_dashboard(dashboard_id=self.test_dashboard["id"]) - expect((ok, res)).to(be_successful_api_call) - expect(res).to( - have_key( - "dashboard", have_keys("name", id=equal(self.test_dashboard["id"])) - ) - ) - - with context("when deleting a dashboard"): - with it("is able to remove it if 
-            with it("is able to remove it if all the info provided is correct"):
-                _, res = self.client.get_dashboards()
-                dashboard_len_before = len(res["dashboards"])
-
-                call = self.client.delete_dashboard(self.test_dashboard)
-
-                _, res = self.client.get_dashboards()
-                dashboard_len_after = len(res["dashboards"])
-
-                expect(call).to(be_successful_api_call)
-                expect(dashboard_len_after).to(equal(dashboard_len_before - 1))
-
-            with it("fails to delete it if the info provided is not correct"):
-                ok, res = self.client.delete_dashboard({"id": 0})
-                expect(ok).to(be_false)
-                expect(res).to(equal("status code 404"))
-
-            with it(
-                "returns an error if there is no 'id' field in the provided object"
-            ):
-                ok, res = self.client.delete_dashboard({})
-
-                expect(ok).to(be_false)
-                expect(res).to(equal("Invalid dashboard format"))
-
-        with it("is able to dump the dashboard to a file"):
-            with tempfile.NamedTemporaryFile(mode="w+") as f:
-                self.client.save_dashboard_to_file(
-                    dashboard=self.test_dashboard, filename=f.name
-                )
-                f.flush()
-                f.seek(0)
-
-                data = json.load(f)
-                expect(data).to(
-                    have_keys(version=equal("v2"), dashboard=equal(self.test_dashboard))
-                )
-
-        with it("is able to create a dashboard from a template"):
-            call = self.client.create_dashboard_from_template(
-                dashboard_name=f"{_DASHBOARD_NAME}_2",
-                template=self.test_dashboard,
-                scope='agent.id = "foo"',
-            )
-            expect(call).to(be_successful_api_call)
-
-        with context("when it's created with an incorrect scope"):
-            with it("fails if the scope is not a string"):
-                ok, res = self.client.create_dashboard_from_template(
-                    dashboard_name=f"{_DASHBOARD_NAME}_2",
-                    template=self.test_dashboard,
-                    scope={},
-                )
-                expect(ok).to(be_false)
-                expect(res).to(equal("Invalid scope format: Expected a string"))
-
-            with it("fails if the scope has an incorrect format"):
-                ok, res = self.client.create_dashboard_from_template(
-                    dashboard_name=f"{_DASHBOARD_NAME}_2",
-                    template=self.test_dashboard,
-                    scope="foobarbaz",
-                )
-                expect(ok).to(be_false)
-                expect(res).to(start_with("invalid scope: foobarbaz"))
-
-        with it("is able to create a dashboard from a configuration"):
-            self.test_dashboard["name"] = f"{_DASHBOARD_NAME}_2"
-            call = self.client.create_dashboard_with_configuration(self.test_dashboard)
-
-            expect(call).to(be_successful_api_call)
-
-        with context("when creating a dashboard from another dashboard"):
-            with it("creates the dashboard correctly if the template exists"):
-                ok, res = self.client.create_dashboard_from_dashboard(
-                    newdashname=f"{_DASHBOARD_NAME}_2",
-                    templatename=_DASHBOARD_NAME,
-                    filter=None,
-                )
-                expect((ok, res)).to(be_successful_api_call)
-
-            with it("returns an error saying the dashboard does not exist"):
-                ok, res = self.client.create_dashboard_from_dashboard(
-                    newdashname=f"{_DASHBOARD_NAME}_2",
-                    templatename="NonExistingDashboard",
-                    filter=None,
-                )
-                expect(ok).to(be_false)
-                expect(res).to(
-                    equal(
-                        "can't find dashboard NonExistingDashboard to use as a template"
-                    )
-                )
-
-        with it("is able to update a dashboard"):
-            self.test_dashboard["name"] = f"{_DASHBOARD_NAME}_updated"
-            call = self.client.update_dashboard(self.test_dashboard)
-
-            expect(call).to(be_successful_api_call)
-
-        with it("is able to retrieve the dashboard by its name"):
-            ok, res = self.client.find_dashboard_by(name=self.test_dashboard["name"])
-
-            expect((ok, res)).to(be_successful_api_call)
-            expect(res).to(
-                contain(
-                    have_key(
-                        "dashboard",
-                        have_keys(
-                            id=self.test_dashboard["id"],
-                            name=self.test_dashboard["name"],
-                        ),
-                    )
-                )
-            )
From 3bffef5f330908d50fffd7e87caf37061f27881e Mon Sep 17 00:00:00 2001
From: Fede Barcelona
Date: Thu, 8 May 2025 11:51:18 +0200
Subject: [PATCH 10/13] fix(ci): change error returned by api

---
 specs/_common/user_provisioning_spec.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/specs/_common/user_provisioning_spec.py b/specs/_common/user_provisioning_spec.py
index 5aaf52a8..411768b4 100644
--- a/specs/_common/user_provisioning_spec.py
+++ b/specs/_common/user_provisioning_spec.py
@@ -46,4 +46,8 @@
         ok, res = self.client.create_user(self.user_name)
 
         expect((ok, res)).not_to(be_successful_api_call)
-        expect(res).to(contain(f"User {self.user_name} already exists"))
+        expect(res).to(
+            contain(
+                "A user with the same email already exists for the same customer"
+            )
+        )

From 41089caed76e7b8964b80ca076a20e785b4497c4 Mon Sep 17 00:00:00 2001
From: Fede Barcelona
Date: Thu, 8 May 2025 11:51:37 +0200
Subject: [PATCH 11/13] fix(ci): change event filter returned by api

---
 specs/monitor/events_v2_spec.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/specs/monitor/events_v2_spec.py b/specs/monitor/events_v2_spec.py
index 579291b6..1f8aba42 100644
--- a/specs/monitor/events_v2_spec.py
+++ b/specs/monitor/events_v2_spec.py
@@ -38,7 +38,7 @@
         call = self.client.post_event(
             name=self.event_name,
             description="This event was created in a CI pipeline for the Python SDK library",
-            event_filter="host.hostName='ci'",
+            event_filter="host_hostName='ci'",
         )
         expect(call).to(be_successful_api_call)
         sleep(2)  # sleep to guarantee the event is created

From 04607c12022b23efc6b522e5e1a38f815157fb9e Mon Sep 17 00:00:00 2001
From: Fede Barcelona
Date: Thu, 8 May 2025 11:53:35 +0200
Subject: [PATCH 12/13] fix: remove unused import

---
 sdcclient/monitor/dashboard_converters/__init__.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/sdcclient/monitor/dashboard_converters/__init__.py b/sdcclient/monitor/dashboard_converters/__init__.py
index 246a404b..445cebb5 100644
--- a/sdcclient/monitor/dashboard_converters/__init__.py
+++ b/sdcclient/monitor/dashboard_converters/__init__.py
@@ -1,4 +1,3 @@
 from ._dashboard_scope import convert_scope_string_to_expression
-from ._dashboard_versions import convert_dashboard_between_versions
 
-__all__ = ["convert_dashboard_between_versions", "convert_scope_string_to_expression"]
+__all__ = ["convert_scope_string_to_expression"]

From de94a6591b56135a116aed7c54a0c56d28c5d712 Mon Sep 17 00:00:00 2001
From: Fede Barcelona
Date: Thu, 8 May 2025 12:05:18 +0200
Subject: [PATCH 13/13] fix(ci): stabilize events v2 tests

---
 specs/monitor/events_v2_spec.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/specs/monitor/events_v2_spec.py b/specs/monitor/events_v2_spec.py
index 1f8aba42..a43adabc 100644
--- a/specs/monitor/events_v2_spec.py
+++ b/specs/monitor/events_v2_spec.py
@@ -47,7 +47,7 @@
         expect((ok, res)).to(be_successful_api_call)
         expect(res).to(have_key("events"))
         expect(res["events"]).to(
-            contain(have_key("scope", equal("host.hostName = 'ci'")))
+            contain(have_key("scope", equal("host_hostName = 'ci'")))
         )
 
     with it("is able to retrieve an event by ID"):
@@ -56,6 +56,7 @@
             description="This event was created in a CI pipeline for the Python SDK library",
         )
         expect((ok, res)).to(be_successful_api_call)
+        sleep(5)  # wait for the API to stabilize the events before querying them
 
         event = res["event"]
         event_id = event["id"]
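The fixed sleep added in PATCH 13 trades a race for latency. Where flakiness persists, polling is usually more robust; a hedged sketch, assuming get_events() returns the {'events': [...]} payload the assertions above rely on (the helper name is hypothetical, not part of the SDK):

    from time import sleep

    def wait_for_event(client, event_name, attempts=10, delay=1.0):
        # Poll until the freshly posted event is queryable instead of sleeping blindly.
        for _ in range(attempts):
            ok, res = client.get_events()
            if ok and any(e.get("name") == event_name for e in res["events"]):
                return
            sleep(delay)
        raise TimeoutError(f"event {event_name!r} not visible after {attempts} polls")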