From 10b82d18c5be50fdc40fdfd46b3267b52b62a2c6 Mon Sep 17 00:00:00 2001 From: ablakley-r7 <96182471+ablakley-r7@users.noreply.github.com> Date: Wed, 5 Feb 2025 11:19:00 +0000 Subject: [PATCH] Mimecast V2 1.0.0 Release (#3072) * [SOAR-18655] Mimecast V2 (#3063) * Initial plugin * Initial plugin * [SOAR-18656] Mimecast V2 (#3066) * Initial task code * Format api.py * Refactor task * Validators * Update state * Add todo. Update state handling. * Update version * [SOAR-18657] mimecast v2 (#3068) * Update threads, error handling, custom config, rate limiting * Add connection test * Fix lint * Add unit tests * Add unit tests * Fix requiremnts * Fix requiremnts * Mimecast V2 - Fix connection test | Fix custom config (#3071) * Fix connection test | Fix custom config * Fix type hint * Add tooltip (#3074) --- plugins/mimecast_v2/.CHECKSUM | 15 ++ plugins/mimecast_v2/.dockerignore | 9 + plugins/mimecast_v2/Dockerfile | 20 ++ plugins/mimecast_v2/Makefile | 53 ++++ plugins/mimecast_v2/bin/icon_mimecast_v2 | 46 ++++ plugins/mimecast_v2/extension.png | Bin 0 -> 4306 bytes plugins/mimecast_v2/help.md | 114 +++++++++ plugins/mimecast_v2/icon.png | Bin 0 -> 4671 bytes .../mimecast_v2/icon_mimecast_v2/__init__.py | 1 + .../icon_mimecast_v2/actions/__init__.py | 2 + .../icon_mimecast_v2/connection/__init__.py | 2 + .../icon_mimecast_v2/connection/connection.py | 51 ++++ .../icon_mimecast_v2/connection/schema.py | 62 +++++ .../icon_mimecast_v2/tasks/__init__.py | 4 + .../tasks/monitor_siem_logs/__init__.py | 2 + .../tasks/monitor_siem_logs/schema.py | 55 +++++ .../tasks/monitor_siem_logs/task.py | 229 ++++++++++++++++++ .../icon_mimecast_v2/triggers/__init__.py | 2 + .../icon_mimecast_v2/util/__init__.py | 1 + .../mimecast_v2/icon_mimecast_v2/util/api.py | 135 +++++++++++ .../icon_mimecast_v2/util/constants.py | 6 + plugins/mimecast_v2/plugin.spec.yaml | 69 ++++++ plugins/mimecast_v2/requirements.txt | 5 + plugins/mimecast_v2/setup.py | 14 ++ plugins/mimecast_v2/unit_test/__init__.py | 
4 + .../expected/monitor_siem_logs.json.exp | 47 ++++ .../responses/authenticate.json.resp | 6 + .../responses/monitor_siem_logs.json.resp | 1 + .../monitor_siem_logs_batch.json.resp | 11 + .../monitor_siem_logs_error.json.resp | 7 + .../monitor_siem_logs_json_error.json.resp | 1 + .../unit_test/test_monitor_siem_logs.py | 225 +++++++++++++++++ plugins/mimecast_v2/unit_test/util.py | 110 +++++++++ 33 files changed, 1309 insertions(+) create mode 100644 plugins/mimecast_v2/.CHECKSUM create mode 100644 plugins/mimecast_v2/.dockerignore create mode 100644 plugins/mimecast_v2/Dockerfile create mode 100644 plugins/mimecast_v2/Makefile create mode 100644 plugins/mimecast_v2/bin/icon_mimecast_v2 create mode 100644 plugins/mimecast_v2/extension.png create mode 100644 plugins/mimecast_v2/help.md create mode 100644 plugins/mimecast_v2/icon.png create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/__init__.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/actions/__init__.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/connection/__init__.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/connection/connection.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/connection/schema.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/tasks/__init__.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/__init__.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/schema.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/task.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/triggers/__init__.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/util/__init__.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/util/api.py create mode 100644 plugins/mimecast_v2/icon_mimecast_v2/util/constants.py create mode 100644 plugins/mimecast_v2/plugin.spec.yaml create mode 100644 plugins/mimecast_v2/requirements.txt create mode 100644 
plugins/mimecast_v2/setup.py create mode 100644 plugins/mimecast_v2/unit_test/__init__.py create mode 100644 plugins/mimecast_v2/unit_test/expected/monitor_siem_logs.json.exp create mode 100644 plugins/mimecast_v2/unit_test/responses/authenticate.json.resp create mode 100644 plugins/mimecast_v2/unit_test/responses/monitor_siem_logs.json.resp create mode 100644 plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_batch.json.resp create mode 100644 plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_error.json.resp create mode 100644 plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_json_error.json.resp create mode 100644 plugins/mimecast_v2/unit_test/test_monitor_siem_logs.py create mode 100644 plugins/mimecast_v2/unit_test/util.py diff --git a/plugins/mimecast_v2/.CHECKSUM b/plugins/mimecast_v2/.CHECKSUM new file mode 100644 index 0000000000..d16ba519fe --- /dev/null +++ b/plugins/mimecast_v2/.CHECKSUM @@ -0,0 +1,15 @@ +{ + "spec": "a075e9a45f8da26b40eae68c765d196e", + "manifest": "e0e42959bee1c96589545b1afb0b1f61", + "setup": "ea867af34e3163ba06ef9660ec9023fc", + "schemas": [ + { + "identifier": "connection/schema.py", + "hash": "3253f4b76caee91ee0f0cdc596b64a98" + }, + { + "identifier": "monitor_siem_logs/schema.py", + "hash": "0a566c7d94e6689de4d0528d24470b94" + } + ] +} \ No newline at end of file diff --git a/plugins/mimecast_v2/.dockerignore b/plugins/mimecast_v2/.dockerignore new file mode 100644 index 0000000000..6da49864f5 --- /dev/null +++ b/plugins/mimecast_v2/.dockerignore @@ -0,0 +1,9 @@ +unit_test/**/* +unit_test +examples/**/* +examples +tests +tests/**/* +**/*.json +**/*.tar +**/*.gz diff --git a/plugins/mimecast_v2/Dockerfile b/plugins/mimecast_v2/Dockerfile new file mode 100644 index 0000000000..74c3c3774f --- /dev/null +++ b/plugins/mimecast_v2/Dockerfile @@ -0,0 +1,20 @@ +FROM --platform=linux/amd64 rapid7/insightconnect-python-3-slim-plugin:6.2.4 + +LABEL organization=rapid7 +LABEL sdk=python + +WORKDIR /python/src + +ADD 
./plugin.spec.yaml /plugin.spec.yaml +ADD ./requirements.txt /python/src/requirements.txt + +RUN if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + +ADD . /python/src + +RUN pip install . + +# User to run plugin code. The two supported users are: root, nobody +USER nobody + +ENTRYPOINT ["/usr/local/bin/icon_mimecast_v2"] diff --git a/plugins/mimecast_v2/Makefile b/plugins/mimecast_v2/Makefile new file mode 100644 index 0000000000..cdbcdb1721 --- /dev/null +++ b/plugins/mimecast_v2/Makefile @@ -0,0 +1,53 @@ +# Include other Makefiles for improved functionality +INCLUDE_DIR = ../../tools/Makefiles +MAKEFILES := $(wildcard $(INCLUDE_DIR)/*.mk) +# We can't guarantee customers will have the include files +# - prefix to ignore Makefiles when not present +# https://www.gnu.org/software/make/manual/html_node/Include.html +-include $(MAKEFILES) + +ifneq ($(MAKEFILES),) + $(info [$(YELLOW)*$(NORMAL)] Use ``make menu`` for available targets) + $(info [$(YELLOW)*$(NORMAL)] Including available Makefiles: $(MAKEFILES)) + $(info --) +else + $(warning Makefile includes directory not present: $(INCLUDE_DIR)) +endif + +VERSION?=$(shell grep '^version: ' plugin.spec.yaml | sed 's/version: //') +NAME?=$(shell grep '^name: ' plugin.spec.yaml | sed 's/name: //') +VENDOR?=$(shell grep '^vendor: ' plugin.spec.yaml | sed 's/vendor: //') +CWD?=$(shell basename $(PWD)) +_NAME?=$(shell echo $(NAME) | awk '{ print toupper(substr($$0,1,1)) tolower(substr($$0,2)) }') +PKG=$(VENDOR)-$(NAME)-$(VERSION).tar.gz + +# Set default target explicitly. Make's default behavior is the first target in the Makefile. 
+# We don't want that behavior due to includes which are read first +.DEFAULT_GOAL := default # Make >= v3.80 (make -version) + + +default: image tarball + +tarball: + $(info [$(YELLOW)*$(NORMAL)] Creating plugin tarball) + rm -rf build + rm -rf $(PKG) + tar -cvzf $(PKG) --exclude=$(PKG) --exclude=tests --exclude=run.sh * + +image: + $(info [$(YELLOW)*$(NORMAL)] Building plugin image) + docker build --pull -t $(VENDOR)/$(NAME):$(VERSION) . + docker tag $(VENDOR)/$(NAME):$(VERSION) $(VENDOR)/$(NAME):latest + +regenerate: + $(info [$(YELLOW)*$(NORMAL)] Refreshing schema from plugin.spec.yaml) + insight-plugin refresh + +export: image + $(info [$(YELLOW)*$(NORMAL)] Exporting docker image) + @printf "\n ---> Exporting Docker image to ./$(VENDOR)_$(NAME)_$(VERSION).tar\n" + @docker save $(VENDOR)/$(NAME):$(VERSION) | gzip > $(VENDOR)_$(NAME)_$(VERSION).tar + +# Make will not run a target if a file of the same name exists unless setting phony targets +# https://www.gnu.org/software/make/manual/html_node/Phony-Targets.html +.PHONY: default tarball image regenerate diff --git a/plugins/mimecast_v2/bin/icon_mimecast_v2 b/plugins/mimecast_v2/bin/icon_mimecast_v2 new file mode 100644 index 0000000000..e45eb777bc --- /dev/null +++ b/plugins/mimecast_v2/bin/icon_mimecast_v2 @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT +import os +import json +from sys import argv + +Name = "Mimecast V2" +Vendor = "rapid7" +Version = "1.0.0" +Description = "[Mimecast](https://www.mimecast.com) is a set of cloud services designed to provide next generation protection against advanced email-borne threats such as malicious URLs, malware, impersonation attacks, as well as internally generated threats, with a focus on email security. 
This plugin utilizes the [Mimecast API](https://www.mimecast.com/developer/documentation)" + + +def main(): + if 'http' in argv: + if os.environ.get("GUNICORN_CONFIG_FILE"): + with open(os.environ.get("GUNICORN_CONFIG_FILE")) as gf: + gunicorn_cfg = json.load(gf) + if gunicorn_cfg.get("worker_class", "sync") == "gevent": + from gevent import monkey + monkey.patch_all() + elif 'gevent' in argv: + from gevent import monkey + monkey.patch_all() + + import insightconnect_plugin_runtime + from icon_mimecast_v2 import connection, actions, triggers, tasks + + class ICONMimecastV2(insightconnect_plugin_runtime.Plugin): + def __init__(self): + super(self.__class__, self).__init__( + name=Name, + vendor=Vendor, + version=Version, + description=Description, + connection=connection.Connection() + ) + self.add_task(tasks.MonitorSiemLogs()) + + + """Run plugin""" + cli = insightconnect_plugin_runtime.CLI(ICONMimecastV2()) + cli.run() + + +if __name__ == "__main__": + main() diff --git a/plugins/mimecast_v2/extension.png b/plugins/mimecast_v2/extension.png new file mode 100644 index 0000000000000000000000000000000000000000..10045d35f61b3dcba03d1e132c82db74f594ae45 GIT binary patch literal 4306 zcmV;@5H0VCP)yt07&xk_8A{n002roL2LH*`6Vl2YHf-Y8&%lZ z;w36w01Z%TZI9X7;&OD4d3%&BF=RA3YybdF4i;0evBA&J*JEadxw^;WSzLdKi=|LheL+NXSzLoc zMQoXytPA{Bc>n+p=t)FDRCt{2UFl+?IusUClmTlEBG!VGMMXu?dS~YSAMN4_oD-1X zHno0#S+adePR^EE?H_^by2`!l@WcU%Veg=Q^FayQQ54PE!Svo0d18ac)%oc#Od$oL+V`$d zRoK7M?VvBuHS50kveLv7X#L(5s*2FdG{jooyFx!4J(?L0(!Y0ws>09C5aE|`?;8DR zwAyl(Sa0{PQB^=Zl>&Yr}&BOZrX zb~8&X+Pim+el#A3ShZPZFC@)xtC4#MJP@(mm8X(DBq}`*g?GgR5vx}FyXs3@kk{pN zy}B|v^L?;1Vv5O80mEV0a|_UDNU;4jcu3v_(% zDn0Q4m|6r7iwJ{zSLz(N#uhHiPS{c^qxG~qa-gLJD&$qnEHjv1r5oMJ>g62FR^8=z zV!KLi?OKCqKHA}R47PORD|$Txf1;OJed#*|rQ_--t`i2`)ojLW&-)3Dh@u#dA9U;N z&%ZR#yCJ)jbpQ2&aH^C$Fe|Pde#K>18mZSlTCaAyq40XBOHVsphmx@Sx+!AdN@qY6B~hi8ZuaY8saOry8Q0&;p+)7qR#S7FPf~3X zM5?K8K4*TfT4tE(-M5pytE!ZGO@018IpwL=nvT~E)61`VF(ipetIeXd>Bwzes1@uB 
zQwHJS9JyW_sX3+bLZVh@9Wo}~m!6p(v(ziLDCPZ*6GXM7KVK)Nz2bb1M2oQFL7C z*GkmqKAi4GKRXh7s0`5S+LB{W4MVRUu{XSQy{rgS#s)>u_vBfV{gcWd*4)WyXv-g2 zWk<%t;dY8Iu>};2X3_!8lAjzQ(K%DfQP%V(w2Cp@RR1${`~I6f`Hz!M34Wa}i4)>5 zJGN6mEV-Ku_N9pxmu|98&-SWWeq^xwv=(`#Ig%{N71~Es62ijcZE#(d)#qa~tK7L^ z%$>+KOk9_exID20(#UkpZHW~xS}gyGiN&+&qF;oBLlQlUSaAb%)PUBuYqJL&SADE}VNkN{cDN#v3AZFvmFM4t>29ft3 zzKgfzxTl_x#^pN163nD$ZcnUq>6;3iB1UFCAhQ0R`Fz2*jna8q&p91=N98a={!cPf zLn-W~qTiH-WO+O2;^*;D>V#)mdKknaqD+s-QwI`t_grQ#(Gb z#r-=}W*+r2GLjOpRt*yW!qbPQxyk2aHWG;StX5Dd(&3U#5}mj%-T}G^f{@&B{lUiD zTVvQXb&J&mGE9x>1nu#EQqb!=N4wZ(HKN)P{*cY=JVq!_wtQQ>6i`etk z8hyTRp1-$O@tnlZ)zxv<^ASsxJ_G-NV8x^@@m`kW`0;rB-i~C3&QnbX#kQfN$o3EY zRWP0`rrIf|)>bcXA0o1+j-pk28I1jKvd4nY3es|?e*2Z7$w*I)cT@u_dS7TRk9~9x zZG~GS|GA30I3kwVAEf)*b()wrL|-1)d&QVWCYp34F$r=Ji@s#Gb!FOMl-}@@8oG9U zh+ZKd$#!Jm6CJTszicwQDSOc*w5fI-`y?;RzzyHfaLer@X_OUgpVzKz#8W=t&!p%& z$uuCbq(1B_1Ns~a!VqYtr`X__Xi0g9rADxRZiGt*BBl0%K^4-b3vSquSh{n5{aJv-YT$Cv9xtNdw_)Q2>v%5YAQqAEa1CJT zGRo+rA?ha@PH3szL^g78!eI&i@~_}>-X&WoD>yT~U3SqVeHpNWS>0#tfJKBQYD9S&Zh(1UIB>lda~3=Os* zH8Eio&I*6HS(FYSmdpx(hm)SAHv0gJhIG`~auAEMva$jBoxCxI6_%c?sc^cdMyv!h z#-X^myoSFeY(dgXb|d@bJSyCaSj@}{XxJFU3V@VA=-~+uu~^arpa$tPgv})^{m@+o zTy2wjl>uy3jNx@$#K++5nwprf4$X7Q`F6xYf|!1IatmU?lr}`os}|)D+!^bgpkFDd z#Q|2QB!!~}RiNHE)c$tFvY|QdC=}j;SkUK-9OMwF10oiW(gv+}=bgxk0DCD}Sy3b) z>73!%`R$0M!DH;N!;_m33-V`AEOgppew_=f^ER)QBmm~8J5G)G00FfrlK5Wa!(51O zM=WCm7VMi63n~>3!-8}`WHfP#F-=)Dkcd@1fojXO?djm}I0%+Ki06WQ~G?E%DnC3yGMwB^EH$9)<-T z;!DDGwqGM-3baz5hBK@f1(7CJP2=Jwq0Jz7)%t$Kg7U~si3LqBhhag6#&Xy@tssM~ z$tz-gRdy=Q5Ub`0T%M?ej`B1TPhuT$4zUxgrRRaM5?$x=whGP&Fu zl;Iif=#W_PL8NPPMMKLC*2ay-=~}5GADdWbo(iGTmrr>2&xr=a#HzLIZeQj|3!+Ah z1s1JXH}c1NFkwbycx+;60*hEGQ~!NH=ja5E-25od4a!pnsVOC$(fsw#%dB3n zf9_DPzx1(p4x3oDP3kpwIvEGu`ugS1pVvQoyv|1*isD|N3wez(PR%P>KOdV|$X2*{ zrq}-t^07$CQP5T>t~f>k`*MkTFqqnm_zPp%fbZrKD+evtUi4J3Pl_*SGeqOWCpVVI zV-pKUxClfpuRe!|N+NKK0^+p#lJBw;!czt>v9>Vgpc}9gIT|N^rEfVrHnA8nc|5C* z3;nV3a{aLY!_EjTQdYpI+DgPizFT-W(Kx{bD3`*66AQbXa2b-32kEdhD^Xy)RXBnU 
zTpX*Z0f1YV^Z{Q$&8`28=FxE3^mYxhir3>Q_AY0vab~ z_{;_H;KX8ht*cJ?-odBJ(h?dB=)%dBGJsnCio^neaE!n7z&J@*z`qC{ome<$nIx9) zNz|(N6jjpU9L73IEOlk-^{cE{41*1h0&psobPA77EF1|fsITP%DsT!Uq6Ven3ei?r z>}`J`8S5)c8wqFRgUVu zRI0BJaFCVD>Rp3b4^J$Okw28sQOLa32g7FQ?p1X*Ih|(aKN!a$7JP#2idBfEI6?^< z6wXE=udg6x-JV!#lGNiC#H#gJ`LQIA*2e?K)(}wsQkE%M)TLSQcY-lUDu;BQ>dPR9 zjxLPk#>7$#T-7w}>A=S+!&%7?7>Ot8Xnb+~`{NVKWoGo-S`0;@gYgRP)|fLL&NqPz zER&;foc-7cEz14=X!IJbXRCTQg{X7NR=8L*Dw@IO$`s_T%1rUXkUC=tzB8&XgTS9& z%yM`KV*O4&x&^Un(}EnH4Ce-ExXP?rc89|Yip0C@Ve(I~;7=s7y;&;d+YsZFcSFlJ z^AnjCB{XF#KgTR5H6m?te_H0+VSQ2oKY&;@pELbFa5tIiTn>9l*5qo*xOYgm0Bx#e zk>k0*IFKA6HXY_S-1rH^s*SlW0Eh=9D{=UI(TT@A@9vtQKmD$XE+U)fTO@iAAJBg0$5<#&h{!=L1BNF!PeF!E(SfP5>KJ zDx)7z1f{jXpv#!27@#eX=Bx!CD1xxN$mS+Vy=)+Y&gfhFw^=ffA$t>!yDEA6W){*h z@mj4_<@ONq*9n`Z)5r<0X54XA1t8%B^RI)f@Jib(r8USeoLt%g^*QOK7M9LZ(+#pg z>TF*-P&(UtEnHdUmJhbqmHV}7l($N%2n!0zQ3?pvSK+TWZ`o9mpVQVG1{OIGGanWcQ3a*~(z z9&uOHqFXH?^@O}8^#pJCj%*D-b9@9^W7RwSNMtUlduaZ0J!&KoROnK9j}!_St*QUM zn)j`t9{&PeW-ohv_7*ht+v~ok$EB9IZ=PCNZ$x|UmHBL20ZSJ?(Y@TcaKrc6D5;jL zk1D5@Lx`tweT%N2yL`6nukP-pWkHfc+uyy^4S;39u7B)!)rltH^Oo%g%MF4-zFUbA z?%rAY1(yhEll_lE<3ztIu4WJ7V!Qh`Wza3oFSV<*YuNU55suM6x`X_UL@{h940^0W zK@|enOxNk1{A%~)h0@a^zZfHMQumuu2R@?X5EC8&tWl^9E-?{&VJHOS1yFE^#F;Qs zR!P8zh@1&K=)Pp%2sdbN$Y%-}v{kWbn@|xdWQzzc8}S=rIKV+T1oQDR;b9UvC&rne z^UDFx$T5k4*Qvmv&V)Bf`TK6hyGdmbp6+gLzyChUdb2o8x99UbiuO>>aSf=LuMn@u89NmMEk zXb|PG5||%Dl*rdCdf-8Fp)4c<4v|Xmh)2Fa8VNfSK&Ed>2#@gfedkyre+v|Vj1QX8_To7cz#9iqh=@a`iv=t`1tPL25TEEMW>blLA(c&}(*yzvB%txx4CbPr z-?H~`^WeHO$z&dbN~ciVS!_oSIyiCXav4-Ec(Z8hC6U8?i4ak%W-vKghIr1Q?1uz8ei7 zyc;e^0wziZM(5_&rh8B*>=#}B)>N17=O6Z7xNTzK9wf$#T}4&3L2j3q z{URne1}XK9>Cwoa<`MSqk1Y7{T$yZj)W2TX zNc2z`4hpY9?a$LkrC^cIJNfO=8~IliAiq=jy-h&KKOpM+wOtQD{n0SImkZ^BJK>$NvChfg-_vz@yc zjtR{E&Kb>3ZFep;d>nrt_#dxcTlmqT-U2Mp>CCneT3zeMYcBu!XlG#~xB0Z5nOLog zS@}T<_M4TRDUcGe#o)?FO+#!9=aWXCnX3hI#2_fM3)P){AkS?VT%S+6GaO|OV}!N| zdPowh(TmQ+>i+(VXbeY0xo$X4jsrQfV}~}{AP&AgqSVIv^&|Q>?JD$cIDtU36lz8y 
zt*isMaH0(`bp~r+wsk%SpsftDom1pU_UE~M=~3K6MeI)VaAkpwNMg%^v;DHNiaAUq zgkP5FXq_2240X&%WFX#d-n}N)1-g6B`2z#%Pal2fH9^T?x}879~IJG40qPlnfMnsYRsm!r!1%U0lufi!A2 zSMq6+X?eaBFvhvOH6tKB&#mL13pNRp1zKPk7+=2>-g~c@awu`xV4QZ!dLg${ z1~4$2A)Am=UG@-)I>&t!2iwO_XmOwi==%KXTIaYM6Z=3`yT$6>z8vQjuz~eXx`}Gg zE>XTK`e-MuKIe;p3PWyc`bjV8NO0oB6_4iAmD^{cEaHOd099^W>#&;MCO9#lqh}89 zZC`!P%4d(rKTj1PR!fgX zjd~87v3}xIP9lZ4Y*pE}`z3j?U*&X8`vZ`bCg`6X=@~gu!=CdS7^>tvRcd{Hpw#3> z4$^PVt6s|jZST}Whf(@Zjv2V=x}9rwRP1)rAKKPPL{y7Vm9~jqe+dY zO)x+p6Fc2zwCCzqx?DzZx5ip3UL69_rDAo{G5*9OG0Plgb-Z+XSo8M_SDwT@#V!zuy62dvAVkEWT0+N%Iy_v{9YEo zy4iB#4chw+w9p3Vhl{>y%yCLHG_da488|)Sn_Tpgm9cU8toN6I*?mIe6s-JE*Nb`o z(Ta~kOD0=49MJW&&lr4;LK0I=oO=GuoHsWusrT_8DGd{=>9VdpEc#X>6%` zb8^N(J`4BYPl_P#gEq7o{Bib3|HJdkxadZU*qF~W2!-Xk>=-Snsl3Mgqer^E&EX4e zm%7pCRc`01T5Z^XA|(kjb7Kr=m*7zLlNR~CZ;G;X6-Hz2>w3>T+1;}?cV+3Fz^DOS zTm%&m8=swkVh!fQ_U_PyB}5N4RopNJoO36LY?t9U;Hr1P&_ z{XX$8G1U(c1UH{*maBId*Qf1J470&d?7dj|p|dKP=zS@_x~g>qAyr9iWZAKrN~@>* zWTm!Q-juKm+=km)k`e~O?4N`CE=Mg2mHIJ2h#KO zw->7W5vT%-8zMlwa=#Fz1Y@SHB^es$Z2e* zbsN>_M&ns9^dl#%%;BKrKh@_Kv*ILSrd+gm>#UE$8zEs80o)K{%d{iA>U32Vs++uQ zR#mJ3Z!V>GTds%=@*W+E)vmL_aLBp81gqVcD&4kuXu$+yd&{#ir^5uDg1uj)E-n}Y zbP(PMn8Ggy3DjCVXVV72FmRib>)fT7*W|X4iFoU&owiaJ499VGLtv^T-Ch(ap;-a( zV})&k+CH<)avV3r<3aL)?<` zJ~4#6@#krLX6}~iut4TuqU!j%GVL|pj<>jGi8ZjkPNPrJ+L1C7Y9oT`o=|iPtU@8t z1Ygm;-wlY~rXO&8>^ahQ&2Hn>S>;E-HY#gMGy+h_EU{tA?~t}6=RY%r{6HvI>h{@( z;Rp4(=yZTTwSz=Jx`>|NafCL2?7(CuxcJTt?sg}&BWF?Y^o^iIm@9(Cz6Rp1Dh>Dz@z!4!~bAb9|G$);v9Hr z?*v$7m45DmdvMi$^lU|bs}Y8?`K6t1dzUo#(Lvpr_7UlEz0kW i@UKxF#?GVkQSr8&)Lkv7)*}DX@p9k9`{C1|lz#)Z5-jlm literal 0 HcmV?d00001 diff --git a/plugins/mimecast_v2/icon_mimecast_v2/__init__.py b/plugins/mimecast_v2/icon_mimecast_v2/__init__.py new file mode 100644 index 0000000000..797e426edf --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/__init__.py @@ -0,0 +1 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT diff --git a/plugins/mimecast_v2/icon_mimecast_v2/actions/__init__.py 
b/plugins/mimecast_v2/icon_mimecast_v2/actions/__init__.py new file mode 100644 index 0000000000..7020c9a4ad --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/actions/__init__.py @@ -0,0 +1,2 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT + diff --git a/plugins/mimecast_v2/icon_mimecast_v2/connection/__init__.py b/plugins/mimecast_v2/icon_mimecast_v2/connection/__init__.py new file mode 100644 index 0000000000..c78d3356be --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/connection/__init__.py @@ -0,0 +1,2 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT +from .connection import Connection diff --git a/plugins/mimecast_v2/icon_mimecast_v2/connection/connection.py b/plugins/mimecast_v2/icon_mimecast_v2/connection/connection.py new file mode 100644 index 0000000000..6f907e8d7b --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/connection/connection.py @@ -0,0 +1,51 @@ +import insightconnect_plugin_runtime +from insightconnect_plugin_runtime.exceptions import PluginException, ConnectionTestException +from .schema import ConnectionSchema, Input +from icon_mimecast_v2.util.api import API + +# Custom imports below +from datetime import datetime, timezone + + +class Connection(insightconnect_plugin_runtime.Connection): + def __init__(self): + super(self.__class__, self).__init__(input=ConnectionSchema()) + + def connect(self, params): + self.logger.info("Connect: Connecting...") + self.client_secret = params.get(Input.CLIENT_SECRET, {}).get("secretKey", "").strip() + self.client_id = params.get(Input.CLIENT_ID, {}).get("secretKey", "").strip() + self.api = API(client_id=self.client_id, client_secret=self.client_secret, logger=self.logger) + self.api.authenticate() + + def test(self): + try: + now_date = datetime.now(tz=timezone.utc).date() + self.api.get_siem_logs(log_type="receipt", query_date=now_date, page_size=1, max_threads=1, next_page=None) + return {"success": True} + except PluginException as error: + raise 
ConnectionTestException(cause=error.cause, assistance=error.assistance, data=error.data) + + def test_task(self): + try: + now_date = datetime.now(tz=timezone.utc).date() + self.api.get_siem_logs(log_type="receipt", query_date=now_date, page_size=1, max_threads=1, next_page=None) + self.logger.info("The connection test to Mimecast was successful.") + return {"success": True} + except PluginException as error: + return_message = "" + failed_message = "The connection test to Mimecast for has failed." + self.logger.info(failed_message) + return_message += f"{failed_message}\n" + + cause_message = f"This failure was caused by: '{error.cause}'" + self.logger.info(cause_message) + return_message += f"{cause_message}\n" + + self.logger.info(error.assistance) + return_message += f"{error.assistance}\n" + raise ConnectionTestException( + cause="Configured credentials do not have permission for this API endpoint.", + assistance="Please ensure credentials have required permissions.", + data=return_message, + ) diff --git a/plugins/mimecast_v2/icon_mimecast_v2/connection/schema.py b/plugins/mimecast_v2/icon_mimecast_v2/connection/schema.py new file mode 100644 index 0000000000..7f5dfa3142 --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/connection/schema.py @@ -0,0 +1,62 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT +import insightconnect_plugin_runtime +import json + + +class Input: + CLIENT_ID = "client_id" + CLIENT_SECRET = "client_secret" + + +class ConnectionSchema(insightconnect_plugin_runtime.Input): + schema = json.loads(r""" + { + "type": "object", + "title": "Variables", + "properties": { + "client_id": { + "$ref": "#/definitions/credential_secret_key", + "title": "Client ID", + "description": "The Mimecast 2.0 Application Client ID", + "placeholder": "Client ID", + "tooltip": "Enter the Client ID obtained from the Mimecast 2.0 API Application", + "order": 1 + }, + "client_secret": { + "$ref": "#/definitions/credential_secret_key", + "title": "Client 
Secret", + "description": "The Mimecast 2.0 Application Client Secret", + "placeholder": "Client Secret", + "tooltip": "Enter the Client ID obtained from the Mimecast 2.0 API Application", + "order": 2 + } + }, + "required": [ + "client_id", + "client_secret" + ], + "definitions": { + "credential_secret_key": { + "id": "credential_secret_key", + "type": "object", + "title": "Credential: Secret Key", + "description": "A shared secret key", + "required": [ + "secretKey" + ], + "properties": { + "secretKey": { + "type": "string", + "title": "Secret Key", + "description": "The shared secret key", + "format": "password", + "displayType": "password" + } + } + } + } +} + """) + + def __init__(self): + super(self.__class__, self).__init__(self.schema) diff --git a/plugins/mimecast_v2/icon_mimecast_v2/tasks/__init__.py b/plugins/mimecast_v2/icon_mimecast_v2/tasks/__init__.py new file mode 100644 index 0000000000..0ac82f0338 --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/tasks/__init__.py @@ -0,0 +1,4 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT + +from .monitor_siem_logs.task import MonitorSiemLogs + diff --git a/plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/__init__.py b/plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/__init__.py new file mode 100644 index 0000000000..c0d4387b06 --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/__init__.py @@ -0,0 +1,2 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT +from .task import MonitorSiemLogs diff --git a/plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/schema.py b/plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/schema.py new file mode 100644 index 0000000000..91a6407de0 --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/schema.py @@ -0,0 +1,55 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT +import insightconnect_plugin_runtime +import json + + +class Component: + DESCRIPTION = "Monitor and retrieve the 
latest logs" + + +class Input: + pass + + +class State: + pass + + +class Output: + DATA = "data" + + +class MonitorSiemLogsInput(insightconnect_plugin_runtime.Input): + schema = json.loads(r""" + {} + """) + + def __init__(self): + super(self.__class__, self).__init__(self.schema) + + +class MonitorSiemLogsState(insightconnect_plugin_runtime.State): + schema = json.loads(r""" + {} + """) + + def __init__(self): + super(self.__class__, self).__init__(self.schema) + + +class MonitorSiemLogsOutput(insightconnect_plugin_runtime.Output): + schema = json.loads(r""" + { + "type": "array", + "title": "Data", + "description": "List of logs", + "items": {}, + "required": [ + "data" + ], + "definitions": {} +} + """) + + def __init__(self): + super(self.__class__, self).__init__(self.schema) diff --git a/plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/task.py b/plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/task.py new file mode 100644 index 0000000000..6b1384a954 --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/tasks/monitor_siem_logs/task.py @@ -0,0 +1,229 @@ +import insightconnect_plugin_runtime +from insightconnect_plugin_runtime.exceptions import APIException, PluginException +from insightconnect_plugin_runtime.helper import compare_and_dedupe_hashes, hash_sha1 +from .schema import MonitorSiemLogsInput, MonitorSiemLogsOutput, MonitorSiemLogsState, Input, Output, Component, State +from typing import Dict, List, Tuple +from datetime import datetime, timezone, timedelta +import copy + +# Date format for conversion +DATE_FORMAT = "%Y-%m-%d" +# Default and max values +LOG_TYPES = ["receipt", "url protect", "attachment protect"] +DEFAULT_THREAD_COUNT = 10 +DEFAULT_PAGE_SIZE = 100 +MAX_LOOKBACK_DAYS = 7 +INITIAL_MAX_LOOKBACK_DAYS = 1 +# Run type +INITIAL_RUN = "initial_run" +SUBSEQUENT_RUN = "subsequent_run" +PAGINATION_RUN = "pagination_run" +# Access keys for state and custom config +LOG_HASHES = "log_hashes" +QUERY_CONFIG = "query_config" 
+QUERY_DATE = "query_date" +CAUGHT_UP = "caught_up" +NEXT_PAGE = "next_page" +# Access keys for custom config +THREAD_COUNT = "thread_count" +PAGE_SIZE = "page_size" + + +class MonitorSiemLogs(insightconnect_plugin_runtime.Task): + def __init__(self): + super(self.__class__, self).__init__( + name="monitor_siem_logs", + description=Component.DESCRIPTION, + input=MonitorSiemLogsInput(), + output=MonitorSiemLogsOutput(), + state=MonitorSiemLogsState(), + ) + + def run(self, params={}, state={}, custom_config={}): # pylint: disable=unused-argument + self.logger.info(f"TASK: Received State: {state.get(QUERY_CONFIG)}") + existing_state = state.copy() + try: + now_date = datetime.now(tz=timezone.utc).date() + run_condition = self.detect_run_condition(state.get(QUERY_CONFIG, {}), now_date) + self.logger.info(f"TASK: Run state is {run_condition}") + state = self.update_state(state) + page_size, thead_count = self.apply_custom_config(state, run_condition, custom_config) + max_run_lookback_date = self.get_max_lookback_date(now_date, run_condition, bool(custom_config)) + query_config = self.prepare_query_params(state.get(QUERY_CONFIG, {}), max_run_lookback_date, now_date) + logs, query_config = self.get_all_logs(run_condition, query_config, page_size, thead_count) + self.logger.info(f"TASK: Total logs collected this run {len(logs)}") + logs, log_hashes = compare_and_dedupe_hashes(state.get(LOG_HASHES, []), logs) + self.logger.info(f"TASK: Total logs after deduplication {len(logs)}") + exit_state, has_more_pages = self.prepare_exit_state(state, query_config, now_date, log_hashes) + return logs, exit_state, has_more_pages, 200, None + except APIException as error: + self.logger.info( + f"Error: An API exception has occurred. Status code: {error.status_code} returned. Cause: {error.cause}. Error data: {error.data}." + ) + return [], existing_state, False, error.status_code, error + except PluginException as error: + self.logger.info(f"Error: A Plugin exception has occurred. 
Cause: {error.cause} Error data: {error.data}.") + return [], existing_state, False, 500, error + except Exception as error: + self.logger.info(f"Error: Unknown exception has occurred. No results returned. Error Data: {error}") + return [], existing_state, False, 500, PluginException(preset=PluginException.Preset.UNKNOWN, data=error) + + def detect_run_condition(self, query_config: Dict, now_date: datetime) -> str: + """ + Return runtype based on query configuration + :param query_config: + :param now_date: + :return: runtype string + """ + if not query_config: + return INITIAL_RUN + for log_type_config in query_config.values(): + if not log_type_config.get(CAUGHT_UP) or log_type_config.get(QUERY_DATE) not in str(now_date): + return PAGINATION_RUN + return SUBSEQUENT_RUN + + def update_state(self, state: Dict) -> Dict: + """ + Initialise state, validate state, apply custom config + :param state: + :return: State + """ + initial_log_type_config = {CAUGHT_UP: False} + if not state: + self.logger.info("TASK: Initializing first state...") + state = {QUERY_CONFIG: {log_type: copy.deepcopy(initial_log_type_config) for log_type in LOG_TYPES}} + else: + for log_type in LOG_TYPES: + if log_type not in state.get(QUERY_CONFIG, {}).keys(): + self.logger.info(f"TASK: {log_type} missing from state. 
Initializing...") + state[QUERY_CONFIG][log_type] = copy.deepcopy(initial_log_type_config) + return state + + def get_max_lookback_date(self, now_date: datetime, run_condition: str, custom_config: bool) -> datetime: + """ + Get max lookback date for run condition + :param now_date: + :param run_condition: + :param custom_config: + :return: max_run_lookback_date + """ + max_run_lookback_days = MAX_LOOKBACK_DAYS + if run_condition in [INITIAL_RUN] and not custom_config: + max_run_lookback_days = INITIAL_MAX_LOOKBACK_DAYS + + max_run_lookback_date = now_date - timedelta(days=max_run_lookback_days) + return max_run_lookback_date + + def apply_custom_config(self, state: Dict, run_type: str, custom_config: Dict = {}) -> Tuple[int, int]: + """ + Apply custom configuration for lookback, query date applies to start and end time of query + :param current_query_config: + :param run_type: + :param custom_config: + :return: Page size and thread count + """ + custom_query_config = {} + if custom_config: + self.logger.info("TASK: Custom config detected") + custom_query_config = custom_config.get("query_config", {}) + if run_type == INITIAL_RUN: + current_query_config = state.get(QUERY_CONFIG) + for log_type, log_query_config in custom_query_config.items(): + log_query_date = log_query_config.get("query_date", None) + self.logger.info(f"TASK: Supplied lookback date of {log_query_date} for log type {log_type}") + current_query_config[log_type] = {QUERY_DATE: log_query_date} + page_size = max(1, min(custom_config.get(PAGE_SIZE, DEFAULT_PAGE_SIZE), DEFAULT_PAGE_SIZE)) + thread_count = max(1, custom_config.get(THREAD_COUNT, DEFAULT_THREAD_COUNT)) + return page_size, thread_count + + def prepare_query_params(self, query_config: Dict, max_lookback_date: Dict, now_date: datetime) -> Dict: + """ + Prepare query for request. 
Validate query dates, move forward when caught up + :param query_config: + :param max_lookback_date: + :param now_date: + :return: + """ + for log_type, log_type_config in query_config.items(): + query_date_str = log_type_config.get(QUERY_DATE) + if query_date_str: + query_date = datetime.strptime(query_date_str, DATE_FORMAT).date() + if not query_date_str: + self.logger.info( + f"TASK: Query date for {log_type} log type is not present. Initializing a {max_lookback_date}" + ) + log_type_config[QUERY_DATE] = max_lookback_date + elif query_date < now_date and log_type_config.get(CAUGHT_UP) is True: + self.logger.info(f"TASK: Log type {log_type} has caught up for {query_date}") + log_type_config[QUERY_DATE] = query_date + timedelta(days=1) + log_type_config[CAUGHT_UP] = False + log_type_config.pop(NEXT_PAGE) + query_config[log_type] = self.validate_config_lookback(log_type_config, max_lookback_date, now_date) + return query_config + + def validate_config_lookback(self, log_type_config: Dict, max_lookback_date: datetime, now_date: datetime) -> Dict: + """ + Ensures provided query date in scope of request time window + :param log_type_config: + :param max_lookback_date: + :param now_date: + :return: log_type_config + """ + query_date = log_type_config.get(QUERY_DATE) + if isinstance(query_date, str): + query_date = datetime.strptime(query_date, DATE_FORMAT).date() + if query_date < max_lookback_date: + return {QUERY_DATE: max_lookback_date} + if query_date > now_date: + log_type_config[QUERY_DATE] = now_date + return log_type_config + + def get_all_logs( + self, run_condition: str, query_config: Dict, page_size: int, thead_count: int + ) -> Tuple[List, Dict]: + """ + Gets all logs of provided log type. First retrieves batch URLs. Then downloads and reads batches, pooling logs. 
+ :param run_condition: + :param query_config: + :param page_size: + :param thead_count: + :return: Logs, updated query configuration (state) + """ + complete_logs = [] + for log_type, log_type_config in query_config.items(): + if (not log_type_config.get(CAUGHT_UP)) or (run_condition != PAGINATION_RUN): + logs, results_next_page, caught_up = self.connection.api.get_siem_logs( + log_type=log_type, + query_date=log_type_config.get(QUERY_DATE), + next_page=log_type_config.get(NEXT_PAGE), + page_size=page_size, + max_threads=thead_count, + ) + complete_logs.extend(logs) + log_type_config.update({NEXT_PAGE: results_next_page, CAUGHT_UP: caught_up}) + else: + self.logger.info(f"TASK: Query for {log_type} is caught up. Skipping as we are currently paginating") + return complete_logs, query_config + + def prepare_exit_state( + self, state: dict, query_config: dict, now_date: datetime, log_hashes: List[str] + ) -> Tuple[Dict, bool]: + """ + Prepare state and pagination for task completion. Format date time. 
+ :param state: + :param query_config: + :param now_date: + :param log_hashes: + :return: state, has_more_pages + """ + has_more_pages = False + for log_type_config in query_config.values(): + query_date = log_type_config.get(QUERY_DATE) + if isinstance(query_date, str): + query_date = datetime.strptime(query_date, DATE_FORMAT).date() + if (not log_type_config.get(CAUGHT_UP)) or query_date < now_date: + has_more_pages = True + log_type_config[QUERY_DATE] = query_date.strftime(DATE_FORMAT) + state[QUERY_CONFIG] = query_config + state[LOG_HASHES] = log_hashes + return state, has_more_pages diff --git a/plugins/mimecast_v2/icon_mimecast_v2/triggers/__init__.py b/plugins/mimecast_v2/icon_mimecast_v2/triggers/__init__.py new file mode 100644 index 0000000000..7020c9a4ad --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/triggers/__init__.py @@ -0,0 +1,2 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT + diff --git a/plugins/mimecast_v2/icon_mimecast_v2/util/__init__.py b/plugins/mimecast_v2/icon_mimecast_v2/util/__init__.py new file mode 100644 index 0000000000..797e426edf --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/util/__init__.py @@ -0,0 +1 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT diff --git a/plugins/mimecast_v2/icon_mimecast_v2/util/api.py b/plugins/mimecast_v2/icon_mimecast_v2/util/api.py new file mode 100644 index 0000000000..893ee662be --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/util/api.py @@ -0,0 +1,135 @@ +import requests +from insightconnect_plugin_runtime.exceptions import ( + APIException, + PluginException, + HTTPStatusCodes, + ResponseExceptionData, +) +from insightconnect_plugin_runtime.helper import extract_json, make_request, rate_limiting +from logging import Logger +from requests import Response, Request +from io import BytesIO +from icon_mimecast_v2.util.constants import Endpoints +from typing import Dict, List, Tuple +from multiprocessing.dummy import Pool +import gzip +import json + +GET = "GET" +POST = 
"POST" + + +class API: + def __init__(self, client_id: str, client_secret: str, logger: Logger) -> None: + self.client_id = client_id + self.client_secret = client_secret + self.logger = logger + self.access_token = None + + def authenticate(self) -> None: + self.logger.info("API: Authenticating...") + data = {"client_id": self.client_id, "client_secret": self.client_secret, "grant_type": "client_credentials"} + response = self.make_api_request( + url=Endpoints.AUTH, + method=POST, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + data=data, + auth=False, + ) + self.access_token = response.get("access_token") + self.logger.info("API: Authenticated") + + def get_siem_logs( + self, log_type: str, query_date: str, next_page: str, page_size: int = 100, max_threads: int = 10 + ) -> Tuple[List[str], str, bool]: + batch_download_urls, result_next_page, caught_up = self.get_siem_batches( + log_type, query_date, next_page, page_size + ) + logs = [] + self.logger.info(f"API: Getting SIEM logs from batches for log type {log_type}...") + self.logger.info(f"API: Applying page size limit of {page_size}") + with Pool(max_threads) as pool: + batch_logs = pool.imap(self.get_siem_logs_from_batch, batch_download_urls) + for result in batch_logs: + if isinstance(result, (List, Dict)): + logs.extend(result) + self.logger.info(f"API: Discovered {len(logs)} logs for log type {log_type}") + return logs, result_next_page, caught_up + + def get_siem_batches( + self, log_type: str, query_date: str, next_page: str, page_size: int = 100 + ) -> Tuple[List[str], str, bool]: + self.logger.info( + f"API: Getting SIEM batches for log type {log_type} for {query_date} with page token {next_page}..." 
+ ) + params = { + "type": log_type, + "dateRangeStartsAt": query_date, + "dateRangeEndsAt": query_date, + "pageSize": page_size, + } + if next_page: + params.update({"nextPage": next_page}) + batch_response = self.make_api_request(url=Endpoints.GET_SIEM_LOGS_BATCH, method=GET, params=params) + batch_list = batch_response.get("value", []) + caught_up = batch_response.get("isCaughtUp") + self.logger.info( + f"API: Discovered {len(batch_list)} batches for log type {log_type}. Response reporting {caught_up} that logs have caught up to query window" + ) + urls = [batch.get("url") for batch in batch_list] + return urls, batch_response.get("@nextPage"), caught_up + + def get_siem_logs_from_batch(self, url: str): + response = requests.request(method=GET, url=url, stream=False) + with gzip.GzipFile(fileobj=BytesIO(response.content), mode="rb") as file_: + logs = [] + # Iterate over lines in the decompressed file, decode and load the JSON + for line in file_: + decoded_line = line.decode("utf-8").strip() + logs.append(json.loads(decoded_line)) + return logs + + @rate_limiting(5) + def make_api_request( + self, + url: str, + method: str, + headers: Dict = {}, + json: Dict = None, + data: Dict = None, + params: Dict = None, + return_json: bool = True, + auth=True, + ) -> Response: + if auth: + headers["Authorization"] = f"Bearer {self.access_token}" + request = Request(url=url, method=method, headers=headers, params=params, data=data, json=json) + try: + response = make_request( + _request=request, + allowed_status_codes=[HTTPStatusCodes.UNAUTHORIZED], + exception_data_location=ResponseExceptionData.RESPONSE, + ) + except PluginException as exception: + if isinstance(exception.data, Response): + raise APIException( + cause=exception.cause, + assistance=exception.assistance, + data=exception.data, + status_code=exception.data.status_code, + ) + raise exception + if response.status_code == HTTPStatusCodes.UNAUTHORIZED: + json_data = extract_json(response) + if 
json_data.get("fail", [{}])[0].get("code") == "token_expired": + self.authenticate() + self.logger.info("API: Token has expired, attempting re-authentication...") + return self.make_api_request(url, method, headers, json, data, params, return_json, auth) + if response.status_code == HTTPStatusCodes.UNAUTHORIZED: + raise APIException( + preset=PluginException.Preset.API_KEY, data=response.text, status_code=response.status_code + ) + if return_json: + json_data = extract_json(response) + return json_data + return response diff --git a/plugins/mimecast_v2/icon_mimecast_v2/util/constants.py b/plugins/mimecast_v2/icon_mimecast_v2/util/constants.py new file mode 100644 index 0000000000..b76b693c1d --- /dev/null +++ b/plugins/mimecast_v2/icon_mimecast_v2/util/constants.py @@ -0,0 +1,6 @@ +BASE_URL = "https://api.services.mimecast.com/" + + +class Endpoints: + AUTH = f"{BASE_URL}oauth/token" + GET_SIEM_LOGS_BATCH = f"{BASE_URL}siem/v1/batch/events/cg" diff --git a/plugins/mimecast_v2/plugin.spec.yaml b/plugins/mimecast_v2/plugin.spec.yaml new file mode 100644 index 0000000000..65fc886f95 --- /dev/null +++ b/plugins/mimecast_v2/plugin.spec.yaml @@ -0,0 +1,69 @@ +plugin_spec_version: v2 +extension: plugin +products: [insightconnect] +name: mimecast_v2 +title: Mimecast V2 +description: "[Mimecast](https://www.mimecast.com) is a set of cloud services designed to provide next generation protection against advanced email-borne threats such as malicious URLs, malware, impersonation attacks, as well as internally generated threats, with a focus on email security. This plugin utilizes the [Mimecast API](https://www.mimecast.com/developer/documentation)" +troubleshooting: "API 2.0 endpoints uses Client Id and Client Secret to authenticate the users. Please refer to Mimecast Customer Community to manage API 2.0 Application credentials. In order to create the Client Id and Client Secret, the designated administrator must be assigned a Role with the following criteria." 
+key_features: +- Email security +- Malicious URL and attachment detection +requirements: +- Mimecast 2.0 Application Client ID +- Mimecast 2.0 Application Client Secret +links: + - "[Mimecast](http://mimecast.com)" +references: + - "[Mimecast API](https://www.mimecast.com/developer/documentation)" +version: 1.0.0 +connection_version: 1 +supported_versions: ["Mimecast 2.0 API 2025-01-23"] +vendor: rapid7 +support: rapid7 +cloud_ready: true +sdk: + type: slim + version: 6.2.4 + user: nobody +status: [] +resources: + source_url: https://github.com/rapid7/insightconnect-plugins/tree/master/plugins/mimecast_v2 + license_url: https://github.com/rapid7/insightconnect-plugins/blob/master/LICENSE + vendor_url: http://mimecast.com +tags: +- mimecast +- email +hub_tags: + use_cases: [threat_detection_and_response] + keywords: [mimecast, email, cloud_enabled] + features: [] +version_history: +- "1.0.0 - Initial plugin" +connection: + client_id: + title: Client ID + description: The Mimecast 2.0 Application Client ID + type: credential_secret_key + example: ZA7vkbu7NqcfBcGrXyWW8Rzk2sv2un2DCY7GGCX4BFWgJBZM + required: true + placeholder: Client ID + tooltip: Enter the Client ID obtained from the Mimecast 2.0 API Application + client_secret: + title: Client Secret + description: The Mimecast 2.0 Application Client Secret + type: credential_secret_key + example: ohknqKJpCd99XTkHjeVuc2TgYaKWrWn4tEEHCLkXFZhFgDRdcpNGVx3EipX2CvmE + required: true + placeholder: Client Secret + tooltip: Enter the Client Secret obtained from the Mimecast 2.0 API Application +tasks: + monitor_siem_logs: + title: Monitor SIEM Logs + description: Monitor and retrieve the latest logs + output: + data: + title: Data + description: List of logs + type: "[]object" + required: true + example: [{ "processingId": "processingId", "aggregateId": "aggregateId", "spamProcessingDetail": "Spam Processing Detail", "numberAttachments": "1", "subject": "siem_recipient - email subject line", "tlsVersion": "TLSv1.2", 
"senderEnvelope": "user@example.com", "messageId": "messageId", "senderHeader": "user@example.com", "rejectionType": "rejectionType", "eventType": "receipt", "accountId": "C0A0", "recipients": "user@example.com", "tlsCipher": "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", "action": "Allow", "subType": "Allow", "spamInfo": null, "senderIp": "123.123.123.123", "timestamp": 1689685338597, "direction": "Inbound", "spamScore": "0", "spamDetectionLevel": "0" }] \ No newline at end of file diff --git a/plugins/mimecast_v2/requirements.txt b/plugins/mimecast_v2/requirements.txt new file mode 100644 index 0000000000..de275296d3 --- /dev/null +++ b/plugins/mimecast_v2/requirements.txt @@ -0,0 +1,5 @@ +# List third-party dependencies here, separated by newlines. +# All dependencies must be version-pinned, eg. requests==1.2.0 +# See: https://pip.pypa.io/en/stable/user_guide/#requirements-files +parameterized==0.8.1 +freezegun==1.5.1 diff --git a/plugins/mimecast_v2/setup.py b/plugins/mimecast_v2/setup.py new file mode 100644 index 0000000000..6a12dc8aab --- /dev/null +++ b/plugins/mimecast_v2/setup.py @@ -0,0 +1,14 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT +from setuptools import setup, find_packages + + +setup(name="mimecast_v2-rapid7-plugin", + version="1.0.0", + description="[Mimecast](https://www.mimecast.com) is a set of cloud services designed to provide next generation protection against advanced email-borne threats such as malicious URLs, malware, impersonation attacks, as well as internally generated threats, with a focus on email security. This plugin utilizes the [Mimecast API](https://www.mimecast.com/developer/documentation)", + author="rapid7", + author_email="", + url="", + packages=find_packages(), + install_requires=['insightconnect-plugin-runtime'], # Add third-party dependencies to requirements.txt, not here! 
+ scripts=['bin/icon_mimecast_v2'] + ) diff --git a/plugins/mimecast_v2/unit_test/__init__.py b/plugins/mimecast_v2/unit_test/__init__.py new file mode 100644 index 0000000000..d9ae09fc16 --- /dev/null +++ b/plugins/mimecast_v2/unit_test/__init__.py @@ -0,0 +1,4 @@ +# GENERATED BY INSIGHT-PLUGIN - DO NOT EDIT +import sys + +sys.path.append("../") \ No newline at end of file diff --git a/plugins/mimecast_v2/unit_test/expected/monitor_siem_logs.json.exp b/plugins/mimecast_v2/unit_test/expected/monitor_siem_logs.json.exp new file mode 100644 index 0000000000..5c70e65a8b --- /dev/null +++ b/plugins/mimecast_v2/unit_test/expected/monitor_siem_logs.json.exp @@ -0,0 +1,47 @@ +[ + { + "_offset": 273017, + "_partition": 125, + "accountId": "PDLW18H03", + "aggregateId": "jdWc03xmmwdc903dnsnALm", + "fileExtension": "pdf", + "fileName": "attach-9fac38ge-8387-57f0-dfd9-d709gb3ce83d.pdf", + "md5": "f34f2eya7e54hhr80d3937hd084898fe", + "processingId": "JDgeKPUdn2pe8Suxow5oplvfpz37i8M1lWNyGD-OUPE_1738206319", + "sha1": "534dbfd55d2f56fd213b3735fa74d2e80ege4127", + "sha256": "d473cgeerf4e5g2s133584188cf263131a37c54e33648c203d31309f73aa3b00", + "subtype": null, + "timestamp": 946684800, + "type": "attachment protect" + }, + { + "_offset": 273017, + "_partition": 125, + "accountId": "PDLW18H03", + "aggregateId": "jdWc03xmmwdc903dnsnALm", + "fileExtension": "pdf", + "fileName": "attach-9fac38ge-8387-57f0-dfd9-d709gb3ce83d.pdf", + "md5": "f34f2eya7e54hhr80d3937hd084898fe", + "processingId": "JDgeKPUdn2pe8Suxow5oplvfpz37i8M1lWNyGD-OUPE_1738206319", + "sha1": "534dbfd55d2f56fd213b3735fa74d2e80ege4127", + "sha256": "d473cgeerf4e5g2s133584188cf263131a37c54e33648c203d31309f73aa3b00", + "subtype": null, + "timestamp": 946684800, + "type": "attachment protect" + }, + { + "_offset": 273017, + "_partition": 125, + "accountId": "PDLW18H03", + "aggregateId": "jdWc03xmmwdc903dnsnALm", + "fileExtension": "pdf", + "fileName": "attach-9fac38ge-8387-57f0-dfd9-d709gb3ce83d.pdf", + "md5": 
"f34f2eya7e54hhr80d3937hd084898fe", + "processingId": "JDgeKPUdn2pe8Suxow5oplvfpz37i8M1lWNyGD-OUPE_1738206319", + "sha1": "534dbfd55d2f56fd213b3735fa74d2e80ege4127", + "sha256": "d473cgeerf4e5g2s133584188cf263131a37c54e33648c203d31309f73aa3b00", + "subtype": null, + "timestamp": 946684800, + "type": "attachment protect" + } +] \ No newline at end of file diff --git a/plugins/mimecast_v2/unit_test/responses/authenticate.json.resp b/plugins/mimecast_v2/unit_test/responses/authenticate.json.resp new file mode 100644 index 0000000000..b4427808e1 --- /dev/null +++ b/plugins/mimecast_v2/unit_test/responses/authenticate.json.resp @@ -0,0 +1,6 @@ +{ + "access_token": "RPjLow4SKlsPLycjML3dqvIxzi7B", + "token_type": "Bearer", + "expires_in": 1799, + "scope": "" +} \ No newline at end of file diff --git a/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs.json.resp b/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs.json.resp new file mode 100644 index 0000000000..12f2ac2387 --- /dev/null +++ b/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs.json.resp @@ -0,0 +1 @@ +{ "_offset": 273017, "_partition": 125, "accountId": "PDLW18H03", "aggregateId": "jdWc03xmmwdc903dnsnALm", "fileExtension": "pdf", "fileName": "attach-9fac38ge-8387-57f0-dfd9-d709gb3ce83d.pdf", "md5": "f34f2eya7e54hhr80d3937hd084898fe", "processingId": "JDgeKPUdn2pe8Suxow5oplvfpz37i8M1lWNyGD-OUPE_1738206319", "sha1": "534dbfd55d2f56fd213b3735fa74d2e80ege4127", "sha256": "d473cgeerf4e5g2s133584188cf263131a37c54e33648c203d31309f73aa3b00", "subtype": null, "timestamp": 946684800, "type": "attachment protect" } \ No newline at end of file diff --git a/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_batch.json.resp b/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_batch.json.resp new file mode 100644 index 0000000000..66ce90286d --- /dev/null +++ b/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_batch.json.resp @@ -0,0 +1,11 @@ +{ + "value": [ + { + "url": 
"https://example.com", + "expiry": "2000-01-10T00:10:00.000Z", + "size": 354 + } + ], + "@nextPage": "NDU1NA==", + "isCaughtUp": true +} \ No newline at end of file diff --git a/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_error.json.resp b/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_error.json.resp new file mode 100644 index 0000000000..e29af9c064 --- /dev/null +++ b/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_error.json.resp @@ -0,0 +1,7 @@ +{ + "fail": [ + { + "code": "unauthorized" + } + ] +} \ No newline at end of file diff --git a/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_json_error.json.resp b/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_json_error.json.resp new file mode 100644 index 0000000000..f57ca5f651 --- /dev/null +++ b/plugins/mimecast_v2/unit_test/responses/monitor_siem_logs_json_error.json.resp @@ -0,0 +1 @@ +This is not JSON \ No newline at end of file diff --git a/plugins/mimecast_v2/unit_test/test_monitor_siem_logs.py b/plugins/mimecast_v2/unit_test/test_monitor_siem_logs.py new file mode 100644 index 0000000000..c5ab0c5120 --- /dev/null +++ b/plugins/mimecast_v2/unit_test/test_monitor_siem_logs.py @@ -0,0 +1,225 @@ +import sys +import os + +sys.path.append(os.path.abspath("../")) + +from icon_mimecast_v2.tasks.monitor_siem_logs import MonitorSiemLogs +from icon_mimecast_v2.tasks.monitor_siem_logs.schema import MonitorSiemLogsOutput + +from unittest import TestCase +from unittest.mock import patch +from util import Util +from parameterized import parameterized +from jsonschema import validate +from freezegun import freeze_time + + +STUB_STATE_EXPECTED = { + "log_hashes": [ + "d98dafb4f13b3bb70539a6c251a8a9b42ea80de1", + "d98dafb4f13b3bb70539a6c251a8a9b42ea80de1", + "d98dafb4f13b3bb70539a6c251a8a9b42ea80de1", + ], + "query_config": { + "attachment protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-06"}, + "receipt": {"caught_up": True, "next_page": 
"NDU1NA==", "query_date": "2000-01-06"}, + "url protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-06"}, + }, +} + +STUB_STATE_PAGINATING = { + "log_hashes": [], + "query_config": { + "attachment protect": {"caught_up": False, "next_page": "NDU1NA==", "query_date": "2000-01-06"}, + "receipt": {"caught_up": False, "next_page": "NDU1NA==", "query_date": "2000-01-06"}, + "url protect": {"caught_up": False, "next_page": "NDU1NA==", "query_date": "2000-01-06"}, + }, +} + +STUB_STATE_PAGINATING_LAST_PAGE = { + "log_hashes": [], + "query_config": { + "attachment protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-06"}, + "receipt": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-06"}, + "url protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-06"}, + }, +} + +STUB_STATE_SECOND_RUN_EXPECTED = { + "log_hashes": [ + "d98dafb4f13b3bb70539a6c251a8a9b42ea80de1", + "d98dafb4f13b3bb70539a6c251a8a9b42ea80de1", + "d98dafb4f13b3bb70539a6c251a8a9b42ea80de1", + ], + "query_config": { + "attachment protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-07"}, + "receipt": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-07"}, + "url protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-07"}, + }, +} + +STUB_STATE_EXPECTED_CUSTOM_CONFIG = { + "log_hashes": [ + "d98dafb4f13b3bb70539a6c251a8a9b42ea80de1", + "d98dafb4f13b3bb70539a6c251a8a9b42ea80de1", + "d98dafb4f13b3bb70539a6c251a8a9b42ea80de1", + ], + "query_config": { + "attachment protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "1999-12-31"}, + "receipt": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "1999-12-31"}, + "url protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "1999-12-31"}, + }, +} + +STUB_CUSTOM_CONFIG = { + "query_config": { + "attachment protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": 
"1999-12-31"}, + "receipt": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "1999-12-31"}, + "url protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "1999-12-31"}, + }, + "page_size": 1, + "thread_count": 1, +} + +STUB_CUSTOM_CONFIG_EXCEED_DATE = { + "query_config": { + "attachment protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "1999-12-30"}, + "receipt": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "1999-12-30"}, + "url protect": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "1999-12-30"}, + }, + "page_size": 1, + "thread_count": 1, +} + + +@freeze_time("2000-01-07T00:00:00.000000Z") +class TestMonitorLogs(TestCase): + @classmethod + @patch("requests.Session.send", side_effect=Util.mocked_request) + def setUpClass(cls, mocked_request) -> None: + cls.task = Util.default_connector(MonitorSiemLogs()) + + @parameterized.expand( + [ + [ + "starting", + {}, + {}, + Util.read_file_to_dict("expected/monitor_siem_logs.json.exp"), + STUB_STATE_EXPECTED, + True, + 200, + None, + ], + [ + "paginating", + STUB_STATE_PAGINATING, + {}, + Util.read_file_to_dict("expected/monitor_siem_logs.json.exp"), + STUB_STATE_EXPECTED, + True, + 200, + None, + ], + [ + "paginating_last_page", + STUB_STATE_PAGINATING_LAST_PAGE, + {}, + Util.read_file_to_dict("expected/monitor_siem_logs.json.exp"), + STUB_STATE_SECOND_RUN_EXPECTED, + False, + 200, + None, + ], + [ + "custom_config", + {}, + STUB_CUSTOM_CONFIG, + Util.read_file_to_dict("expected/monitor_siem_logs.json.exp"), + STUB_STATE_EXPECTED_CUSTOM_CONFIG, + True, + 200, + None, + ], + [ + "custom_config_past_cutoff", + {}, + STUB_CUSTOM_CONFIG_EXCEED_DATE, + Util.read_file_to_dict("expected/monitor_siem_logs.json.exp"), + STUB_STATE_EXPECTED_CUSTOM_CONFIG, + True, + 200, + None, + ], + ] + ) + @patch("requests.Session.send", side_effect=Util.mocked_request) + def test_monitor_logs( + self, + test_name, + state, + custom_config, + expected_output, + 
expected_state, + expected_has_more_pages, + expected_status_code, + expected_error, + mock_request, + ): + output, state, has_more_pages, status_code, error = self.task.run( + params={}, state=state, custom_config=custom_config + ) + self.assertEqual(expected_output, output) + self.assertEqual(expected_state, state) + self.assertEqual(expected_has_more_pages, has_more_pages) + self.assertEqual(expected_status_code, status_code) + self.assertEqual(expected_error, error) + validate(output, MonitorSiemLogsOutput.schema) + + @parameterized.expand( + [ + [ + "401", + {"query_config": {"receipt": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-01"}}}, + "Invalid API key provided.", + "Verify your API key configured in your connection is correct.", + 401, + ], + [ + "500", + {"query_config": {"receipt": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-02"}}}, + "Something unexpected occurred.", + "Check the logs and if the issue persists please contact support.", + 500, + ], + [ + "json_decode", + {"query_config": {"receipt": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-03"}}}, + "Received an unexpected response from the server.", + "(non-JSON or no response was received).", + 500, + ], + [ + "unknown", + {"query_config": {"receipt": {"caught_up": True, "next_page": "NDU1NA==", "query_date": "2000-01-04"}}}, + "Something unexpected occurred.", + "Check the logs and if the issue persists please contact support.", + 500, + ], + ] + ) + @patch("requests.Session.send", side_effect=Util.mocked_request) + def test_monitor_logs_errors( + self, + test_name, + state, + expected_cause, + expected_assistance, + expected_status_code, + mock_request, + ): + output, state, has_more_pages, status_code, error = self.task.run(params={}, state=state) + self.assertEqual(expected_status_code, status_code) + self.assertEqual(expected_cause, error.cause) + self.assertEqual(expected_assistance, error.assistance) + validate(output, 
import sys
import os
import json
import logging

sys.path.append(os.path.abspath("../"))

from requests.exceptions import HTTPError
from icon_mimecast_v2.connection.connection import Connection
from icon_mimecast_v2.connection.schema import Input
from icon_mimecast_v2.util.constants import BASE_URL
import gzip
from io import BytesIO


class Util:
    """Shared unit-test helpers: connector setup, fixture loading and a mocked requests transport."""

    @staticmethod
    def default_connector(action, connect_params: object = None):
        """Build a connected Connection (with test credentials by default) and attach it to the action/task."""
        default_connection = Connection()
        default_connection.logger = logging.getLogger("connection logger")
        if connect_params:
            params = connect_params
        else:
            params = {
                Input.CLIENT_ID: {"secretKey": "test"},
                Input.CLIENT_SECRET: {"secretKey": "test"},
            }
        default_connection.connect(params)
        action.connection = default_connection
        action.logger = logging.getLogger("action logger")
        return action

    @staticmethod
    def read_file_to_string(filename: str) -> str:
        """Read a fixture file (relative to this directory) as UTF-8 text."""
        with open(
            os.path.join(os.path.dirname(os.path.realpath(__file__)), filename), "r", encoding="utf-8"
        ) as file_reader:
            return file_reader.read()

    @staticmethod
    def read_file_to_bytes(filename: str) -> bytes:
        """Read a fixture file (relative to this directory) as raw bytes."""
        with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), filename), "rb") as file_reader:
            return file_reader.read()

    @staticmethod
    def read_file_to_dict(filename: str) -> dict:
        """Read a JSON fixture file into a dict."""
        return json.loads(Util.read_file_to_string(filename))

    @staticmethod
    def mocked_request(*args, **kwargs):
        """Side effect for requests.Session.send: route known URLs to canned fixture responses."""

        class MockResponse:
            def __init__(self, status_code: int, filename: str = None, url: str = None, gzipped=False):
                self.filename = filename
                self.status_code = status_code
                self.text = "This is some error text"
                self.url = url
                if filename:
                    # FIX: interpolate the fixture name (path was previously a broken literal)
                    self.text = Util.read_file_to_string(f"responses/{filename}.json.resp")
                # 'gzipped' instead of 'gzip' so the module name is not shadowed
                if gzipped:
                    self.content = self._gzip_compress(self.text)

            def _gzip_compress(self, data):
                """Compress content using gzip."""
                buf = BytesIO()  # Create a buffer to hold the gzipped content
                with gzip.GzipFile(fileobj=buf, mode="wb") as f:
                    f.write(data.encode("utf-8"))  # Write the string data to gzip (must be bytes)
                return buf.getvalue()

            def json(self):
                return json.loads(self.text)

            def raise_for_status(self):
                if self.status_code == 200:
                    return
                raise HTTPError("Bad response", response=self)

        if args[0].url == f"{BASE_URL}oauth/token":
            return MockResponse(200, "authenticate")
        if args[0].url in [
            f"{BASE_URL}siem/v1/batch/events/cg?type=receipt&dateRangeStartsAt=2000-01-06&dateRangeEndsAt=2000-01-06&pageSize=100",
            f"{BASE_URL}siem/v1/batch/events/cg?type=url+protect&dateRangeStartsAt=2000-01-06&dateRangeEndsAt=2000-01-06&pageSize=100",
            f"{BASE_URL}siem/v1/batch/events/cg?type=attachment+protect&dateRangeStartsAt=2000-01-06&dateRangeEndsAt=2000-01-06&pageSize=100",
            f"{BASE_URL}siem/v1/batch/events/cg?type=receipt&dateRangeStartsAt=2000-01-06&dateRangeEndsAt=2000-01-06&pageSize=100&nextPage=NDU1NA%3D%3D",
            f"{BASE_URL}siem/v1/batch/events/cg?type=url+protect&dateRangeStartsAt=2000-01-06&dateRangeEndsAt=2000-01-06&pageSize=100&nextPage=NDU1NA%3D%3D",
            f"{BASE_URL}siem/v1/batch/events/cg?type=attachment+protect&dateRangeStartsAt=2000-01-06&dateRangeEndsAt=2000-01-06&pageSize=100&nextPage=NDU1NA%3D%3D",
            f"{BASE_URL}siem/v1/batch/events/cg?type=attachment+protect&dateRangeStartsAt=2000-01-07&dateRangeEndsAt=2000-01-07&pageSize=100",
            f"{BASE_URL}siem/v1/batch/events/cg?type=receipt&dateRangeStartsAt=2000-01-07&dateRangeEndsAt=2000-01-07&pageSize=100",
            f"{BASE_URL}siem/v1/batch/events/cg?type=url+protect&dateRangeStartsAt=2000-01-07&dateRangeEndsAt=2000-01-07&pageSize=100",
            f"{BASE_URL}siem/v1/batch/events/cg?type=attachment+protect&dateRangeStartsAt=1999-12-31&dateRangeEndsAt=1999-12-31&pageSize=1",
            f"{BASE_URL}siem/v1/batch/events/cg?type=receipt&dateRangeStartsAt=1999-12-31&dateRangeEndsAt=1999-12-31&pageSize=1",
            f"{BASE_URL}siem/v1/batch/events/cg?type=url+protect&dateRangeStartsAt=1999-12-31&dateRangeEndsAt=1999-12-31&pageSize=1",
        ]:
            return MockResponse(200, "monitor_siem_logs_batch")
        if (
            args[0].url
            == f"{BASE_URL}siem/v1/batch/events/cg?type=receipt&dateRangeStartsAt=2000-01-02&dateRangeEndsAt=2000-01-02&pageSize=100"
        ):
            return MockResponse(401, "monitor_siem_logs_error")
        if (
            args[0].url
            == f"{BASE_URL}siem/v1/batch/events/cg?type=receipt&dateRangeStartsAt=2000-01-04&dateRangeEndsAt=2000-01-04&pageSize=100"
        ):
            return MockResponse(401, "monitor_siem_logs_json_error")
        if (
            args[0].url
            == f"{BASE_URL}siem/v1/batch/events/cg?type=receipt&dateRangeStartsAt=2000-01-05&dateRangeEndsAt=2000-01-05&pageSize=100"
        ):
            raise AttributeError
        if args[0].url == "https://example.com/":
            return MockResponse(200, "monitor_siem_logs", gzipped=True)