diff --git a/application/apps/indexer/Cargo.lock b/application/apps/indexer/Cargo.lock
index a838680f1c..e350e876fb 100644
--- a/application/apps/indexer/Cargo.lock
+++ b/application/apps/indexer/Cargo.lock
@@ -2,6 +2,16 @@
 # It is not intended for manual editing.
 version = 4
 
+[[package]]
+name = "Inflector"
+version = "0.11.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
+dependencies = [
+ "lazy_static",
+ "regex",
+]
+
 [[package]]
 name = "addr2line"
 version = "0.24.2"
@@ -26,6 +36,15 @@ dependencies = [
  "memchr",
 ]
 
+[[package]]
+name = "aligned"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "377e4c0ba83e4431b10df45c1d4666f178ea9c552cac93e60c3a88bf32785923"
+dependencies = [
+ "as-slice",
+]
+
 [[package]]
 name = "android-tzdata"
 version = "0.1.1"
@@ -123,6 +142,160 @@ version = "0.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
 
+[[package]]
+name = "as-slice"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "516b6b4f0e40d50dcda9365d53964ec74560ad4284da2e7fc97122cd83174516"
+dependencies = [
+ "stable_deref_trait",
+]
+
+[[package]]
+name = "async-channel"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
+dependencies = [
+ "concurrent-queue",
+ "event-listener 2.5.3",
+ "futures-core",
+]
+
+[[package]]
+name = "async-channel"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
+dependencies = [
+ "concurrent-queue",
+ "event-listener-strategy",
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-executor"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec"
+dependencies = [
+ "async-task",
+ "concurrent-queue",
+ "fastrand",
+ "futures-lite",
+ "slab",
+]
+
+[[package]]
+name = "async-global-executor"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c"
+dependencies = [
+ "async-channel 2.3.1",
+ "async-executor",
+ "async-io",
+ "async-lock",
+ "blocking",
+ "futures-lite",
+ "once_cell",
+]
+
+[[package]]
+name = "async-io"
+version = "2.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059"
+dependencies = [
+ "async-lock",
+ "cfg-if",
+ "concurrent-queue",
+ "futures-io",
+ "futures-lite",
+ "parking",
+ "polling",
+ "rustix",
+ "slab",
+ "tracing",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "async-lock"
+version = "3.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18"
+dependencies = [
+ "event-listener 5.3.1",
+ "event-listener-strategy",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-process"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63255f1dc2381611000436537bbedfe83183faa303a5a0edaf191edef06526bb"
+dependencies = [
+ "async-channel 2.3.1",
+ "async-io",
+ "async-lock",
+ "async-signal",
+ "async-task",
+ "blocking",
+ "cfg-if",
+ "event-listener 5.3.1",
+ "futures-lite",
+ "rustix",
+ "tracing",
+]
+
+[[package]]
+name = "async-signal"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "637e00349800c0bdf8bfc21ebbc0b6524abea702b0da4168ac00d070d0c0b9f3"
+dependencies = [
+ "async-io",
+ "async-lock",
+ "atomic-waker",
+ "cfg-if",
+ "futures-core",
+ "futures-io",
+ "rustix",
+ "signal-hook-registry",
+ "slab",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "async-std"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615"
+dependencies = [
+ "async-channel 1.9.0",
+ "async-global-executor",
+ "async-io",
+ "async-lock",
+ "async-process",
+ "crossbeam-utils",
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-lite",
+ "gloo-timers",
+ "kv-log-macro",
+ "log",
+ "memchr",
+ "once_cell",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+ "wasm-bindgen-futures",
+]
+
 [[package]]
 name = "async-stream"
 version = "0.3.6"
@@ -145,6 +318,29 @@ dependencies = [
  "syn 2.0.90",
 ]
 
+[[package]]
+name = "async-task"
+version = "4.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
+
+[[package]]
+name = "async-trait"
+version = "0.1.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.90",
+]
+
+[[package]]
+name = "atomic-waker"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
+
 [[package]]
 name = "atty"
 version = "0.2.14"
@@ -186,6 +382,21 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "bit-set"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
+dependencies = [
+ "bit-vec",
+]
+
+[[package]]
+name = "bit-vec"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"
@@ -211,6 +422,19 @@ dependencies = [
  "constant_time_eq",
 ]
 
+[[package]]
+name = "blocking"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea"
+dependencies = [
+ "async-channel 2.3.1",
+ "async-task",
+ "futures-io",
+ "futures-lite",
+ "piper",
+]
+
 [[package]]
 name = "bstr"
 version = "1.11.0"
@@ -218,7 +442,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22"
 dependencies = [
  "memchr",
- "regex-automata",
+ "regex-automata 0.4.9",
  "serde",
 ]
 
@@ -417,6 +641,15 @@ version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
 
+[[package]]
+name = "concurrent-queue"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973"
+dependencies = [
+ "crossbeam-utils",
+]
+
 [[package]]
 name = "console"
 version = "0.15.8"
@@ -536,6 +769,35 @@ version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
 
+[[package]]
+name = "ctor"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
+dependencies = [
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "ctor"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501"
+dependencies = [
+ "quote",
+ "syn 2.0.90",
+]
+
+[[package]]
+name = "cvt"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2ae9bf77fbf2d39ef573205d554d87e86c12f1994e9ea335b0651b9b278bcf1"
+dependencies = [
+ "cfg-if",
+]
+
 [[package]]
 name = "darling"
 version = "0.20.10"
@@ -644,9 +906,9 @@ dependencies = [
 
 [[package]]
 name = "dlt-core"
-version = "0.17.0"
+version = "0.18.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa52d43b97a134644192c66296e5d3e7ed8b3d409b117c62203047bb42c6b9f1"
+checksum = "0b304e32f1164b8c2ef1dc746b32d321f25f88a32672f0f5bcba2df0f70a3b70"
 dependencies = [
  "buf_redux 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "byteorder",
@@ -739,9 +1001,9 @@ dependencies = [
 
 [[package]]
 name = "envvars"
-version = "0.1.9"
+version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f62cb1fd7910188b23784a60e0738f3e85925e863617d61d1d9c9d7c59d99289"
+checksum = "7e09f83a3152f7c37f9dd3d423c0c96cbe8fc6f671731bcd60cf09e57370d4ec"
 dependencies = [
  "blake3",
  "fs_extra",
@@ -780,6 +1042,42 @@ dependencies = [
  "arrayvec",
 ]
 
+[[package]]
+name = "event-listener"
+version = "2.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
+
+[[package]]
+name = "event-listener"
+version = "5.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba"
+dependencies = [
+ "concurrent-queue",
+ "parking",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "event-listener-strategy"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1"
+dependencies = [
+ "event-listener 5.3.1",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "extend"
+version = "0.1.0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.90",
+]
+
 [[package]]
 name = "fastrand"
 version = "2.2.0"
@@ -804,12 +1102,40 @@ dependencies = [
  "anyhow",
 ]
 
+[[package]]
+name = "fluvio-future"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a28090046453db33a8bace0e1f71350b9878cd7fb576e48592ae8284bc83c7e"
+dependencies = [
+ "anyhow",
+ "async-std",
+ "cfg-if",
+ "thiserror 1.0.69",
+ "tracing",
+ "tracing-subscriber",
+]
+
 [[package]]
 name = "fnv"
 version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
 
+[[package]]
+name = "fs_at"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14af6c9694ea25db25baa2a1788703b9e7c6648dcaeeebeb98f7561b5384c036"
+dependencies = [
+ "aligned",
+ "cfg-if",
+ "cvt",
+ "libc",
+ "nix 0.29.0",
+ "windows-sys 0.52.0",
+]
+
 [[package]]
 name = "fs_extra"
 version = "1.3.0"
@@ -864,6 +1190,19 @@ version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
 
+[[package]]
+name = "futures-lite"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cef40d21ae2c515b51041df9ed313ed21e572df340ea58a922a0aefe7e8891a1"
+dependencies = [
+ "fastrand",
+ "futures-core",
+ "futures-io",
+ "parking",
+ "pin-project-lite",
+]
+
 [[package]]
 name = "futures-macro"
 version = "0.3.31"
@@ -916,12 +1255,35 @@ dependencies = [
  "wasi",
 ]
 
+[[package]]
+name = "ghost"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0e085ded9f1267c32176b40921b9754c474f7dd96f7e808d4a982e48aa1e854"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.90",
+]
+
 [[package]]
 name = "gimli"
 version = "0.31.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
 
+[[package]]
+name = "gloo-timers"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "js-sys",
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "grep-matcher"
 version = "0.1.7"
@@ -940,8 +1302,8 @@ dependencies = [
  "bstr",
  "grep-matcher",
  "log",
- "regex-automata",
- "regex-syntax",
+ "regex-automata 0.4.9",
+ "regex-syntax 0.8.5",
 ]
 
 [[package]]
@@ -1075,6 +1437,7 @@ dependencies = [
  "session",
  "sources",
  "structopt",
+ "stypes",
  "tokio",
  "tokio-util",
  "uuid",
@@ -1106,6 +1469,28 @@ dependencies = [
  "similar",
 ]
 
+[[package]]
+name = "inventory"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0eb5160c60ba1e809707918ee329adb99d222888155835c6feedba19f6c3fd4"
+dependencies = [
+ "ctor 0.1.26",
+ "ghost",
+ "inventory-impl",
+]
+
+[[package]]
+name = "inventory-impl"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e41b53715c6f0c4be49510bb82dee2c1e51c8586d885abe65396e82ed518548"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
 [[package]]
 name = "io-kit-sys"
 version = "0.4.1"
@@ -1167,6 +1552,15 @@ dependencies = [
  "wasm-bindgen",
 ]
 
+[[package]]
+name = "kv-log-macro"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
+dependencies = [
+ "log",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.5.0"
@@ -1179,6 +1573,12 @@ version = "0.2.167"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc"
 
+[[package]]
+name = "libm"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa"
+
 [[package]]
 name = "libmimalloc-sys"
 version = "0.1.39"
@@ -1246,6 +1646,9 @@ name = "log"
 version = "0.4.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
+dependencies = [
+ "value-bag",
+]
 
 [[package]]
 name = "mach"
@@ -1265,6 +1668,15 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata 0.1.10",
+]
+
 [[package]]
 name = "memchr"
 version = "2.7.4"
@@ -1409,6 +1821,49 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "nj-core"
+version = "6.1.0"
+source = "git+https://github.com/infinyon/node-bindgen.git?branch=master#1c6f19b658b0acaf71a25f5f2e4d8b4d970363ad"
+dependencies = [
+ "async-trait",
+ "ctor 0.2.9",
+ "fluvio-future",
+ "futures-lite",
+ "inventory",
+ "libc",
+ "nj-sys",
+ "num-bigint",
+ "pin-utils",
+ "tracing",
+]
+
+[[package]]
+name = "nj-derive"
+version = "3.4.3"
+source = "git+https://github.com/infinyon/node-bindgen.git?branch=master#1c6f19b658b0acaf71a25f5f2e4d8b4d970363ad"
+dependencies = [
+ "Inflector",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "nj-sys"
+version = "4.0.0"
+source = "git+https://github.com/infinyon/node-bindgen.git?branch=master#1c6f19b658b0acaf71a25f5f2e4d8b4d970363ad"
+
+[[package]]
+name = "node-bindgen"
+version = "6.1.0"
+source = "git+https://github.com/infinyon/node-bindgen.git?branch=master#1c6f19b658b0acaf71a25f5f2e4d8b4d970363ad"
+dependencies = [
+ "nj-core",
+ "nj-derive",
+ "nj-sys",
+]
+
 [[package]]
 name = "nom"
 version = "7.1.3"
@@ -1419,6 +1874,44 @@ dependencies = [
  "minimal-lexical",
 ]
 
+[[package]]
+name = "normpath"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c8911957c4b1549ac0dc74e30db9c8b0e66ddcd6d7acc33098f4c63a64a6d7ed"
+dependencies = [
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
+dependencies = [
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.46"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
+dependencies = [
+ "num-traits",
+]
+
 [[package]]
 name = "num-traits"
 version = "0.2.19"
@@ -1426,6 +1919,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
 dependencies = [
  "autocfg",
+ "libm",
 ]
 
 [[package]]
@@ -1461,6 +1955,18 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
 
+[[package]]
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+
+[[package]]
+name = "parking"
+version = "2.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
+
 [[package]]
 name = "parking_lot"
 version = "0.12.3"
@@ -1515,6 +2021,12 @@ dependencies = [
  "thiserror 2.0.3",
 ]
 
+[[package]]
+name = "paste"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
+
 [[package]]
 name = "pcap-parser"
 version = "0.16.0"
@@ -1576,6 +2088,17 @@ version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
 
+[[package]]
+name = "piper"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066"
+dependencies = [
+ "atomic-waker",
+ "fastrand",
+ "futures-io",
+]
+
 [[package]]
 name = "pkg-config"
 version = "0.3.31"
@@ -1610,6 +2133,21 @@ dependencies = [
  "plotters-backend",
 ]
 
+[[package]]
+name = "polling"
+version = "3.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f"
+dependencies = [
+ "cfg-if",
+ "concurrent-queue",
+ "hermit-abi 0.4.0",
+ "pin-project-lite",
+ "rustix",
+ "tracing",
+ "windows-sys 0.59.0",
+]
+
 [[package]]
 name = "portable-atomic"
 version = "1.10.0"
@@ -1689,12 +2227,39 @@ dependencies = [
  "regex",
  "serde",
  "serde_json",
+ "stypes",
  "tempfile",
  "thiserror 2.0.3",
  "tokio-util",
  "uuid",
 ]
 
+[[package]]
+name = "proptest"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d"
+dependencies = [
+ "bit-set",
+ "bit-vec",
+ "bitflags 2.6.0",
+ "lazy_static",
+ "num-traits",
+ "rand",
+ "rand_chacha",
+ "rand_xorshift",
+ "regex-syntax 0.8.5",
+ "rusty-fork",
+ "tempfile",
+ "unarray",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
 [[package]]
 name = "quick-xml"
 version = "0.23.1"
@@ -1762,6 +2327,15 @@ dependencies = [
  "getrandom",
 ]
 
+[[package]]
+name = "rand_xorshift"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f"
+dependencies = [
+ "rand_core",
+]
+
 [[package]]
 name = "rayon"
 version = "1.10.0"
@@ -1810,8 +2384,17 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata",
- "regex-syntax",
+ "regex-automata 0.4.9",
+ "regex-syntax 0.8.5",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax 0.6.29",
 ]
 
 [[package]]
@@ -1822,15 +2405,35 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax",
+ "regex-syntax 0.8.5",
 ]
 
+[[package]]
+name = "regex-syntax"
+version = "0.6.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+
 [[package]]
 name = "regex-syntax"
 version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
 
+[[package]]
+name = "remove_dir_all"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "808cc0b475acf76adf36f08ca49429b12aad9f678cb56143d5b3cb49b9a1dd08"
+dependencies = [
+ "cfg-if",
+ "cvt",
+ "fs_at",
+ "libc",
+ "normpath",
+ "windows-sys 0.59.0",
+]
+
 [[package]]
 name = "rustc-demangle"
 version = "0.1.24"
@@ -1880,6 +2483,18 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "rusty-fork"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
+dependencies = [
+ "fnv",
+ "quick-error",
+ "tempfile",
+ "wait-timeout",
+]
+
 [[package]]
 name = "rustyline"
 version = "15.0.0"
@@ -2010,6 +2625,7 @@ dependencies = [
  "serde_json",
  "serialport",
  "sources",
+ "stypes",
  "tempfile",
  "thiserror 2.0.3",
  "tokio",
@@ -2019,6 +2635,15 @@ dependencies = [
  "walkdir",
 ]
 
+[[package]]
+name = "sharded-slab"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+dependencies = [
+ "lazy_static",
+]
+
 [[package]]
 name = "shellexpand"
 version = "3.1.0"
@@ -2143,6 +2768,7 @@ dependencies = [
  "regex",
  "serde",
  "shellexpand",
+ "stypes",
  "thiserror 2.0.3",
  "tikv-jemallocator",
  "tokio",
@@ -2152,6 +2778,12 @@ dependencies = [
  "uuid",
 ]
 
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
+
 [[package]]
 name = "stfu8"
 version = "0.2.7"
@@ -2200,6 +2832,27 @@ dependencies = [
  "syn 1.0.109",
 ]
 
+[[package]]
+name = "stypes"
+version = "0.1.0"
+dependencies = [
+ "bincode",
+ "dlt-core",
+ "envvars",
+ "extend",
+ "node-bindgen",
+ "paste",
+ "proptest",
+ "regex",
+ "remove_dir_all",
+ "serde",
+ "thiserror 2.0.3",
+ "tokio",
+ "ts-rs",
+ "uuid",
+ "walkdir",
+]
+
 [[package]]
 name = "syn"
 version = "1.0.109"
@@ -2235,6 +2888,15 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
+[[package]]
+name = "termcolor"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
+dependencies = [
+ "winapi-util",
+]
+
 [[package]]
 name = "text_grep"
 version = "0.1.0"
@@ -2298,6 +2960,16 @@ dependencies = [
  "syn 2.0.90",
 ]
 
+[[package]]
+name = "thread_local"
+version = "1.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+]
+
 [[package]]
 name = "tikv-jemalloc-sys"
 version = "0.6.0+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7"
@@ -2398,6 +3070,97 @@ dependencies = [
  "tokio",
 ]
 
+[[package]]
+name = "tracing"
+version = "0.1.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
+dependencies = [
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.90",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
+dependencies = [
+ "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+dependencies = [
+ "log",
+ "once_cell",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
+dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+]
+
+[[package]]
+name = "ts-rs"
+version = "10.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e640d9b0964e9d39df633548591090ab92f7a4567bc31d3891af23471a3365c6"
+dependencies = [
+ "lazy_static",
+ "thiserror 2.0.3",
+ "ts-rs-macros",
+ "uuid",
+]
+
+[[package]]
+name = "ts-rs-macros"
+version = "10.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e9d8656589772eeec2cf7a8264d9cda40fb28b9bc53118ceb9e8c07f8f38730"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.90",
+ "termcolor",
+]
+
+[[package]]
+name = "unarray"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
+
 [[package]]
 name = "unescaper"
 version = "0.1.5"
@@ -2459,6 +3222,18 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3b59fc5417e036e53226bbebd90196825d358624fd5577432c4e486c95b1b096"
 
+[[package]]
+name = "valuable"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+
+[[package]]
+name = "value-bag"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2"
+
 [[package]]
 name = "vec_map"
 version = "0.8.2"
@@ -2482,6 +3257,15 @@ dependencies = [
  "unicode-segmentation",
 ]
 
+[[package]]
+name = "wait-timeout"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "walkdir"
 version = "2.5.0"
@@ -2524,6 +3308,18 @@ dependencies = [
  "wasm-bindgen-shared",
 ]
 
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "wasm-bindgen",
+ "web-sys",
+]
+
 [[package]]
 name = "wasm-bindgen-macro"
 version = "0.2.97"
diff --git a/application/apps/indexer/Cargo.toml b/application/apps/indexer/Cargo.toml
index 5129232ae5..02852d740a 100644
--- a/application/apps/indexer/Cargo.toml
+++ b/application/apps/indexer/Cargo.toml
@@ -13,6 +13,7 @@ members = [
     "processor",
     "session",
     "sources",
+    "stypes"
 ]
 
 [workspace.dependencies]
@@ -23,7 +24,7 @@ thiserror = "2.0"
 lazy_static = "1.5"
 tokio = { version = "1", features = ["full"] }
 tokio-stream = "0.1"
-dlt-core = "0.17"
+dlt-core = "0.18.0"
 crossbeam-channel = "0.5"
 futures = "0.3"
 tokio-util = "0.7"
@@ -36,6 +37,8 @@ uuid = "1.3"
 grep-searcher = "0.1"
 tempfile = "3.14"
 env_logger = "0.11"
+walkdir = "2.5"
+envvars = "0.1"
 
 ## Development Dependencies ##
 # Support for `html_reports` needs running the benchmarks via `cargo-criterion` tool.
diff --git a/application/apps/indexer/addons/dlt-tools/src/lib.rs b/application/apps/indexer/addons/dlt-tools/src/lib.rs
index fb5791aa74..2ae3273e0b 100644
--- a/application/apps/indexer/addons/dlt-tools/src/lib.rs
+++ b/application/apps/indexer/addons/dlt-tools/src/lib.rs
@@ -135,7 +135,7 @@ mod tests {
         match scan_dlt_ft(input, None, true, cancel).await {
             Ok(files) => {
                 assert_eq!(files.len(), 3);
-                assert_eq!("test1.txt", files.get(0).unwrap().name);
+                assert_eq!("test1.txt", files.first().unwrap().name);
                 assert_eq!("test2.txt", files.get(1).unwrap().name);
                 assert_eq!("test3.txt", files.get(2).unwrap().name);
             }
@@ -179,7 +179,7 @@ mod tests {
         match scan_dlt_ft(input, Some(filter), true, cancel).await {
             Ok(files) => {
                 assert_eq!(files.len(), 1);
-                assert_eq!("test2.txt", files.get(0).unwrap().name);
+                assert_eq!("test2.txt", files.first().unwrap().name);
             }
             Err(error) => {
                 panic!("{}", format!("{error}"));
diff --git a/application/apps/indexer/addons/file-tools/src/lib.rs b/application/apps/indexer/addons/file-tools/src/lib.rs
index d378038be9..38850217ba 100644
--- a/application/apps/indexer/addons/file-tools/src/lib.rs
+++ b/application/apps/indexer/addons/file-tools/src/lib.rs
@@ -9,12 +9,7 @@ use std::{
 const BYTES_TO_READ: u64 = 10240;
 
 pub fn is_binary(file_path: String) -> Result<bool> {
-    let chunks = fetch_starting_chunk(Path::new(&file_path));
-    let buffer = match chunks {
-        Ok(buffer) => buffer,
-        Err(err) => return Err(err),
-    };
-
+    let buffer = fetch_starting_chunk(Path::new(&file_path))?;
     Ok(from_utf8(&buffer).map_or(true, |_file_content| false))
 }
 
diff --git a/application/apps/indexer/addons/someip-tools/src/lib.rs b/application/apps/indexer/addons/someip-tools/src/lib.rs
index 2e78d3c3a6..a96bebae91 100644
--- a/application/apps/indexer/addons/someip-tools/src/lib.rs
+++ b/application/apps/indexer/addons/someip-tools/src/lib.rs
@@ -7,8 +7,7 @@ use nom::{
     combinator::map,
     number::streaming::{be_u16, be_u32, be_u8},
     sequence::tuple,
-    IResult,
-    Finish,
+    Finish, IResult,
 };
 
 use thiserror::Error;
@@ -60,7 +59,8 @@ pub fn parse_prefix(input: &[u8]) -> Result<(&[u8], std::string::String), Error>
                     .map_or_else(String::default, |s| format!(" {}", s))
             )
         },
-    )(input).finish()
+    )(input)
+    .finish()
 }
 
 fn parse_instance(input: &[u8]) -> IResult<&[u8], usize, Error> {
diff --git a/application/apps/indexer/indexer_base/src/config.rs b/application/apps/indexer/indexer_base/src/config.rs
index 6796fc2dae..59578cc7b0 100644
--- a/application/apps/indexer/indexer_base/src/config.rs
+++ b/application/apps/indexer/indexer_base/src/config.rs
@@ -10,19 +10,7 @@
 // is strictly forbidden unless prior written permission is obtained
 // from E.S.R.Labs.
 use serde::{Deserialize, Serialize};
-use std::{
-    net::{IpAddr, SocketAddr},
-    ops::RangeInclusive,
-};
-use thiserror::Error;
-
-#[derive(Error, Debug)]
-pub enum Error {
-    #[error("Problem with configuration found: {0}")]
-    Configuration(String),
-    #[error("IO error: {0:?}")]
-    Io(#[from] std::io::Error),
-}
+use std::ops::RangeInclusive;
 
 /// A IndexSection describes a section of a file by indicies
 /// to identify lines 10-12 (inclusively) => first_line = 10, last_line = 12
@@ -33,11 +21,6 @@ pub struct IndexSection {
     pub last_line: usize,
 }
 
-#[derive(Serialize, Deserialize, Debug)]
-pub struct SectionConfig {
-    pub sections: Vec<IndexSection>,
-}
-
 impl IndexSection {
     pub fn len(&self) -> usize {
         self.last_line - self.first_line + 1
@@ -67,64 +50,3 @@ impl IndexSection {
         }
     }
 }
-
-#[derive(Serialize, Deserialize, Debug)]
-pub struct UdpConnectionInfo {
-    pub multicast_addr: Vec<MulticastInfo>,
-}
-
-/// network socket config
-/// if udp packets are sent via multicast, then the `multicast_addr` has to
-/// be specified
-#[derive(Serialize, Deserialize, Debug)]
-pub struct SocketConfig {
-    pub udp_connection_info: Option<UdpConnectionInfo>,
-    pub bind_addr: String,
-    pub port: String,
-}
-
-impl SocketConfig {
-    pub fn socket_addr(&self) -> Result<SocketAddr, Error> {
-        // Touch IPv4
-        let addr: Option<SocketAddr> = match format!("{}:{}", self.bind_addr, self.port).parse() {
-            Ok(addr) => Some(addr),
-            Err(_) => None,
-        };
-        if let Some(addr) = addr {
-            Ok(addr)
-        } else {
-            // Touch IPv6
-            format!("[{}]:{}", self.bind_addr, self.port)
-                .parse()
-                .map_err(|_| {
-                    Error::Configuration(format!(
-                        "Could not parse socket address from {}, port {}",
-                        self.bind_addr, self.port
-                    ))
-                })
-        }
-    }
-}
-
-/// Multicast config information.
-/// `multiaddr` address must be a valid multicast address
-/// `interface` is the address of the local interface with which the
-/// system should join the
-/// multicast group. If it's equal to `INADDR_ANY` then an appropriate
-/// interface is chosen by the system.
-#[derive(Clone, Serialize, Deserialize, Debug)]
-pub struct MulticastInfo {
-    pub multiaddr: String,
-    pub interface: Option<String>,
-}
-
-impl MulticastInfo {
-    pub fn multicast_addr(&self) -> Result<IpAddr, Error> {
-        self.multiaddr.to_string().parse().map_err(|e| {
-            Error::Configuration(format!(
-                "Could not parse multicast address \"{}\": {e}",
-                self.multiaddr
-            ))
-        })
-    }
-}
diff --git a/application/apps/indexer/indexer_cli/Cargo.toml b/application/apps/indexer/indexer_cli/Cargo.toml
index fcb6ac29f2..245768509e 100644
--- a/application/apps/indexer/indexer_cli/Cargo.toml
+++ b/application/apps/indexer/indexer_cli/Cargo.toml
@@ -27,3 +27,4 @@ serde_json.workspace = true
 tokio = { workspace = true , features = ["full"] }
 tokio-util = { workspace = true, features = ["codec", "net"] }
 uuid.workspace = true
+stypes = { path = "../stypes", features=["rustcore"] }
diff --git a/application/apps/indexer/indexer_cli/src/interactive.rs b/application/apps/indexer/indexer_cli/src/interactive.rs
index 263bb3021a..0b38b0888a 100644
--- a/application/apps/indexer/indexer_cli/src/interactive.rs
+++ b/application/apps/indexer/indexer_cli/src/interactive.rs
@@ -4,11 +4,7 @@ use parsers::{dlt::DltParser, MessageStreamItem, ParseYield};
 use processor::grabber::LineRange;
 use rustyline::{error::ReadlineError, DefaultEditor};
 use session::session::Session;
-use sources::{
-    factory::{DltParserSettings, FileFormat, ObserveOptions, ParserType},
-    producer::MessageProducer,
-    socket::udp::UdpSource,
-};
+use sources::{producer::MessageProducer, socket::udp::UdpSource};
 use std::path::PathBuf;
 use tokio_util::sync::CancellationToken;
 
@@ -88,15 +84,15 @@ pub(crate) async fn handle_interactive_session(input: Option<PathBuf>) {
                         start = Instant::now();
                         let uuid = Uuid::new_v4();
                         let file_path = input.clone().expect("input must be present");
-                        session.observe(uuid, ObserveOptions::file(file_path.clone(),FileFormat::Text, ParserType::Text)).expect("observe failed");
+                        session.observe(uuid, stypes::ObserveOptions::file(file_path.clone(),stypes::FileFormat::Text, stypes::ParserType::Text(()))).expect("observe failed");
                     }
                     Some(Command::Dlt) => {
                         println!("dlt command received");
                         start = Instant::now();
                         let uuid = Uuid::new_v4();
                         let file_path = input.clone().expect("input must be present");
-                        let dlt_parser_settings = DltParserSettings { filter_config: None, fibex_file_paths: None, with_storage_header: true, tz: None, fibex_metadata: None };
-                        session.observe(uuid, ObserveOptions::file(file_path.clone(), FileFormat::Binary, ParserType::Dlt(dlt_parser_settings))).expect("observe failed");
+                        let dlt_parser_settings = stypes::DltParserSettings { filter_config: None, fibex_file_paths: None, with_storage_header: true, tz: None, fibex_metadata: None };
+                        session.observe(uuid, stypes::ObserveOptions::file(file_path.clone(), stypes::FileFormat::Binary, stypes::ParserType::Dlt(dlt_parser_settings))).expect("observe failed");
                         println!("dlt session was destroyed");
                     }
                     Some(Command::Grab) => {
@@ -104,9 +100,9 @@ pub(crate) async fn handle_interactive_session(input: Option<PathBuf>) {
                         start = Instant::now();
                         let start_op = Instant::now();
                         let content = session.grab(LineRange::from(0u64..=1000)).await.expect("grab failed");
-                        let len = content.len();
+                        let len = content.0.len();
                         println!("content has {len} elemenst");
-                        for elem in content {
+                        for elem in content.0 {
                             println!("{elem:?}");
                         }
                         duration_report(start_op, format!("grabbing {len} lines"));
diff --git a/application/apps/indexer/parsers/src/dlt/attachment.rs b/application/apps/indexer/parsers/src/dlt/attachment.rs
index 59204a8b43..820aac593d 100644
--- a/application/apps/indexer/parsers/src/dlt/attachment.rs
+++ b/application/apps/indexer/parsers/src/dlt/attachment.rs
@@ -322,6 +322,42 @@ impl FileExtractor {
     }
 }
 
+pub struct TempDir {
+    pub dir: PathBuf,
+}
+
+impl TempDir {
+    pub fn new() -> Self {
+        use rand::Rng;
+        use std::{env, fs};
+        let mut rand = rand::thread_rng();
+        let dir = env::current_dir()
+            .unwrap()
+            .join(format!("temp_{}", rand.gen::<u64>()));
+        fs::create_dir(dir.clone()).unwrap();
+        TempDir { dir }
+    }
+
+    pub fn assert_file(&self, name: &str, content: &str) {
+        let path = self.dir.join(name);
+        let string =
+            std::fs::read_to_string(&path).unwrap_or_else(|_| panic!("{:?} should exist", &path));
+        assert_eq!(string, content);
+    }
+}
+
+impl Drop for TempDir {
+    fn drop(&mut self) {
+        std::fs::remove_dir_all(self.dir.clone()).unwrap();
+    }
+}
+
+impl Default for TempDir {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 #[allow(clippy::get_first)]
 #[cfg(test)]
 pub mod tests {
@@ -751,39 +787,3 @@ pub mod tests {
         assert_eq!(files[2].1, "00000002_test3.txt");
     }
 }
-
-pub struct TempDir {
-    pub dir: PathBuf,
-}
-
-impl TempDir {
-    pub fn new() -> Self {
-        use rand::Rng;
-        use std::{env, fs};
-        let mut rand = rand::thread_rng();
-        let dir = env::current_dir()
-            .unwrap()
-            .join(format!("temp_{}", rand.gen::<u64>()));
-        fs::create_dir(dir.clone()).unwrap();
-        TempDir { dir }
-    }
-
-    pub fn assert_file(&self, name: &str, content: &str) {
-        let path = self.dir.join(name);
-        let string =
-            std::fs::read_to_string(&path).unwrap_or_else(|_| panic!("{:?} should exist", &path));
-        assert_eq!(string, content);
-    }
-}
-
-impl Drop for TempDir {
-    fn drop(&mut self) {
-        std::fs::remove_dir_all(self.dir.clone()).unwrap();
-    }
-}
-
-impl Default for TempDir {
-    fn default() -> Self {
-        Self::new()
-    }
-}
diff --git a/application/apps/indexer/processor/Cargo.toml b/application/apps/indexer/processor/Cargo.toml
index e03a0627d8..8c1c812df7 100644
--- a/application/apps/indexer/processor/Cargo.toml
+++ b/application/apps/indexer/processor/Cargo.toml
@@ -22,6 +22,7 @@ serde_json.workspace = true
 thiserror.workspace = true
 tokio-util.workspace = true
 uuid = { workspace = true , features = ["serde", "v4"] }
+stypes = { path = "../stypes", features=["rustcore"] }
 
 [dev-dependencies]
 criterion.workspace = true
diff --git a/application/apps/indexer/processor/benches/map_benchmarks.rs b/application/apps/indexer/processor/benches/map_benchmarks.rs
index fb14199281..f3879bf323 100644
--- a/application/apps/indexer/processor/benches/map_benchmarks.rs
+++ b/application/apps/indexer/processor/benches/map_benchmarks.rs
@@ -2,14 +2,14 @@ extern crate criterion;
 extern crate processor;
 
 use criterion::{Criterion, *};
-use processor::map::{FilterMatch, SearchMap};
+use processor::map::SearchMap;
 
 fn scaled_benchmark(c: &mut Criterion) {
     let mut example_map: SearchMap = SearchMap::new();
     let mut v = vec![];
     for i in (1..1_000_000).step_by(50) {
-        v.push(FilterMatch::new(i, vec![0]));
-        v.push(FilterMatch::new(i + 22, vec![0, 1]));
+        v.push(stypes::FilterMatch::new(i, vec![0]));
+        v.push(stypes::FilterMatch::new(i + 22, vec![0, 1]));
     }
     example_map.set(Some(v), None);
 
diff --git a/application/apps/indexer/processor/src/grabber/mod.rs b/application/apps/indexer/processor/src/grabber/mod.rs
index 1da28eac38..35a9600d8f 100644
--- a/application/apps/indexer/processor/src/grabber/mod.rs
+++ b/application/apps/indexer/processor/src/grabber/mod.rs
@@ -29,6 +29,48 @@ pub enum GrabError {
     Unsupported(String),
 }
 
+impl From<GrabError> for stypes::NativeError {
+    fn from(val: GrabError) -> Self {
+        match val {
+            GrabError::IoOperation(e) => stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::ComputationFailed,
+                message: Some(e),
+            },
+            GrabError::Config(msg) => stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Configuration,
+                message: Some(msg),
+            },
+            GrabError::Interrupted => stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Interrupted,
+                message: None,
+            },
+            GrabError::InvalidRange { .. } => stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::ComputationFailed,
+                message: Some("Invalid Range".to_string()),
+            },
+            GrabError::Communication(s) => stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::ComputationFailed,
+                message: Some(s),
+            },
+            GrabError::NotInitialize => stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::ComputationFailed,
+                message: Some("Grabbing failed, not initialized".to_owned()),
+            },
+            GrabError::Unsupported(s) => stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::ComputationFailed,
+                message: Some(format!("File type is not supported: {s}")),
+            },
+        }
+    }
+}
+
 #[derive(Debug)]
 pub enum ComputationResult<T> {
     Item(T),
diff --git a/application/apps/indexer/processor/src/map.rs b/application/apps/indexer/processor/src/map.rs
index 047dfb9bc1..91fd029d31 100644
--- a/application/apps/indexer/processor/src/map.rs
+++ b/application/apps/indexer/processor/src/map.rs
@@ -1,5 +1,4 @@
 use serde::{Deserialize, Serialize};
-use serde_json;
 use std::{collections::HashMap, ops::RangeInclusive};
 use thiserror::Error;
 
@@ -11,19 +10,6 @@ use thiserror::Error;
 /// [0-12] []
 pub type ScaledDistribution = Vec<Vec<(u8, u16)>>;
 
-/// Lists all matching filters at an index
-#[derive(Debug, Clone)]
-pub struct FilterMatch {
-    pub index: u64,
-    pub filters: Vec<u8>,
-}
-
-impl FilterMatch {
-    pub fn new(index: u64, filters: Vec<u8>) -> Self {
-        Self { index, filters }
-    }
-}
-
 #[derive(Debug, Serialize, Deserialize)]
 pub struct FiltersStats {
     pub stats: HashMap<String, u64>,
@@ -46,12 +32,6 @@ impl Default for FiltersStats {
     }
 }
 
-#[derive(Default, Debug, Serialize)]
-pub struct NearestPosition {
-    pub index: u64,    // Position in search results
-    pub position: u64, // Position in original stream/file
-}
-
 #[derive(Error, Debug, Serialize)]
 pub enum MapError {
     #[error("Out of range ({0})")]
@@ -69,7 +49,7 @@ pub enum MapError {
 ///
 #[derive(Default, Debug)]
 pub struct SearchMap {
-    pub matches: Vec<FilterMatch>,
+    pub matches: Vec<stypes::FilterMatch>,
     stats: FiltersStats,
     stream_len: u64,
 }
@@ -205,7 +185,7 @@ impl SearchMap {
         map
     }
 
-    pub fn indexes(&self, range: &RangeInclusive<u64>) -> Result<&[FilterMatch], MapError> {
+    pub fn indexes(&self, range: &RangeInclusive<u64>) -> Result<&[stypes::FilterMatch], MapError> {
         if range.end() >= &(self.len() as u64) {
             return Err(MapError::OutOfRange(format!(
                 "Search has: {} matches. Requested: {:?}",
@@ -222,7 +202,7 @@ impl SearchMap {
     /// [10, 200, 300, 350]
     /// In that case nearest for 310 will be 300
     /// Returns None if there are no search results
-    pub fn nearest_to(&self, position_in_stream: u64) -> Option<NearestPosition> {
+    pub fn nearest_to(&self, position_in_stream: u64) -> Option<stypes::NearestPosition> {
         if self.matches.is_empty() {
             None
         } else {
@@ -240,17 +220,17 @@ impl SearchMap {
             if distance == i64::MAX {
                 None
             } else {
-                Some(NearestPosition { index, position })
+                Some(stypes::NearestPosition { index, position })
             }
         }
     }
 
-    pub fn set(&mut self, matches: Option<Vec<FilterMatch>>, stats: Option<FiltersStats>) {
+    pub fn set(&mut self, matches: Option<Vec<stypes::FilterMatch>>, stats: Option<FiltersStats>) {
         self.matches = matches.map_or(vec![], |m| m);
         self.stats = stats.map_or(FiltersStats::default(), |s| s);
     }
 
-    pub fn append(&mut self, matches: &mut Vec<FilterMatch>) -> usize {
+    pub fn append(&mut self, matches: &mut Vec<stypes::FilterMatch>) -> usize {
         self.matches.append(matches);
         self.matches.len()
     }
@@ -276,11 +256,6 @@ impl SearchMap {
     pub fn is_empty(&self) -> bool {
         self.matches.is_empty()
     }
-
-    pub fn map_as_str(matches: &[FilterMatch]) -> String {
-        serde_json::to_string(&matches.iter().map(|m| m.index).collect::<Vec<u64>>())
-            .map_or(String::new(), |s| s)
-    }
 }
 
 #[allow(clippy::needless_range_loop)]
@@ -289,26 +264,26 @@ fn test_scaled_map() {
     let mut example_map: SearchMap = SearchMap::new();
     example_map.set(
         Some(vec![
-            FilterMatch::new(10, vec![0]),
-            FilterMatch::new(20, vec![1]),
-            FilterMatch::new(30, vec![0]),
-            FilterMatch::new(40, vec![1]),
-            FilterMatch::new(50, vec![0]),
-            FilterMatch::new(60, vec![1]),
-            FilterMatch::new(70, vec![0]),
-            FilterMatch::new(80, vec![1]),
-            FilterMatch::new(90, vec![0]),
-            FilterMatch::new(100, vec![1]),
-            FilterMatch::new(110, vec![0]),
-            FilterMatch::new(120, vec![1]),
-            FilterMatch::new(130, vec![0]),
-            FilterMatch::new(140, vec![1]),
-            FilterMatch::new(150, vec![0]),
-            FilterMatch::new(160, vec![1]),
-            FilterMatch::new(170, vec![0]),
-            FilterMatch::new(180, vec![1]),
-            FilterMatch::new(190, vec![0]),
-            FilterMatch::new(200, vec![1]),
+            stypes::FilterMatch::new(10, vec![0]),
+            stypes::FilterMatch::new(20, vec![1]),
+            stypes::FilterMatch::new(30, vec![0]),
+            stypes::FilterMatch::new(40, vec![1]),
+            stypes::FilterMatch::new(50, vec![0]),
+            stypes::FilterMatch::new(60, vec![1]),
+            stypes::FilterMatch::new(70, vec![0]),
+            stypes::FilterMatch::new(80, vec![1]),
+            stypes::FilterMatch::new(90, vec![0]),
+            stypes::FilterMatch::new(100, vec![1]),
+            stypes::FilterMatch::new(110, vec![0]),
+            stypes::FilterMatch::new(120, vec![1]),
+            stypes::FilterMatch::new(130, vec![0]),
+            stypes::FilterMatch::new(140, vec![1]),
+            stypes::FilterMatch::new(150, vec![0]),
+            stypes::FilterMatch::new(160, vec![1]),
+            stypes::FilterMatch::new(170, vec![0]),
+            stypes::FilterMatch::new(180, vec![1]),
+            stypes::FilterMatch::new(190, vec![0]),
+            stypes::FilterMatch::new(200, vec![1]),
         ]),
         None,
     );
@@ -451,7 +426,7 @@ fn test_scaled_map() {
                 (200, vec![3]),
             ]
             .into_iter()
-            .map(|(a, b)| FilterMatch::new(a, b))
+            .map(|(a, b)| stypes::FilterMatch::new(a, b))
             .collect(),
         ),
         None,
diff --git a/application/apps/indexer/processor/src/search/extractor.rs b/application/apps/indexer/processor/src/search/extractor.rs
index 83ae3e65ea..108dc10723 100644
--- a/application/apps/indexer/processor/src/search/extractor.rs
+++ b/application/apps/indexer/processor/src/search/extractor.rs
@@ -3,49 +3,31 @@ use grep_regex::RegexMatcher;
 use grep_searcher::{sinks::UTF8, Searcher};
 use itertools::Itertools;
 use regex::Regex;
-use serde::{Deserialize, Serialize};
 use std::{
     path::{Path, PathBuf},
     str::FromStr,
 };
 
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct ExtractedMatchValue {
-    pub index: u64,
-    /// (filter index, extracted value)
-    pub values: Vec<(usize, Vec<String>)>,
-}
-
-impl ExtractedMatchValue {
-    pub fn new(index: u64, input: &str, filters: &[Regex]) -> Self {
-        Self {
-            index,
-            values: ExtractedMatchValue::extract(input, filters),
-        }
-    }
-
-    pub fn extract(input: &str, filters: &[Regex]) -> Vec<(usize, Vec<String>)> {
-        let mut values: Vec<(usize, Vec<String>)> = vec![];
-        for (filter_index, filter) in filters.iter().enumerate() {
-            for caps in filter.captures_iter(input) {
-                let mut matches: Vec<String> = caps
-                    .iter()
-                    .flatten()
-                    .map(|m| m.as_str().to_owned())
-                    .collect();
-                if matches.len() <= 1 {
-                    // warn here
-                } else {
-                    // 0 always - whole match
-                    matches.remove(0);
-                    values.push((filter_index, matches));
-                }
+fn get_extracted_value(index: u64, input: &str, filters: &[Regex]) -> stypes::ExtractedMatchValue {
+    let mut values: Vec<(usize, Vec<String>)> = vec![];
+    for (filter_index, filter) in filters.iter().enumerate() {
+        for caps in filter.captures_iter(input) {
+            // Element 0 is always the whole match; we don't need it here.
+            let matches: Vec<String> = caps
+                .iter()
+                .flatten()
+                .map(|m| m.as_str().to_owned())
+                .skip(1)
+                .collect();
+            if matches.is_empty() {
+                warn!("Filter doesn't give matches on matches extracting")
+            } else {
+                values.push((filter_index, matches));
             }
         }
-        values
     }
+    stypes::ExtractedMatchValue { index, values }
 }
-
 pub struct MatchesExtractor {
     pub file_path: PathBuf,
     filters: Vec<SearchFilter>,
@@ -66,8 +48,7 @@ impl MatchesExtractor {
         }
     }
 
-    /// TODO: add description
-    pub fn extract_matches(&self) -> Result<Vec<ExtractedMatchValue>, SearchError> {
+    pub fn extract_matches(&self) -> Result<Vec<stypes::ExtractedMatchValue>, SearchError> {
         if self.filters.is_empty() {
             return Err(SearchError::Input(
                 "Cannot search without filters".to_owned(),
@@ -75,7 +56,7 @@ impl MatchesExtractor {
         }
         let combined_regex: String =
             format!("({})", self.filters.iter().map(filter::as_regex).join("|"));
-        let mut values: Vec<ExtractedMatchValue> = vec![];
+        let mut values: Vec<stypes::ExtractedMatchValue> = vec![];
         let mut regexs: Vec<Regex> = vec![];
         for filter in self.filters.iter() {
             regexs.push(
@@ -95,7 +76,7 @@ impl MatchesExtractor {
                 &regex_matcher,
                 &self.file_path,
                 UTF8(|lnum, line| {
-                    values.push(ExtractedMatchValue::new(lnum - 1, line, &regexs));
+                    values.push(get_extracted_value(lnum - 1, line, &regexs));
                     Ok(true)
                 }),
             )
diff --git a/application/apps/indexer/processor/src/search/searchers/regular.rs b/application/apps/indexer/processor/src/search/searchers/regular.rs
index e8f5b584f9..43b2661583 100644
--- a/application/apps/indexer/processor/src/search/searchers/regular.rs
+++ b/application/apps/indexer/processor/src/search/searchers/regular.rs
@@ -1,5 +1,5 @@
 use crate::{
-    map::{FilterMatch, FiltersStats},
+    map::FiltersStats,
     search::{error::SearchError, filter, filter::SearchFilter},
 };
 use regex::Regex;
@@ -14,11 +14,12 @@ use uuid::Uuid;
 
 use super::{BaseSearcher, SearchState};
 
-pub type SearchResults = Result<(Range<usize>, Vec<FilterMatch>, FiltersStats), SearchError>;
+pub type SearchResults =
+    Result<(Range<usize>, Vec<stypes::FilterMatch>, FiltersStats), SearchError>;
 
 #[derive(Debug)]
 struct Results {
-    indexes: Option<Vec<FilterMatch>>,
+    indexes: Option<Vec<stypes::FilterMatch>>,
     stats: Option<FiltersStats>,
 }
 
@@ -89,7 +90,7 @@ impl SearchState for RegularSearchState {
 }
 
 fn collect(row: u64, line: &str, state: &mut RegularSearchState) {
-    let mut line_indexes = FilterMatch::new(row, vec![]);
+    let mut line_indexes = stypes::FilterMatch::new(row, vec![]);
     let mut matched_rows = vec![];
     for (index, re) in state.matchers.iter().enumerate() {
         if re.is_match(line) {
diff --git a/application/apps/indexer/processor/src/search/searchers/tests_regular.rs b/application/apps/indexer/processor/src/search/searchers/tests_regular.rs
index 3875368944..e0773ec83d 100644
--- a/application/apps/indexer/processor/src/search/searchers/tests_regular.rs
+++ b/application/apps/indexer/processor/src/search/searchers/tests_regular.rs
@@ -1,9 +1,6 @@
-use crate::{
-    map::FilterMatch,
-    search::{
-        filter::SearchFilter,
-        searchers::{regular::RegularSearchState, BaseSearcher},
-    },
+use crate::search::{
+    filter::SearchFilter,
+    searchers::{regular::RegularSearchState, BaseSearcher},
 };
 use std::io::{Error, ErrorKind, Write};
 use tokio_util::sync::CancellationToken;
@@ -22,7 +19,10 @@ const LOGS: &[&str] = &[
 ];
 
 // create tmp file with content, apply search
-fn filtered(content: &str, filters: Vec<SearchFilter>) -> Result<Vec<FilterMatch>, std::io::Error> {
+fn filtered(
+    content: &str,
+    filters: Vec<SearchFilter>,
+) -> Result<Vec<stypes::FilterMatch>, std::io::Error> {
     let mut tmp_file = tempfile::NamedTempFile::new()?;
     let input_file = tmp_file.as_file_mut();
     input_file.write_all(content.as_bytes())?;
diff --git a/application/apps/indexer/session/Cargo.toml b/application/apps/indexer/session/Cargo.toml
index 3f345443cc..a589e39906 100644
--- a/application/apps/indexer/session/Cargo.toml
+++ b/application/apps/indexer/session/Cargo.toml
@@ -8,8 +8,8 @@ edition = "2021"
 blake3 = "1.5"
 crossbeam-channel.workspace = true
 dirs.workspace = true
-dlt-core = { workspace = true, features = ["statistics"] }
-envvars = "0.1"
+dlt-core = { workspace = true, features = ["statistics", "serde-support"] }
+envvars = { workspace = true }
 file-tools = { path = "../addons/file-tools" }
 futures.workspace = true
 indexer_base = { path = "../indexer_base" }
@@ -20,6 +20,7 @@ mime_guess = "2.0"
 parsers = { path = "../parsers" }
 processor = { path = "../processor" }
 rustc-hash = "2.1"
+stypes = { path = "../stypes", features=["rustcore"] }
 serde = { workspace = true , features = ["derive"] }
 serde_json.workspace = true
 serialport = "4.6"
@@ -29,7 +30,7 @@ tokio = { workspace = true , features = ["full"] }
 tokio-stream.workspace = true
 tokio-util.workspace = true
 uuid = { workspace = true , features = ["serde", "v4"] }
-walkdir = "2.5"
+walkdir.workspace = true
 
 [dev-dependencies]
 lazy_static.workspace = true
diff --git a/application/apps/indexer/session/src/events.rs b/application/apps/indexer/session/src/events.rs
deleted file mode 100644
index 6ee34da790..0000000000
--- a/application/apps/indexer/session/src/events.rs
+++ /dev/null
@@ -1,379 +0,0 @@
-use crate::{
-    progress::{Progress, Severity, Ticks},
-    state::AttachmentInfo,
-};
-use crossbeam_channel as cc;
-use processor::{grabber::GrabError, search::error::SearchError};
-use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
-use thiserror::Error;
-use uuid::Uuid;
-
-use crate::state::{attachments::AttachmentsError, values::ValuesError};
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub enum NativeErrorKind {
-    /// The file in question does not exist
-    FileNotFound,
-    /// The file type is not currently supported
-    UnsupportedFileType,
-    ComputationFailed,
-    Configuration,
-    Interrupted,
-    OperationSearch,
-    NotYetImplemented,
-    ChannelError,
-    Io,
-    Grabber,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct NativeError {
-    pub severity: Severity,
-    pub kind: NativeErrorKind,
-    pub message: Option<String>,
-}
-
-impl NativeError {
-    pub fn channel(msg: &str) -> Self {
-        NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::ChannelError,
-            message: Some(String::from(msg)),
-        }
-    }
-}
-
-impl From<AttachmentsError> for NativeError {
-    fn from(err: AttachmentsError) -> Self {
-        NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
-            message: Some(err.to_string()),
-        }
-    }
-}
-
-impl From<ValuesError> for NativeError {
-    fn from(err: ValuesError) -> Self {
-        NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
-            message: Some(err.to_string()),
-        }
-    }
-}
-
-impl From<ComputationError> for NativeError {
-    fn from(err: ComputationError) -> Self {
-        NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
-            message: Some(err.to_string()),
-        }
-    }
-}
-
-impl From<std::io::Error> for NativeError {
-    fn from(err: std::io::Error) -> Self {
-        NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
-            message: Some(err.to_string()),
-        }
-    }
-}
-
-impl From<sources::Error> for NativeError {
-    fn from(err: sources::Error) -> Self {
-        NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::ComputationFailed,
-            message: Some(format!("Fail create source: {err}")),
-        }
-    }
-}
-
-impl From<tokio::sync::mpsc::error::SendError<CallbackEvent>> for NativeError {
-    fn from(err: tokio::sync::mpsc::error::SendError<CallbackEvent>) -> Self {
-        NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::ComputationFailed,
-            message: Some(format!("Callback channel is broken: {err}")),
-        }
-    }
-}
-
-impl From<GrabError> for NativeError {
-    fn from(err: GrabError) -> Self {
-        match err {
-            GrabError::IoOperation(e) => NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::ComputationFailed,
-                message: Some(e),
-            },
-            GrabError::Config(msg) => NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Configuration,
-                message: Some(msg),
-            },
-            GrabError::Interrupted => NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Interrupted,
-                message: None,
-            },
-            GrabError::InvalidRange { .. } => NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::ComputationFailed,
-                message: Some("Invalid Range".to_string()),
-            },
-            GrabError::Communication(s) => NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::ComputationFailed,
-                message: Some(s),
-            },
-            GrabError::NotInitialize => NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::ComputationFailed,
-                message: Some("Grabbing failed, not initialized".to_owned()),
-            },
-            GrabError::Unsupported(s) => NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::ComputationFailed,
-                message: Some(format!("File type is not supported: {s}")),
-            },
-        }
-    }
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct OperationDone {
-    pub uuid: Uuid,
-    pub result: Option<String>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub enum CallbackEvent {
-    /**
-     * Triggered on update of stream (session) file
-     * @event StreamUpdated { rows: usize }
-     * rows - count of rows, which can be requested with method [grab]
-     * >> Scope: session
-     * >> Kind: repeated
-     */
-    StreamUpdated(u64),
-    /**
-     * Triggered once the file has been read completely. After this event the session starts tailing
-     * @event FileRead
-     * >> Scope: session
-     * >> Kind: once
-     */
-    FileRead,
-    /**
-     * Triggered on update of search result data
-     * @event SearchUpdated { rows: usize }
-     * rows - count of rows, which can be requested with method [grabSearchResult]
-     * >> Scope: session
-     * >> Kind: repeated
-     */
-    SearchUpdated {
-        found: u64,
-        stat: HashMap<String, u64>,
-    },
-    /**
-     * Triggered on update of indexed map
-     * @event IndexedMapUpdated { len: u64 }
-     * len - count of rows, which can be requested with method [grabSearchResult]
-     * >> Scope: session
-     * >> Kind: repeated
-     */
-    IndexedMapUpdated { len: u64 },
-    /**
-     * Triggered on update of search result data
-     * @event SearchMapUpdated { Option<String> }
-     * includes JSON String of Vec<u64> - map of all matches in search
-     * also sent with each search update while streaming is in progress
-     * None - map is dropped
-     * >> Scope: session
-     * >> Kind: repeated
-     */
-    SearchMapUpdated(Option<String>),
-    /**
-     * Triggered on update of search values data. Used for charts
-     * @event SearchValuesUpdated
-     * for searches with values, also sent with each search update while streaming is in progress
-     * None - map is dropped
-     * >> Scope: session
-     * >> Kind: repeated
-     */
-    SearchValuesUpdated(Option<HashMap<u8, (f64, f64)>>),
-    /**
-     * Triggered when a new attachment has been detected
-     * len - number of already detected attachments (in session)
-     * uuid - UUID of new attachment
-     * >> Scope: async operation
-     * >> Kind: repeated
-     */
-    AttachmentsUpdated {
-        len: u64,
-        attachment: AttachmentInfo,
-    },
-    /**
-     * Triggered on progress of async operation
-     * @event Progress: { total: usize, done: usize }
-     * >> Scope: async operation
-     * >> Kind: repeated
-     */
-    Progress { uuid: Uuid, progress: Progress },
-    /**
-     * Triggered on error in the scope of session
-     * >> Scope: session
-     * >> Kind: repeated
-     */
-    SessionError(NativeError),
-    /**
-     * Triggered on error in the scope of processing an async operation
-     * >> Scope: session, async operation
-     * >> Kind: repeated
-     */
-    OperationError { uuid: Uuid, error: NativeError },
-    /**
-     * Operation is created; task is spawned.
-     * This event is always triggered.
-     * Triggered for all continuous async operations like observe
-     * >> Scope: async operation
-     * >> Kind: repeated
-     */
-    OperationStarted(Uuid),
-    /**
-     * All initializations are done and operation is processing now.
-     * There is no guarantee this event will be triggered; it depends
-     * on each specific operation. This event can be triggered multiple
-     * times in the scope of one operation (for example concat).
-     * Can be triggered for continuous async operations like observe
-     * >> Scope: async operation
-     * >> Kind: repeated
-     */
-    OperationProcessing(Uuid),
-    /**
-     * Triggered when an async operation is done
-     * >> Scope: async operation
-     * >> Kind: repeated
-     */
-    OperationDone(OperationDone),
-    /**
-     * Triggered when the session is destroyed
-     * >> Scope: session
-     * >> Kind: once
-     */
-    SessionDestroyed,
-}
-
-impl CallbackEvent {
-    pub fn no_search_results() -> Self {
-        CallbackEvent::SearchUpdated {
-            found: 0,
-            stat: HashMap::new(),
-        }
-    }
-
-    pub fn search_results(found: u64, stat: HashMap<String, u64>) -> Self {
-        CallbackEvent::SearchUpdated { found, stat }
-    }
-}
-
-impl std::fmt::Display for CallbackEvent {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
-        match self {
-            Self::StreamUpdated(len) => write!(f, "StreamUpdated({len})"),
-            Self::FileRead => write!(f, "FileRead"),
-            Self::SearchUpdated { found, stat: _ } => write!(f, "SearchUpdated({found})"),
-            Self::IndexedMapUpdated { len } => write!(f, "IndexedMapUpdated({len})"),
-            Self::SearchMapUpdated(_) => write!(f, "SearchMapUpdated"),
-            Self::SearchValuesUpdated(_) => write!(f, "SearchValuesUpdated"),
-            Self::AttachmentsUpdated { len, attachment: _ } => {
-                write!(f, "AttachmentsUpdated: {}", len)
-            }
-            Self::Progress {
-                uuid: _,
-                progress: _,
-            } => write!(f, "Progress"),
-            Self::SessionError(err) => write!(f, "SessionError: {err:?}"),
-            Self::OperationError { uuid, error } => {
-                write!(f, "OperationError: {uuid}: {error:?}")
-            }
-            Self::OperationStarted(uuid) => write!(f, "OperationStarted: {uuid}"),
-            Self::OperationProcessing(uuid) => write!(f, "OperationProcessing: {uuid}"),
-            Self::OperationDone(info) => write!(f, "OperationDone: {}", info.uuid),
-            Self::SessionDestroyed => write!(f, "SessionDestroyed"),
-        }
-    }
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub enum LifecycleTransition {
-    Started { uuid: Uuid, alias: String },
-    Ticks { uuid: Uuid, ticks: Ticks },
-    Stopped(Uuid),
-}
-
-impl LifecycleTransition {
-    pub fn uuid(&self) -> Uuid {
-        match self {
-            Self::Started { uuid, alias: _ } => *uuid,
-            Self::Ticks { uuid, ticks: _ } => *uuid,
-            Self::Stopped(uuid) => *uuid,
-        }
-    }
-
-    pub fn started(uuid: &Uuid, alias: &str) -> Self {
-        LifecycleTransition::Started {
-            uuid: *uuid,
-            alias: alias.to_owned(),
-        }
-    }
-
-    pub fn stopped(uuid: &Uuid) -> Self {
-        LifecycleTransition::Stopped(*uuid)
-    }
-
-    pub fn ticks(uuid: &Uuid, ticks: Ticks) -> Self {
-        LifecycleTransition::Ticks { uuid: *uuid, ticks }
-    }
-}
-
-#[derive(Error, Debug, Serialize)]
-pub enum ComputationError {
-    #[error("Destination path should be defined to stream from MessageProducer")]
-    DestinationPath,
-    #[error("Fail to create session")]
-    SessionCreatingFail,
-    #[error("Native communication error ({0})")]
-    Communication(String),
-    #[error("Operation not supported ({0})")]
-    OperationNotSupported(String),
-    #[error("IO error ({0})")]
-    IoOperation(String),
-    #[error("Invalid data error")]
-    InvalidData,
-    #[error("Invalid arguments")]
-    InvalidArgs(String),
-    #[error("Error during processing: ({0})")]
-    Process(String),
-    #[error("Wrong usage of API: ({0})")]
-    Protocol(String),
-    #[error("Search related error")]
-    SearchError(SearchError),
-    #[error("start method can be called just once")]
-    MultipleInitCall,
-    #[error("Session is destroyed or not initialized yet")]
-    SessionUnavailable,
-    #[error("{0:?}")]
-    NativeError(NativeError),
-    #[error("Grabbing content not possible: {0:?}")]
-    Grabbing(#[from] GrabError),
-    #[error("Sending data to source error: {0:?}")]
-    Sde(String),
-}
-
-pub type SyncChannel<T> = (cc::Sender<T>, cc::Receiver<T>);
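
The events module removed above now lives in the shared `stypes` crate, and the remainder of this diff switches the session crate over to those shared types. As an illustrative sketch only (not part of the diff), and assuming `stypes::CallbackEvent` keeps the variants documented above, a consumer of the callback channel could look like this:

use tokio::sync::mpsc::UnboundedReceiver;

// Hypothetical consumer: drains events emitted via OperationAPI::emit.
async fn consume(mut rx: UnboundedReceiver<stypes::CallbackEvent>) {
    while let Some(event) = rx.recv().await {
        match event {
            // Count of rows available for grabbing has grown.
            stypes::CallbackEvent::StreamUpdated(rows) => println!("stream rows: {rows}"),
            // Search results were updated (repeated while streaming).
            stypes::CallbackEvent::SearchUpdated { found, .. } => println!("matches: {found}"),
            // Operation finished; `result` now carries serialized bytes (see operations.rs below).
            stypes::CallbackEvent::OperationDone(done) => println!("done: {}", done.uuid),
            stypes::CallbackEvent::SessionDestroyed => break,
            _ => {}
        }
    }
}
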
diff --git a/application/apps/indexer/session/src/handlers/export_raw.rs b/application/apps/indexer/session/src/handlers/export_raw.rs
index 38bf52c4b8..e98c606caa 100644
--- a/application/apps/indexer/session/src/handlers/export_raw.rs
+++ b/application/apps/indexer/session/src/handlers/export_raw.rs
@@ -1,9 +1,4 @@
-use crate::{
-    events::{NativeError, NativeErrorKind},
-    operations::OperationResult,
-    progress::Severity,
-    state::SessionStateAPI,
-};
+use crate::{operations::OperationResult, state::SessionStateAPI};
 use indexer_base::config::IndexSection;
 use log::debug;
 use parsers::{
@@ -18,7 +13,6 @@ use sources::{
         pcap::{legacy::PcapLegacyByteSource, ng::PcapngByteSource},
         raw::BinaryByteSource,
     },
-    factory::{FileFormat, ParserType},
     producer::MessageProducer,
     ByteSource,
 };
@@ -37,9 +31,9 @@ pub async fn execute_export(
     debug!("RUST: ExportRaw operation is requested");
     let observed = state.get_executed_holder().await?;
     if !observed.is_file_based_export_possible() {
-        return Err(NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Configuration,
+        return Err(stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Configuration,
             message: Some(String::from(
                 "For current collection of observing operation raw export isn't possible.",
             )),
@@ -81,19 +75,19 @@ pub async fn execute_export(
 async fn assing_source(
     src: &PathBuf,
     dest: &Path,
-    parser: &ParserType,
-    file_format: &FileFormat,
+    parser: &stypes::ParserType,
+    file_format: &stypes::FileFormat,
     sections: &Vec<IndexSection>,
     read_to_end: bool,
     cancel: &CancellationToken,
-) -> Result<Option<usize>, NativeError> {
-    let reader = File::open(src).map_err(|e| NativeError {
-        severity: Severity::ERROR,
-        kind: NativeErrorKind::Io,
+) -> Result<Option<usize>, stypes::NativeError> {
+    let reader = File::open(src).map_err(|e| stypes::NativeError {
+        severity: stypes::Severity::ERROR,
+        kind: stypes::NativeErrorKind::Io,
         message: Some(format!("Fail open file {}: {}", src.to_string_lossy(), e)),
     })?;
     match file_format {
-        FileFormat::Binary | FileFormat::Text => {
+        stypes::FileFormat::Binary | stypes::FileFormat::Text => {
             export(
                 dest,
                 parser,
@@ -104,7 +98,7 @@ async fn assing_source(
             )
             .await
         }
-        FileFormat::PcapNG => {
+        stypes::FileFormat::PcapNG => {
             export(
                 dest,
                 parser,
@@ -115,7 +109,7 @@ async fn assing_source(
             )
             .await
         }
-        FileFormat::PcapLegacy => {
+        stypes::FileFormat::PcapLegacy => {
             export(
                 dest,
                 parser,
@@ -131,14 +125,14 @@ async fn assing_source(
 
 async fn export<S: ByteSource>(
     dest: &Path,
-    parser: &ParserType,
+    parser: &stypes::ParserType,
     source: S,
     sections: &Vec<IndexSection>,
     read_to_end: bool,
     cancel: &CancellationToken,
-) -> Result<Option<usize>, NativeError> {
+) -> Result<Option<usize>, stypes::NativeError> {
     match parser {
-        ParserType::SomeIp(settings) => {
+        stypes::ParserType::SomeIp(settings) => {
             let parser = if let Some(files) = settings.fibex_file_paths.as_ref() {
                 SomeipParser::from_fibex_files(files.iter().map(PathBuf::from).collect())
             } else {
@@ -155,7 +149,7 @@ async fn export<S: ByteSource>(
             )
             .await
         }
-        ParserType::Dlt(settings) => {
+        stypes::ParserType::Dlt(settings) => {
             let fmt_options = Some(FormatOptions::from(settings.tz.as_ref()));
             let parser = DltParser::new(
                 settings.filter_config.as_ref().map(|f| f.into()),
@@ -175,7 +169,7 @@ async fn export<S: ByteSource>(
             )
             .await
         }
-        ParserType::Text => {
+        stypes::ParserType::Text(()) => {
             let mut producer = MessageProducer::new(StringTokenizer {}, source, None);
             export_runner(
                 Box::pin(producer.as_stream()),
@@ -197,7 +191,7 @@ pub async fn export_runner<S, T>(
     read_to_end: bool,
     text_file: bool,
     cancel: &CancellationToken,
-) -> Result<Option<usize>, NativeError>
+) -> Result<Option<usize>, stypes::NativeError>
 where
     T: LogMessage + Sized,
     S: futures::Stream<Item = Box<[(usize, MessageStreamItem<T>)]>> + Unpin,
@@ -207,9 +201,9 @@ where
         .map_or_else(
             |err| match err {
                 ExportError::Cancelled => Ok(None),
-                _ => Err(NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::UnsupportedFileType,
+                _ => Err(stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::UnsupportedFileType,
                     message: Some(format!("{err}")),
                 }),
             },
diff --git a/application/apps/indexer/session/src/handlers/extract.rs b/application/apps/indexer/session/src/handlers/extract.rs
index f9cb72434f..b4f6eff1fa 100644
--- a/application/apps/indexer/session/src/handlers/extract.rs
+++ b/application/apps/indexer/session/src/handlers/extract.rs
@@ -1,19 +1,12 @@
-use crate::{
-    events::{NativeError, NativeErrorKind},
-    operations::OperationResult,
-    progress::Severity,
-};
+use crate::operations::OperationResult;
 
-use processor::search::{
-    extractor::{ExtractedMatchValue, MatchesExtractor},
-    filter::SearchFilter,
-};
+use processor::search::{extractor::MatchesExtractor, filter::SearchFilter};
 use std::path::Path;
 
 pub fn handle<'a, I>(
     target_file_path: &Path,
     filters: I,
-) -> OperationResult<Vec<ExtractedMatchValue>>
+) -> OperationResult<Vec<stypes::ExtractedMatchValue>>
 where
     I: Iterator<Item = &'a SearchFilter>,
 {
@@ -21,9 +14,9 @@ where
     extractor
         .extract_matches()
         .map(Some)
-        .map_err(|e| NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::OperationSearch,
+        .map_err(|e| stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::OperationSearch,
             message: Some(format!(
                 "Fail to execute extract search result operation. Error: {e}"
             )),
diff --git a/application/apps/indexer/session/src/handlers/observe.rs b/application/apps/indexer/session/src/handlers/observe.rs
index d09da6b09f..582de9269e 100644
--- a/application/apps/indexer/session/src/handlers/observe.rs
+++ b/application/apps/indexer/session/src/handlers/observe.rs
@@ -1,38 +1,33 @@
 use crate::{
-    events::{NativeError, NativeErrorKind},
     handlers::observing,
     operations::{OperationAPI, OperationResult},
-    progress::Severity,
     state::SessionStateAPI,
 };
 use log::error;
-use sources::{
-    factory::{ObserveOptions, ObserveOrigin, ParserType},
-    producer::SdeReceiver,
-};
+use sources::producer::SdeReceiver;
 
 pub async fn start_observing(
     operation_api: OperationAPI,
     state: SessionStateAPI,
-    mut options: ObserveOptions,
+    mut options: stypes::ObserveOptions,
     rx_sde: Option<SdeReceiver>,
 ) -> OperationResult<()> {
-    if let ParserType::Dlt(ref mut settings) = options.parser {
+    if let stypes::ParserType::Dlt(ref mut settings) = options.parser {
         settings.load_fibex_metadata();
     };
     if let Err(err) = state.add_executed_observe(options.clone()).await {
         error!("Fail to store observe options: {:?}", err);
     }
     match &options.origin {
-        ObserveOrigin::File(uuid, file_origin, filename) => {
+        stypes::ObserveOrigin::File(uuid, file_origin, filename) => {
             let (is_text, session_file_origin) = (
-                matches!(options.parser, ParserType::Text),
+                matches!(options.parser, stypes::ParserType::Text(())),
                 state.get_session_file_origin().await?,
             );
             match session_file_origin {
-                Some(origin) if origin.is_linked() => Err(NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Configuration,
+                Some(origin) if origin.is_linked() => Err(stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Configuration,
                     message: Some(String::from(
                         "Cannot observe file, because session is linked to other text file",
                     )),
@@ -61,18 +56,18 @@ pub async fn start_observing(
                 }
             }
         }
-        ObserveOrigin::Concat(files) => {
+        stypes::ObserveOrigin::Concat(files) => {
             if files.is_empty() {
-                Err(NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Configuration,
+                Err(stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Configuration,
                     message: Some(String::from("No files are defined for Concat operation")),
                 })
             } else {
                 observing::concat::concat_files(operation_api, state, files, &options.parser).await
             }
         }
-        ObserveOrigin::Stream(uuid, transport) => {
+        stypes::ObserveOrigin::Stream(uuid, transport) => {
             observing::stream::observe_stream(
                 operation_api,
                 state,
diff --git a/application/apps/indexer/session/src/handlers/observing/concat.rs b/application/apps/indexer/session/src/handlers/observing/concat.rs
index 846baa1a19..c12073962b 100644
--- a/application/apps/indexer/session/src/handlers/observing/concat.rs
+++ b/application/apps/indexer/session/src/handlers/observing/concat.rs
@@ -1,15 +1,10 @@
 use crate::{
-    events::{NativeError, NativeErrorKind},
     operations::{OperationAPI, OperationResult},
-    progress::Severity,
     state::SessionStateAPI,
 };
-use sources::{
-    binary::{
-        pcap::{legacy::PcapLegacyByteSource, ng::PcapngByteSource},
-        raw::BinaryByteSource,
-    },
-    factory::{FileFormat, ParserType},
+use sources::binary::{
+    pcap::{legacy::PcapLegacyByteSource, ng::PcapngByteSource},
+    raw::BinaryByteSource,
 };
 use std::{fs::File, path::PathBuf};
 
@@ -17,8 +12,8 @@ use std::{fs::File, path::PathBuf};
 pub async fn concat_files(
     operation_api: OperationAPI,
     state: SessionStateAPI,
-    files: &[(String, FileFormat, PathBuf)],
-    parser: &ParserType,
+    files: &[(String, stypes::FileFormat, PathBuf)],
+    parser: &stypes::ParserType,
 ) -> OperationResult<()> {
     for file in files.iter() {
         let (uuid, _file_type, _filename) = file;
@@ -26,18 +21,18 @@ pub async fn concat_files(
     }
     for file in files.iter() {
         let (uuid, file_type, filename) = file;
-        let source_id = state.get_source(uuid).await?.ok_or(NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
+        let source_id = state.get_source(uuid).await?.ok_or(stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Io,
             message: Some(format!(
                 "Cannot find source id for file {} with alias {}",
                 filename.to_string_lossy(),
                 uuid,
             )),
         })?;
-        let input_file = File::open(filename).map_err(|e| NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
+        let input_file = File::open(filename).map_err(|e| stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Io,
             message: Some(format!(
                 "Fail open file {}: {}",
                 filename.to_string_lossy(),
@@ -45,7 +40,7 @@ pub async fn concat_files(
             )),
         })?;
         match file_type {
-            FileFormat::Binary => {
+            stypes::FileFormat::Binary => {
                 super::run_source(
                     operation_api.clone(),
                     state.clone(),
@@ -57,7 +52,7 @@ pub async fn concat_files(
                 )
                 .await?
             }
-            FileFormat::PcapLegacy => {
+            stypes::FileFormat::PcapLegacy => {
                 super::run_source(
                     operation_api.clone(),
                     state.clone(),
@@ -69,7 +64,7 @@ pub async fn concat_files(
                 )
                 .await?
             }
-            FileFormat::PcapNG => {
+            stypes::FileFormat::PcapNG => {
                 super::run_source(
                     operation_api.clone(),
                     state.clone(),
@@ -81,7 +76,7 @@ pub async fn concat_files(
                 )
                 .await?
             }
-            FileFormat::Text => {
+            stypes::FileFormat::Text => {
                 super::run_source(
                     operation_api.clone(),
                     state.clone(),
diff --git a/application/apps/indexer/session/src/handlers/observing/file.rs b/application/apps/indexer/session/src/handlers/observing/file.rs
index ee71a223bc..3bdf2aed0c 100644
--- a/application/apps/indexer/session/src/handlers/observing/file.rs
+++ b/application/apps/indexer/session/src/handlers/observing/file.rs
@@ -1,16 +1,11 @@
 use crate::{
-    events::{NativeError, NativeErrorKind},
     operations::{OperationAPI, OperationResult},
-    progress::Severity,
     state::SessionStateAPI,
     tail,
 };
-use sources::{
-    binary::{
-        pcap::{legacy::PcapLegacyByteSource, ng::PcapngByteSource},
-        raw::BinaryByteSource,
-    },
-    factory::{FileFormat, ParserType},
+use sources::binary::{
+    pcap::{legacy::PcapLegacyByteSource, ng::PcapngByteSource},
+    raw::BinaryByteSource,
 };
 use std::{fs::File, path::Path};
 use tokio::{
@@ -23,9 +18,9 @@ pub async fn observe_file<'a>(
     operation_api: OperationAPI,
     state: SessionStateAPI,
     uuid: &str,
-    file_format: &FileFormat,
+    file_format: &stypes::FileFormat,
     filename: &Path,
-    parser: &'a ParserType,
+    parser: &'a stypes::ParserType,
 ) -> OperationResult<()> {
     let source_id = state.add_source(uuid).await?;
     let (tx_tail, mut rx_tail): (
@@ -33,7 +28,7 @@ pub async fn observe_file<'a>(
         Receiver<Result<(), tail::Error>>,
     ) = channel(1);
     match file_format {
-        FileFormat::Binary => {
+        stypes::FileFormat::Binary => {
             let source = BinaryByteSource::new(input_file(filename)?);
             let (_, listening) = join!(
                 tail::track(filename, tx_tail, operation_api.cancellation_token()),
@@ -49,7 +44,7 @@ pub async fn observe_file<'a>(
             );
             listening
         }
-        FileFormat::PcapLegacy => {
+        stypes::FileFormat::PcapLegacy => {
             let source = PcapLegacyByteSource::new(input_file(filename)?)?;
             let (_, listening) = join!(
                 tail::track(filename, tx_tail, operation_api.cancellation_token()),
@@ -65,7 +60,7 @@ pub async fn observe_file<'a>(
             );
             listening
         }
-        FileFormat::PcapNG => {
+        stypes::FileFormat::PcapNG => {
             let source = PcapngByteSource::new(input_file(filename)?)?;
             let (_, listening) = join!(
                 tail::track(filename, tx_tail, operation_api.cancellation_token()),
@@ -81,7 +76,7 @@ pub async fn observe_file<'a>(
             );
             listening
         }
-        FileFormat::Text => {
+        stypes::FileFormat::Text => {
             state.set_session_file(Some(filename.to_path_buf())).await?;
             // Grab main file content
             state.update_session(source_id).await?;
@@ -95,9 +90,9 @@ pub async fn observe_file<'a>(
                     let result = select! {
                         res = async move {
                             while let Some(update) = rx_tail.recv().await {
-                                update.map_err(|err| NativeError {
-                                    severity: Severity::ERROR,
-                                    kind: NativeErrorKind::Interrupted,
+                                update.map_err(|err| stypes::NativeError {
+                                    severity: stypes::Severity::ERROR,
+                                    kind: stypes::NativeErrorKind::Interrupted,
                                     message: Some(err.to_string()),
                                 })?;
                                 state.update_session(source_id).await?;
@@ -112,9 +107,9 @@ pub async fn observe_file<'a>(
             );
             result
                 .and_then(|_| {
-                    tracker.map_err(|e| NativeError {
-                        severity: Severity::ERROR,
-                        kind: NativeErrorKind::Interrupted,
+                    tracker.map_err(|e| stypes::NativeError {
+                        severity: stypes::Severity::ERROR,
+                        kind: stypes::NativeErrorKind::Interrupted,
                         message: Some(format!("Tailing error: {e}")),
                     })
                 })
@@ -123,10 +118,10 @@ pub async fn observe_file<'a>(
     }
 }
 
-fn input_file(filename: &Path) -> Result<File, NativeError> {
-    File::open(filename).map_err(|e| NativeError {
-        severity: Severity::ERROR,
-        kind: NativeErrorKind::Io,
+fn input_file(filename: &Path) -> Result<File, stypes::NativeError> {
+    File::open(filename).map_err(|e| stypes::NativeError {
+        severity: stypes::Severity::ERROR,
+        kind: stypes::NativeErrorKind::Io,
         message: Some(format!(
             "Fail open file {}: {}",
             filename.to_string_lossy(),
diff --git a/application/apps/indexer/session/src/handlers/observing/mod.rs b/application/apps/indexer/session/src/handlers/observing/mod.rs
index 94569c2c41..57c285bc5d 100644
--- a/application/apps/indexer/session/src/handlers/observing/mod.rs
+++ b/application/apps/indexer/session/src/handlers/observing/mod.rs
@@ -13,7 +13,6 @@ use parsers::{
     LogMessage, MessageStreamItem, ParseYield, Parser,
 };
 use sources::{
-    factory::ParserType,
     producer::{MessageProducer, SdeReceiver},
     ByteSource,
 };
@@ -41,7 +40,7 @@ pub async fn run_source<S: ByteSource>(
     state: SessionStateAPI,
     source: S,
     source_id: u16,
-    parser: &ParserType,
+    parser: &stypes::ParserType,
     rx_sde: Option<SdeReceiver>,
     rx_tail: Option<Receiver<Result<(), tail::Error>>>,
 ) -> OperationResult<()> {
@@ -73,12 +72,12 @@ async fn run_source_intern<S: ByteSource>(
     state: SessionStateAPI,
     source: S,
     source_id: u16,
-    parser: &ParserType,
+    parser: &stypes::ParserType,
     rx_sde: Option<SdeReceiver>,
     rx_tail: Option<Receiver<Result<(), tail::Error>>>,
 ) -> OperationResult<()> {
     match parser {
-        ParserType::SomeIp(settings) => {
+        stypes::ParserType::SomeIp(settings) => {
             let someip_parser = match &settings.fibex_file_paths {
                 Some(paths) => {
                     SomeipParser::from_fibex_files(paths.iter().map(PathBuf::from).collect())
@@ -88,11 +87,11 @@ async fn run_source_intern<S: ByteSource>(
             let producer = MessageProducer::new(someip_parser, source, rx_sde);
             run_producer(operation_api, state, source_id, producer, rx_tail).await
         }
-        ParserType::Text => {
+        stypes::ParserType::Text(()) => {
             let producer = MessageProducer::new(StringTokenizer {}, source, rx_sde);
             run_producer(operation_api, state, source_id, producer, rx_tail).await
         }
-        ParserType::Dlt(settings) => {
+        stypes::ParserType::Dlt(settings) => {
             let fmt_options = Some(FormatOptions::from(settings.tz.as_ref()));
             let someip_metadata = settings.fibex_file_paths.as_ref().and_then(|paths| {
                 FibexSomeipMetadata::from_fibex_files(paths.iter().map(PathBuf::from).collect())
diff --git a/application/apps/indexer/session/src/handlers/observing/stream.rs b/application/apps/indexer/session/src/handlers/observing/stream.rs
index 3da365a801..8e4c7ca2c9 100644
--- a/application/apps/indexer/session/src/handlers/observing/stream.rs
+++ b/application/apps/indexer/session/src/handlers/observing/stream.rs
@@ -1,13 +1,10 @@
 use crate::{
-    events::{NativeError, NativeErrorKind},
     handlers::observing,
     operations::{OperationAPI, OperationResult},
-    progress::Severity,
     state::SessionStateAPI,
 };
 use sources::{
     command::process::ProcessSource,
-    factory::{ParserType, Transport},
     producer::SdeReceiver,
     serial::serialport::SerialSource,
     socket::{tcp::TcpSource, udp::UdpSource},
@@ -17,18 +14,18 @@ pub async fn observe_stream<'a>(
     operation_api: OperationAPI,
     state: SessionStateAPI,
     uuid: &str,
-    transport: &Transport,
-    parser: &'a ParserType,
+    transport: &stypes::Transport,
+    parser: &'a stypes::ParserType,
     rx_sde: Option<SdeReceiver>,
 ) -> OperationResult<()> {
     let source_id = state.add_source(uuid).await?;
     match transport {
-        Transport::UDP(settings) => {
+        stypes::Transport::UDP(settings) => {
             let udp_source = UdpSource::new(&settings.bind_addr, settings.multicast.clone())
                 .await
-                .map_err(|e| NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Interrupted,
+                .map_err(|e| stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Interrupted,
                     message: Some(format!("{e}")),
                 })?;
             observing::run_source(
@@ -42,12 +39,12 @@ pub async fn observe_stream<'a>(
             )
             .await
         }
-        Transport::TCP(settings) => {
+        stypes::Transport::TCP(settings) => {
             let tcp_source = TcpSource::new(settings.bind_addr.clone())
                 .await
-                .map_err(|e| NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Interrupted,
+                .map_err(|e| stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Interrupted,
                     message: Some(format!("{e}")),
                 })?;
             observing::run_source(
@@ -61,10 +58,10 @@ pub async fn observe_stream<'a>(
             )
             .await
         }
-        Transport::Serial(settings) => {
-            let serial_source = SerialSource::new(settings).map_err(|e| NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Interrupted,
+        stypes::Transport::Serial(settings) => {
+            let serial_source = SerialSource::new(settings).map_err(|e| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Interrupted,
                 message: Some(format!("{e}")),
             })?;
             observing::run_source(
@@ -78,16 +75,16 @@ pub async fn observe_stream<'a>(
             )
             .await
         }
-        Transport::Process(settings) => {
+        stypes::Transport::Process(settings) => {
             let process_source = ProcessSource::new(
                 settings.command.clone(),
                 settings.cwd.clone(),
                 settings.envs.clone(),
             )
             .await
-            .map_err(|e| NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Interrupted,
+            .map_err(|e| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Interrupted,
                 message: Some(format!("{e}")),
             })?;
             observing::run_source(
diff --git a/application/apps/indexer/session/src/handlers/search.rs b/application/apps/indexer/session/src/handlers/search.rs
index 403196deca..f9b690e7c0 100644
--- a/application/apps/indexer/session/src/handlers/search.rs
+++ b/application/apps/indexer/session/src/handlers/search.rs
@@ -1,12 +1,10 @@
 use crate::{
-    events::{NativeError, NativeErrorKind},
     operations::{OperationAPI, OperationResult},
-    progress::Severity,
     state::SessionStateAPI,
 };
 use log::debug;
 use processor::{
-    map::{FilterMatch, FiltersStats},
+    map::FiltersStats,
     search::{
         filter::SearchFilter,
         searchers::{self, regular::RegularSearchHolder},
@@ -37,11 +35,14 @@ pub async fn execute_search(
     state.drop_search().await?;
     let (rows, read_bytes) = state.get_stream_len().await?;
     let mut holder = state.get_search_holder(operation_api.id()).await?;
-    if let Err(err) = holder.setup(filters.clone()).map_err(|e| NativeError {
-        severity: Severity::ERROR,
-        kind: NativeErrorKind::OperationSearch,
-        message: Some(format!("Fail to setup search terms: {e}")),
-    }) {
+    if let Err(err) = holder
+        .setup(filters.clone())
+        .map_err(|e| stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::OperationSearch,
+            message: Some(format!("Fail to setup search terms: {e}")),
+        })
+    {
         state
             .set_search_holder(Some(holder), operation_api.id())
             .await?;
@@ -70,11 +71,11 @@ pub async fn execute_search(
                 (
                     Range<usize>,
                     usize,
-                    Vec<FilterMatch>,
+                    Vec<stypes::FilterMatch>,
                     FiltersStats,
                     RegularSearchHolder,
                 ),
-                (Option<RegularSearchHolder>, NativeError),
+                (Option<RegularSearchHolder>, stypes::NativeError),
             >,
         > = select! {
             res = async {
@@ -87,17 +88,17 @@ pub async fn execute_search(
                     {
                         Ok(recv_results) => {
                             break recv_results.map_or(
-                                Err((None, NativeError {
-                                    severity: Severity::ERROR,
-                                    kind: NativeErrorKind::OperationSearch,
+                                Err((None, stypes::NativeError {
+                                    severity: stypes::Severity::ERROR,
+                                    kind: stypes::NativeErrorKind::OperationSearch,
                                     message: Some("Fail to receive search results".to_string()),
                                 })),
                                 |(holder, search_results)| {
                                     match search_results {
                                         Ok((processed, matches, stats)) => Ok((processed, matches.len(), matches, stats, holder)),
-                                        Err(err) => Err((Some(holder), NativeError {
-                                            severity: Severity::ERROR,
-                                            kind: NativeErrorKind::OperationSearch,
+                                        Err(err) => Err((Some(holder), stypes::NativeError {
+                                            severity: stypes::Severity::ERROR,
+                                            kind: stypes::NativeErrorKind::OperationSearch,
                                             message: Some(format!(
                                                 "Fail to execute search. Error: {err}"
                                             )),
diff --git a/application/apps/indexer/session/src/handlers/search_values.rs b/application/apps/indexer/session/src/handlers/search_values.rs
index 04dbc54ad4..d3456371ef 100644
--- a/application/apps/indexer/session/src/handlers/search_values.rs
+++ b/application/apps/indexer/session/src/handlers/search_values.rs
@@ -1,7 +1,5 @@
 use crate::{
-    events::{NativeError, NativeErrorKind},
     operations::{OperationAPI, OperationResult},
-    progress::Severity,
     state::SessionStateAPI,
 };
 use log::debug;
@@ -31,11 +29,14 @@ pub async fn execute_value_search(
     state.drop_search_values().await?;
     let (rows, read_bytes) = state.get_stream_len().await?;
     let mut holder = state.get_search_values_holder(operation_api.id()).await?;
-    if let Err(err) = holder.setup(filters.clone()).map_err(|e| NativeError {
-        severity: Severity::ERROR,
-        kind: NativeErrorKind::OperationSearch,
-        message: Some(format!("Fail to setup filters: {e}")),
-    }) {
+    if let Err(err) = holder
+        .setup(filters.clone())
+        .map_err(|e| stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::OperationSearch,
+            message: Some(format!("Fail to setup filters: {e}")),
+        })
+    {
         state
             .set_search_values_holder(Some(holder), operation_api.id())
             .await?;
@@ -65,7 +66,7 @@ pub async fn execute_value_search(
                     HashMap<u8, Vec<(u64, f64)>>,
                     ValueSearchHolder,
                 ),
-                (Option<ValueSearchHolder>, NativeError),
+                (Option<ValueSearchHolder>, stypes::NativeError),
             >,
         > = select! {
             res = async {
@@ -78,17 +79,17 @@ pub async fn execute_value_search(
                     {
                         Ok(recv_results) => {
                             break recv_results.map_or(
-                                Err((None, NativeError {
-                                    severity: Severity::ERROR,
-                                    kind: NativeErrorKind::OperationSearch,
+                                Err((None, stypes::NativeError {
+                                    severity: stypes::Severity::ERROR,
+                                    kind: stypes::NativeErrorKind::OperationSearch,
                                     message: Some("Fail to receive search values results".to_string()),
                                 })),
                                 |(holder, search_results)| {
                                     match search_results {
                                         Ok((processed, values)) => Ok((processed, values, holder)),
-                                        Err(err) => Err((Some(holder), NativeError {
-                                            severity: Severity::ERROR,
-                                            kind: NativeErrorKind::OperationSearch,
+                                        Err(err) => Err((Some(holder), stypes::NativeError {
+                                            severity: stypes::Severity::ERROR,
+                                            kind: stypes::NativeErrorKind::OperationSearch,
                                             message: Some(format!(
                                                 "Fail to execute search values. Error: {err}"
                                             )),
diff --git a/application/apps/indexer/session/src/handlers/sleep.rs b/application/apps/indexer/session/src/handlers/sleep.rs
index ce671276b9..9c32961d6f 100644
--- a/application/apps/indexer/session/src/handlers/sleep.rs
+++ b/application/apps/indexer/session/src/handlers/sleep.rs
@@ -1,30 +1,24 @@
 use crate::operations::{OperationAPI, OperationResult};
-use serde::{Deserialize, Serialize};
 use tokio::{select, time};
 
-#[derive(Debug, Serialize, Deserialize)]
-pub struct SleepResult {
-    pub sleep_well: bool,
-}
-
 pub async fn handle(
     operation_api: &OperationAPI,
     ms: u64,
     ignore_cancellation: bool,
-) -> OperationResult<SleepResult> {
+) -> OperationResult<stypes::ResultSleep> {
     if ignore_cancellation {
         time::sleep(time::Duration::from_millis(ms)).await;
-        Ok(Some(SleepResult { sleep_well: true }))
+        Ok(Some(stypes::ResultSleep { sleep_well: true }))
     } else {
         let canceler = operation_api.cancellation_token();
         select! {
             _ = async move {
                 time::sleep(time::Duration::from_millis(ms)).await;
             } => {
-                Ok(Some( SleepResult { sleep_well: true }))
+                Ok(Some( stypes::ResultSleep { sleep_well: true }))
             },
             _ = canceler.cancelled() => {
-                Ok(Some( SleepResult { sleep_well: false }))
+                Ok(Some( stypes::ResultSleep { sleep_well: false }))
             }
         }
     }
diff --git a/application/apps/indexer/session/src/lib.rs b/application/apps/indexer/session/src/lib.rs
index 4d7b8f16b2..81f6bb982d 100644
--- a/application/apps/indexer/session/src/lib.rs
+++ b/application/apps/indexer/session/src/lib.rs
@@ -1,4 +1,3 @@
-pub mod events;
 mod handlers;
 pub mod operations;
 pub mod paths;
@@ -10,18 +9,14 @@ pub mod tracker;
 pub mod unbound;
 
 use std::sync::Mutex;
-
-pub use sources::factory;
 use tokio::sync::mpsc;
 
-use crate::events::LifecycleTransition;
-
 extern crate lazy_static;
 
 lazy_static::lazy_static! {
     pub static ref TRACKER_CHANNEL: Mutex<(
-        mpsc::UnboundedSender<LifecycleTransition>,
-        Option<mpsc::UnboundedReceiver<LifecycleTransition>>
+        mpsc::UnboundedSender<stypes::LifecycleTransition>,
+        Option<mpsc::UnboundedReceiver<stypes::LifecycleTransition>>
     )> = {
         let (tx, rx) = mpsc::unbounded_channel();
         Mutex::new((tx, Some(rx)))
diff --git a/application/apps/indexer/session/src/operations.rs b/application/apps/indexer/session/src/operations.rs
index 66844f3d8c..1b042386af 100644
--- a/application/apps/indexer/session/src/operations.rs
+++ b/application/apps/indexer/session/src/operations.rs
@@ -1,18 +1,9 @@
-use crate::{
-    events::{CallbackEvent, ComputationError, NativeError, NativeErrorKind, OperationDone},
-    handlers,
-    progress::Severity,
-    state::SessionStateAPI,
-    tracker::OperationTrackerAPI,
-};
+use crate::{handlers, state::SessionStateAPI, tracker::OperationTrackerAPI};
 use log::{debug, error, warn};
 use merging::merger::FileMergeOptions;
 use processor::search::filter::SearchFilter;
 use serde::Serialize;
-use sources::{
-    factory::ObserveOptions,
-    producer::{SdeReceiver, SdeSender},
-};
+use sources::producer::{SdeReceiver, SdeSender};
 use std::{
     ops::RangeInclusive,
     path::PathBuf,
@@ -81,7 +72,7 @@ impl Operation {
 #[derive(Debug)]
 #[allow(clippy::large_enum_variant)]
 pub enum OperationKind {
-    Observe(ObserveOptions),
+    Observe(stypes::ObserveOptions),
     Search {
         filters: Vec<SearchFilter>,
     },
@@ -170,11 +161,11 @@ impl std::fmt::Display for OperationKind {
 #[derive(Debug, Serialize, Clone)]
 pub struct NoOperationResults;
 
-pub type OperationResult<T> = Result<Option<T>, NativeError>;
+pub type OperationResult<T> = Result<Option<T>, stypes::NativeError>;
 
 #[derive(Clone)]
 pub struct OperationAPI {
-    tx_callback_events: UnboundedSender<CallbackEvent>,
+    tx_callback_events: UnboundedSender<stypes::CallbackEvent>,
     operation_id: Uuid,
     state_api: SessionStateAPI,
     tracker_api: OperationTrackerAPI,
@@ -188,7 +179,7 @@ impl OperationAPI {
     pub fn new(
         state_api: SessionStateAPI,
         tracker_api: OperationTrackerAPI,
-        tx_callback_events: UnboundedSender<CallbackEvent>,
+        tx_callback_events: UnboundedSender<stypes::CallbackEvent>,
         operation_id: Uuid,
         cancellation_token: CancellationToken,
     ) -> Self {
@@ -210,7 +201,7 @@ impl OperationAPI {
         self.done_token.clone()
     }
 
-    pub fn emit(&self, event: CallbackEvent) {
+    pub fn emit(&self, event: stypes::CallbackEvent) {
         let event_log = format!("{event:?}");
         if let Err(err) = self.tx_callback_events.send(event) {
             error!("Fail to send event {}; error: {}", event_log, err)
@@ -218,11 +209,11 @@ impl OperationAPI {
     }
 
     pub fn started(&self) {
-        self.emit(CallbackEvent::OperationStarted(self.id()));
+        self.emit(stypes::CallbackEvent::OperationStarted(self.id()));
     }
 
     pub fn processing(&self) {
-        self.emit(CallbackEvent::OperationProcessing(self.id()));
+        self.emit(stypes::CallbackEvent::OperationProcessing(self.id()));
     }
 
     pub async fn finish<T>(&self, result: OperationResult<T>, alias: &str)
@@ -232,22 +223,22 @@ impl OperationAPI {
         let event = match result {
             Ok(result) => {
                 if let Some(result) = result.as_ref() {
-                    match serde_json::to_string(result) {
-                        Ok(serialized) => CallbackEvent::OperationDone(OperationDone {
+                    match stypes::serialize(result) {
+                        Ok(bytes) => stypes::CallbackEvent::OperationDone(stypes::OperationDone {
                             uuid: self.operation_id,
-                            result: Some(serialized),
+                            result: Some(bytes),
                         }),
-                        Err(err) => CallbackEvent::OperationError {
+                        Err(err) => stypes::CallbackEvent::OperationError {
                             uuid: self.operation_id,
-                            error: NativeError {
-                                severity: Severity::ERROR,
-                                kind: NativeErrorKind::ComputationFailed,
+                            error: stypes::NativeError {
+                                severity: stypes::Severity::ERROR,
+                                kind: stypes::NativeErrorKind::ComputationFailed,
                                 message: Some(format!("{err}")),
                             },
                         },
                     }
                 } else {
-                    CallbackEvent::OperationDone(OperationDone {
+                    stypes::CallbackEvent::OperationDone(stypes::OperationDone {
                         uuid: self.operation_id,
                         result: None,
                     })
@@ -258,7 +249,7 @@ impl OperationAPI {
                     "Operation {} done with error: {:?}",
                     self.operation_id, error
                 );
-                CallbackEvent::OperationError {
+                stypes::CallbackEvent::OperationError {
                     uuid: self.operation_id,
                     error,
                 }
@@ -284,7 +275,7 @@ impl OperationAPI {
         operation: Operation,
         tx_sde: Option<SdeSender>,
         rx_sde: Option<SdeReceiver>,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         let added = self
             .tracker_api
             .add_operation(
@@ -296,9 +287,9 @@ impl OperationAPI {
             )
             .await?;
         if !added {
-            return Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::ComputationFailed,
+            return Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::ComputationFailed,
                 message: Some(format!("Operation {} already exists", self.id())),
             });
         }
@@ -324,7 +315,9 @@ impl OperationAPI {
                 }
                 OperationKind::Search { filters } => {
                     api.finish(
-                        handlers::search::execute_search(&api, filters, state).await,
+                        handlers::search::execute_search(&api, filters, state)
+                            .await
+                            .map(|v| v.map(stypes::ResultU64)),
                         operation_str,
                     )
                     .await;
@@ -354,6 +347,7 @@ impl OperationAPI {
                                 api.cancellation_token(),
                             )
                             .await
+                            .map(stypes::ResultBool)
                             .ok()),
                         operation_str,
                     )
@@ -367,7 +361,8 @@ impl OperationAPI {
                             out_path,
                             ranges,
                         )
-                        .await,
+                        .await
+                        .map(|v| v.map(stypes::ResultBool)),
                         operation_str,
                     )
                     .await;
@@ -380,7 +375,8 @@ impl OperationAPI {
                         return;
                     };
                     api.finish(
-                        handlers::extract::handle(&session_file, filters.iter()),
+                        handlers::extract::handle(&session_file, filters.iter())
+                            .map(|v| v.map(stypes::ResultExtractedMatchValues)),
                         operation_str,
                     )
                     .await;
@@ -388,7 +384,11 @@ impl OperationAPI {
                 OperationKind::Map { dataset_len, range } => {
                     match state.get_scaled_map(dataset_len, range).await {
                         Ok(map) => {
-                            api.finish(Ok(Some(map)), operation_str).await;
+                            api.finish(
+                                Ok(Some(stypes::ResultScaledDistribution(map))),
+                                operation_str,
+                            )
+                            .await;
                         }
                         Err(err) => {
                             api.finish::<OperationResult<()>>(Err(err), operation_str)
@@ -399,7 +399,17 @@ impl OperationAPI {
                 OperationKind::Values { dataset_len, range } => {
                     match state.get_search_values(range, dataset_len).await {
                         Ok(map) => {
-                            api.finish(Ok(Some(map)), operation_str).await;
+                            api.finish(
+                                Ok(Some(stypes::ResultSearchValues(
+                                    map.into_iter()
+                                        .map(|(k, v)| {
+                                            (k, v.into_iter().map(|v| v.into()).collect())
+                                        })
+                                        .collect(),
+                                ))),
+                                operation_str,
+                            )
+                            .await;
                         }
                         Err(err) => {
                             api.finish::<OperationResult<()>>(Err(err), operation_str)
@@ -429,9 +439,9 @@ impl OperationAPI {
                                 .await;
                         } else {
                             api.finish::<OperationResult<()>>(
-                                Err(NativeError {
-                                    severity: Severity::WARNING,
-                                    kind: NativeErrorKind::Io,
+                                Err(stypes::NativeError {
+                                    severity: stypes::Severity::WARNING,
+                                    kind: stypes::NativeErrorKind::Io,
                                     message: Some(format!(
                                         "Fail to cancel operation {target}; operation isn't found"
                                     )),
@@ -443,9 +453,9 @@ impl OperationAPI {
                     }
                     Err(err) => {
                         api.finish::<OperationResult<()>>(
-                            Err(NativeError {
-                                severity: Severity::WARNING,
-                                kind: NativeErrorKind::Io,
+                            Err(stypes::NativeError {
+                                severity: stypes::Severity::WARNING,
+                                kind: stypes::NativeErrorKind::Io,
                                 message: Some(format!(
                                     "Fail to cancel operation {target}; error: {err:?}"
                                 )),
@@ -458,7 +468,7 @@ impl OperationAPI {
                 OperationKind::GetNearestPosition(position) => {
                     match state.get_nearest_position(position).await {
                         Ok(nearest) => {
-                            api.finish(Ok(nearest), operation_str).await;
+                            api.finish(Ok(Some(nearest)), operation_str).await;
                         }
                         Err(err) => {
                             api.finish::<OperationResult<()>>(Err(err), operation_str)
@@ -475,10 +485,10 @@ impl OperationAPI {
     }
 }
 
-pub fn uuid_from_str(operation_id: &str) -> Result<Uuid, ComputationError> {
+pub fn uuid_from_str(operation_id: &str) -> Result<Uuid, stypes::ComputationError> {
     match Uuid::parse_str(operation_id) {
         Ok(uuid) => Ok(uuid),
-        Err(e) => Err(ComputationError::Process(format!(
+        Err(e) => Err(stypes::ComputationError::Process(format!(
             "Fail to parse operation uuid from {operation_id}. Error: {e}"
         ))),
     }
@@ -488,7 +498,7 @@ pub async fn run(
     mut rx_operations: UnboundedReceiver<Operation>,
     state_api: SessionStateAPI,
     tracker_api: OperationTrackerAPI,
-    tx_callback_events: UnboundedSender<CallbackEvent>,
+    tx_callback_events: UnboundedSender<stypes::CallbackEvent>,
 ) {
     debug!("task is started");
     while let Some(operation) = rx_operations.recv().await {
@@ -508,7 +518,7 @@ pub async fn run(
                     (None, None)
                 };
             if let Err(err) = operation_api.execute(operation, tx_sde, rx_sde).await {
-                operation_api.emit(CallbackEvent::OperationError {
+                operation_api.emit(stypes::CallbackEvent::OperationError {
                     uuid: operation_api.id(),
                     error: err,
                 });
diff --git a/application/apps/indexer/session/src/paths.rs b/application/apps/indexer/session/src/paths.rs
index a4e02dc845..b57983b334 100644
--- a/application/apps/indexer/session/src/paths.rs
+++ b/application/apps/indexer/session/src/paths.rs
@@ -1,31 +1,27 @@
-use crate::{
-    events::{NativeError, NativeErrorKind},
-    progress::Severity,
-};
 use dirs;
 use std::path::PathBuf;
 
 const CHIPMUNK_HOME: &str = ".chipmunk";
 const CHIPMUNK_TMP: &str = "tmp";
 
-pub fn get_home_dir() -> Result<PathBuf, NativeError> {
+pub fn get_home_dir() -> Result<PathBuf, stypes::NativeError> {
     if let Some(home) = dirs::home_dir().take() {
         Ok(home.join(CHIPMUNK_HOME))
     } else {
-        Err(NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
+        Err(stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Io,
             message: Some(String::from("Fail to find home folder")),
         })
     }
 }
 
-pub fn get_streams_dir() -> Result<PathBuf, NativeError> {
+pub fn get_streams_dir() -> Result<PathBuf, stypes::NativeError> {
     let streams = get_home_dir()?.join(CHIPMUNK_TMP);
     if !streams.exists() {
-        std::fs::create_dir_all(&streams).map_err(|e| NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
+        std::fs::create_dir_all(&streams).map_err(|e| stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Io,
             message: Some(format!(
                 "Fail to create streams folder {}: {}",
                 streams.to_string_lossy(),
diff --git a/application/apps/indexer/session/src/progress.rs b/application/apps/indexer/session/src/progress.rs
index 002fa67c21..00926b60f2 100644
--- a/application/apps/indexer/session/src/progress.rs
+++ b/application/apps/indexer/session/src/progress.rs
@@ -1,9 +1,5 @@
-use crate::{
-    events::{ComputationError, LifecycleTransition},
-    TRACKER_CHANNEL,
-};
+use crate::TRACKER_CHANNEL;
 use log::{error, info};
-use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 use tokio::{
     select,
@@ -14,77 +10,25 @@ use tokio::{
 };
 use uuid::Uuid;
 
-#[derive(Debug, Serialize, Deserialize)]
-pub struct Notification {
-    pub severity: Severity,
-    pub content: String,
-    pub line: Option<usize>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(tag = "type")]
-pub enum Progress {
-    Ticks(Ticks),
-    Notification(Notification),
-    Stopped,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone, Default)]
-pub struct Ticks {
-    pub count: u64,
-    pub state: Option<String>,
-    pub total: Option<u64>,
-}
-impl Ticks {
-    pub fn done(&self) -> bool {
-        match self.total {
-            Some(total) => self.count == total,
-            None => false,
-        }
-    }
-
-    pub fn new() -> Self {
-        Ticks {
-            count: 0,
-            state: None,
-            total: None,
-        }
-    }
-}
-
-#[allow(clippy::upper_case_acronyms)]
-#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
-pub enum Severity {
-    WARNING,
-    ERROR,
-}
-
-impl Severity {
-    pub fn as_str(&self) -> &str {
-        match self {
-            Severity::WARNING => "WARNING",
-            Severity::ERROR => "ERROR",
-        }
-    }
-}
-
 /// Commands used to control/query the progress tracking
 #[derive(Debug)]
 pub enum ProgressCommand {
-    Content(oneshot::Sender<Result<String, ComputationError>>),
-    Abort(oneshot::Sender<Result<(), ComputationError>>),
+    Content(oneshot::Sender<Result<String, stypes::ComputationError>>),
+    Abort(oneshot::Sender<Result<(), stypes::ComputationError>>),
 }
 
 #[derive(Clone, Debug)]
 pub struct ProgressProviderAPI {
-    tx: UnboundedSender<LifecycleTransition>,
+    tx: UnboundedSender<stypes::LifecycleTransition>,
 }
 
 impl ProgressProviderAPI {
-    pub fn new() -> Result<Self, ComputationError> {
+    pub fn new() -> Result<Self, stypes::ComputationError> {
         let tx = {
             let tx_rx = TRACKER_CHANNEL.lock().map_err(|e| {
-                ComputationError::Communication(format!("Cannot init channels from mutex: {e}"))
+                stypes::ComputationError::Communication(format!(
+                    "Cannot init channels from mutex: {e}"
+                ))
             })?;
             tx_rx.0.clone()
             // scope will release Mutex lock
@@ -95,7 +39,7 @@ impl ProgressProviderAPI {
     pub fn started(&self, alias: &str, uuid: &Uuid) {
         if self
             .tx
-            .send(LifecycleTransition::started(uuid, alias))
+            .send(stypes::LifecycleTransition::started(uuid, alias))
             .is_err()
         {
             error!("Fail to report LifecycleTransition::Started. Channel is closed");
@@ -103,15 +47,19 @@ impl ProgressProviderAPI {
     }
 
     pub fn stopped(&self, uuid: &Uuid) {
-        if self.tx.send(LifecycleTransition::stopped(uuid)).is_err() {
+        if self
+            .tx
+            .send(stypes::LifecycleTransition::stopped(uuid))
+            .is_err()
+        {
             error!("Fail to report LifecycleTransition::Stopped. Channel is closed");
         }
     }
 
-    pub fn progress(&self, uuid: &Uuid, ticks: Ticks) {
+    pub fn progress(&self, uuid: &Uuid, ticks: stypes::Ticks) {
         if self
             .tx
-            .send(LifecycleTransition::ticks(uuid, ticks))
+            .send(stypes::LifecycleTransition::ticks(uuid, ticks))
             .is_err()
         {
             error!("Fail to report LifecycleTransition::Ticks. Channel is closed");
@@ -138,29 +86,33 @@ impl ProgressTrackerAPI {
         &self,
         command: ProgressCommand,
         rx_response: oneshot::Receiver<T>,
-    ) -> Result<T, ComputationError> {
+    ) -> Result<T, stypes::ComputationError> {
         let api_str = format!("{command:?}");
         self.tx_api.send(command).map_err(|e| {
-            ComputationError::Communication(format!("Failed to send to Api::{api_str}; error: {e}"))
+            stypes::ComputationError::Communication(format!(
+                "Failed to send to Api::{api_str}; error: {e}"
+            ))
         })?;
         rx_response.await.map_err(|_| {
-            ComputationError::Communication(format!("Failed to get response from Api::{api_str}"))
+            stypes::ComputationError::Communication(format!(
+                "Failed to get response from Api::{api_str}"
+            ))
         })
     }
 
-    pub async fn content(&self) -> Result<String, ComputationError> {
+    pub async fn content(&self) -> Result<String, stypes::ComputationError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(ProgressCommand::Content(tx), rx)
             .await?
     }
 
-    pub async fn abort(&self) -> Result<(), ComputationError> {
+    pub async fn abort(&self) -> Result<(), stypes::ComputationError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(ProgressCommand::Abort(tx), rx).await?
     }
 }
 
-fn log_if_err(res: Result<(), SendError<LifecycleTransition>>) {
+fn log_if_err(res: Result<(), SendError<stypes::LifecycleTransition>>) {
     if res.is_err() {
         error!("Fail to send event into lifecycle_events_channel. Channel is closed");
     }
@@ -172,17 +124,20 @@ fn log_if_err(res: Result<(), SendError<LifecycleTransition>>) {
 /// At any time, we can then track the progress of everything that is going on
 pub async fn run_tracking(
     mut command_rx: UnboundedReceiver<ProgressCommand>,
-) -> Result<mpsc::Receiver<LifecycleTransition>, ComputationError> {
-    let mut ongoing_operations: HashMap<Uuid, Ticks> = HashMap::new();
+) -> Result<mpsc::Receiver<stypes::LifecycleTransition>, stypes::ComputationError> {
+    let mut ongoing_operations: HashMap<Uuid, stypes::Ticks> = HashMap::new();
     let lifecycle_events_channel = mpsc::channel(1);
 
     let mut lifecycle_events = {
         let mut tx_rx = TRACKER_CHANNEL.lock().map_err(|e| {
-            ComputationError::Communication(format!("Cannot init channels from mutex: {e}"))
+            stypes::ComputationError::Communication(format!("Cannot init channels from mutex: {e}"))
         })?;
-        tx_rx.1.take().ok_or(ComputationError::Communication(
-            "ProgressTracker channel already taken".to_string(),
-        ))?
+        tx_rx
+            .1
+            .take()
+            .ok_or(stypes::ComputationError::Communication(
+                "ProgressTracker channel already taken".to_string(),
+            ))?
     };
 
     tokio::spawn(async move {
@@ -192,7 +147,7 @@ pub async fn run_tracking(
                     match command {
                         Some(ProgressCommand::Content(result_channel)) => {
                             let res = serde_json::to_string(&ongoing_operations)
-                                .map_err(|e| ComputationError::Process(format!("{e}")));
+                                .map_err(|e| stypes::ComputationError::Process(format!("{e}")));
                             let _ = result_channel.send(res);
                         }
                         Some(ProgressCommand::Abort(result_channel)) => {
@@ -204,20 +159,20 @@ pub async fn run_tracking(
                 }
                 lifecycle_event = lifecycle_events.recv() => {
                     match lifecycle_event {
-                        Some(LifecycleTransition::Started { uuid, alias }) => {
+                        Some(stypes::LifecycleTransition::Started { uuid, alias }) => {
                             info!("job {alias} ({uuid}) started");
-                            ongoing_operations.insert(uuid, Ticks::new());
-                            log_if_err(lifecycle_events_channel.0.send(LifecycleTransition::started(&uuid, &alias)).await);
+                            ongoing_operations.insert(uuid, stypes::Ticks::default());
+                            log_if_err(lifecycle_events_channel.0.send(stypes::LifecycleTransition::started(&uuid, &alias)).await);
                         }
-                        Some(LifecycleTransition::Stopped(uuid)) => {
+                        Some(stypes::LifecycleTransition::Stopped(uuid)) => {
                             info!("job {uuid} stopped");
                             ongoing_operations.remove(&uuid);
-                            log_if_err(lifecycle_events_channel.0.send(LifecycleTransition::Stopped(uuid)).await);
+                            log_if_err(lifecycle_events_channel.0.send(stypes::LifecycleTransition::Stopped(uuid)).await);
                         }
-                        Some(LifecycleTransition::Ticks {uuid, ticks}) => {
+                        Some(stypes::LifecycleTransition::Ticks {uuid, ticks}) => {
                             info!("job {uuid} reported progress: {ticks:?}");
                             ongoing_operations.insert(uuid, ticks.clone());
-                            log_if_err(lifecycle_events_channel.0.send(LifecycleTransition::ticks(&uuid, ticks)).await);
+                            log_if_err(lifecycle_events_channel.0.send(stypes::LifecycleTransition::ticks(&uuid, ticks)).await);
                         }
                         None => break,
 
diff --git a/application/apps/indexer/session/src/session.rs b/application/apps/indexer/session/src/session.rs
index 5dbd1efda4..344ec84f5a 100644
--- a/application/apps/indexer/session/src/session.rs
+++ b/application/apps/indexer/session/src/session.rs
@@ -1,16 +1,14 @@
 use crate::{
-    events::{CallbackEvent, ComputationError},
     operations,
     operations::Operation,
     state,
-    state::{AttachmentInfo, GrabbedElement, IndexesMode, SessionStateAPI, SourceDefinition},
+    state::{IndexesMode, SessionStateAPI},
     tracker,
     tracker::OperationTrackerAPI,
 };
 use futures::Future;
 use log::{debug, error, warn};
 use processor::{grabber::LineRange, search::filter::SearchFilter};
-use sources::{factory::ObserveOptions, sde};
 use std::{ops::RangeInclusive, path::PathBuf};
 use tokio::{
     join,
@@ -48,13 +46,13 @@ impl Session {
     ///
     pub async fn new(
         uuid: Uuid,
-    ) -> Result<(Self, UnboundedReceiver<CallbackEvent>), ComputationError> {
+    ) -> Result<(Self, UnboundedReceiver<stypes::CallbackEvent>), stypes::ComputationError> {
         let (tx_operations, rx_operations): OperationsChannel = unbounded_channel();
         let (tracker_api, rx_tracker_api) = OperationTrackerAPI::new();
         let (state_api, rx_state_api) = SessionStateAPI::new(tracker_api.clone());
         let (tx_callback_events, rx_callback_events): (
-            UnboundedSender<CallbackEvent>,
-            UnboundedReceiver<CallbackEvent>,
+            UnboundedSender<stypes::CallbackEvent>,
+            UnboundedReceiver<stypes::CallbackEvent>,
         ) = unbounded_channel();
         let session = Self {
             uuid,
@@ -123,7 +121,7 @@ impl Session {
             debug!("Session task is finished");
         });
         if tx.send(handle).is_err() {
-            Err(ComputationError::SessionCreatingFail)
+            Err(stypes::ComputationError::SessionCreatingFail)
         } else {
             Ok((session, rx_callback_events))
         }
@@ -133,7 +131,7 @@ impl Session {
         tx_operations: &UnboundedSender<Operation>,
         destroying: &CancellationToken,
         name: &str,
-        f: impl Future<Output = Result<(), crate::events::NativeError>> + Send + 'static,
+        f: impl Future<Output = Result<(), stypes::NativeError>> + Send + 'static,
     ) {
         if let Err(err) = f.await {
             error!("State loop exits with error:: {:?}", err);
@@ -152,70 +150,76 @@ impl Session {
         self.state.clone()
     }
 
-    pub async fn grab(&self, range: LineRange) -> Result<Vec<GrabbedElement>, ComputationError> {
+    pub async fn grab(
+        &self,
+        range: LineRange,
+    ) -> Result<stypes::GrabbedElementList, stypes::ComputationError> {
         self.state
             .grab(range)
             .await
-            .map_err(ComputationError::NativeError)
+            .map(|els| els.into())
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     pub async fn grab_indexed(
         &self,
         range: RangeInclusive<u64>,
-    ) -> Result<Vec<GrabbedElement>, ComputationError> {
+    ) -> Result<stypes::GrabbedElementList, stypes::ComputationError> {
         self.state
             .grab_indexed(range)
             .await
-            .map_err(ComputationError::NativeError)
+            .map(|els| els.into())
+            .map_err(stypes::ComputationError::NativeError)
     }
 
-    pub async fn set_indexing_mode(&self, mode: u8) -> Result<(), ComputationError> {
+    pub async fn set_indexing_mode(&self, mode: u8) -> Result<(), stypes::ComputationError> {
         self.state
             .set_indexing_mode(match mode {
                 0u8 => IndexesMode::Regular,
                 1u8 => IndexesMode::Breadcrumbs,
-                _ => return Err(ComputationError::InvalidData),
+                _ => return Err(stypes::ComputationError::InvalidData),
             })
             .await
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 
-    pub async fn get_indexed_len(&self) -> Result<usize, ComputationError> {
+    pub async fn get_indexed_len(&self) -> Result<usize, stypes::ComputationError> {
         self.state
             .get_indexed_len()
             .await
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     pub async fn get_around_indexes(
         &self,
         position: u64,
-    ) -> Result<(Option<u64>, Option<u64>), ComputationError> {
+    ) -> Result<stypes::AroundIndexes, stypes::ComputationError> {
         self.state
             .get_around_indexes(position)
             .await
-            .map_err(ComputationError::NativeError)
+            .map(|v| v.into())
+            .map_err(stypes::ComputationError::NativeError)
     }
 
-    pub async fn add_bookmark(&self, row: u64) -> Result<(), ComputationError> {
+    pub async fn add_bookmark(&self, row: u64) -> Result<(), stypes::ComputationError> {
         self.state
             .add_bookmark(row)
             .await
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 
-    pub async fn set_bookmarks(&self, rows: Vec<u64>) -> Result<(), ComputationError> {
+    pub async fn set_bookmarks(&self, rows: Vec<u64>) -> Result<(), stypes::ComputationError> {
         self.state
             .set_bookmarks(rows)
             .await
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 
-    pub async fn remove_bookmark(&self, row: u64) -> Result<(), ComputationError> {
+    pub async fn remove_bookmark(&self, row: u64) -> Result<(), stypes::ComputationError> {
         self.state
             .remove_bookmark(row)
             .await
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     pub async fn expand_breadcrumbs(
@@ -223,69 +227,71 @@ impl Session {
         seporator: u64,
         offset: u64,
         above: bool,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         self.state
             .expand_breadcrumbs(seporator, offset, above)
             .await
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     pub async fn grab_search(
         &self,
         range: LineRange,
-    ) -> Result<Vec<GrabbedElement>, ComputationError> {
+    ) -> Result<stypes::GrabbedElementList, stypes::ComputationError> {
         self.state
             .grab_search(range)
             .await
-            .map_err(ComputationError::NativeError)
+            .map(|els| els.into())
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     pub async fn grab_ranges(
         &self,
         ranges: Vec<RangeInclusive<u64>>,
-    ) -> Result<Vec<GrabbedElement>, ComputationError> {
+    ) -> Result<stypes::GrabbedElementList, stypes::ComputationError> {
         self.state
             .grab_ranges(ranges)
             .await
-            .map_err(ComputationError::NativeError)
+            .map(|els| els.into())
+            .map_err(stypes::ComputationError::NativeError)
     }
 
-    pub fn abort(&self, operation_id: Uuid, target: Uuid) -> Result<(), ComputationError> {
+    pub fn abort(&self, operation_id: Uuid, target: Uuid) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
                 operations::OperationKind::Cancel { target },
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
     pub async fn send_into_sde(
         &self,
         target: Uuid,
-        msg: sde::SdeRequest,
-    ) -> Result<sde::SdeResponse, ComputationError> {
+        msg: stypes::SdeRequest,
+    ) -> Result<stypes::SdeResponse, stypes::ComputationError> {
         let (tx_response, rx_response) = oneshot::channel();
         if let Some(tx_sde) = self
             .tracker
             .get_sde_sender(target)
             .await
-            .map_err(|e| ComputationError::IoOperation(format!("{e:?}")))?
+            .map_err(|e| stypes::ComputationError::IoOperation(format!("{e:?}")))?
         {
             tx_sde.send((msg, tx_response)).map_err(|_| {
-                ComputationError::Communication(String::from(
+                stypes::ComputationError::Communication(String::from(
                     "Fail to send message into SDE channel",
                 ))
             })?;
             rx_response
                 .await
                 .map_err(|_| {
-                    ComputationError::Communication(String::from(
+                    stypes::ComputationError::Communication(String::from(
                         "Fail to get response from SDE channel",
                     ))
                 })?
-                .map_err(ComputationError::Sde)
+                .map_err(stypes::ComputationError::Sde)
         } else {
-            Err(ComputationError::IoOperation(String::from(
+            Err(stypes::ComputationError::IoOperation(String::from(
                 "No SDE channel",
             )))
         }
@@ -296,18 +302,18 @@ impl Session {
         tx_operations: &UnboundedSender<Operation>,
         destroyed: Option<&CancellationToken>,
         destroying: &CancellationToken,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         destroying.cancel();
         tx_operations
             .send(Operation::new(operation_id, operations::OperationKind::End))
-            .map_err(|e| ComputationError::Communication(e.to_string()))?;
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))?;
         if let Some(destroyed) = destroyed {
             destroyed.cancelled().await;
         }
         Ok(())
     }
 
-    pub async fn stop(&self, operation_id: Uuid) -> Result<(), ComputationError> {
+    pub async fn stop(&self, operation_id: Uuid) -> Result<(), stypes::ComputationError> {
         Session::send_stop_signal(
             operation_id,
             &self.tx_operations,
@@ -317,39 +323,40 @@ impl Session {
         .await
     }
 
-    pub async fn get_stream_len(&self) -> Result<usize, ComputationError> {
+    pub async fn get_stream_len(&self) -> Result<usize, stypes::ComputationError> {
         self.state
             .get_stream_len()
             .await
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
             .map(|(rows, _bytes)| rows as usize)
     }
 
-    pub async fn get_search_result_len(&self) -> Result<usize, ComputationError> {
+    pub async fn get_search_result_len(&self) -> Result<usize, stypes::ComputationError> {
         self.state
             .get_search_result_len()
             .await
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     pub fn observe(
         &self,
         operation_id: Uuid,
-        options: ObserveOptions,
-    ) -> Result<(), ComputationError> {
+        options: stypes::ObserveOptions,
+    ) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
                 operations::OperationKind::Observe(options),
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
-    pub async fn get_sources(&self) -> Result<Vec<SourceDefinition>, ComputationError> {
+    pub async fn get_sources(&self) -> Result<stypes::Sources, stypes::ComputationError> {
         self.state
             .get_sources_definitions()
             .await
-            .map_err(ComputationError::NativeError)
+            .map(|v| v.into())
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     /// Exports data to the specified output path with the given parameters. This method is used to export
@@ -366,9 +373,9 @@ impl Session {
     ///
     /// # Returns
     ///
-    /// * `Result<(), ComputationError>`:
+    /// * `Result<(), stypes::ComputationError>`:
     ///     - `Ok(())` if the export is successful.
-    ///     - `Err(ComputationError)` if an error occurs during the export process.
+    ///     - `Err(stypes::ComputationError)` if an error occurs during the export process.
     ///
     pub fn export(
         &self,
@@ -378,7 +385,7 @@ impl Session {
         columns: Vec<usize>,
         spliter: Option<String>,
         delimiter: Option<String>,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
@@ -390,7 +397,7 @@ impl Session {
                     delimiter,
                 },
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
     pub fn export_raw(
@@ -398,66 +405,66 @@ impl Session {
         operation_id: Uuid,
         out_path: PathBuf,
         ranges: Vec<RangeInclusive<u64>>,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
                 operations::OperationKind::ExportRaw { out_path, ranges },
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
-    pub async fn is_raw_export_available(&self) -> Result<bool, ComputationError> {
+    pub async fn is_raw_export_available(&self) -> Result<bool, stypes::ComputationError> {
         self.state
             .is_raw_export_available()
             .await
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     pub fn apply_search_filters(
         &self,
         operation_id: Uuid,
         filters: Vec<SearchFilter>,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
                 operations::OperationKind::Search { filters },
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
     pub fn apply_search_values_filters(
         &self,
         operation_id: Uuid,
         filters: Vec<String>,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
                 operations::OperationKind::SearchValues { filters },
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
-    pub async fn drop_search(&self) -> Result<bool, ComputationError> {
+    pub async fn drop_search(&self) -> Result<bool, stypes::ComputationError> {
         self.state
             .drop_search()
             .await
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     pub fn extract_matches(
         &self,
         operation_id: Uuid,
         filters: Vec<SearchFilter>,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
                 operations::OperationKind::Extract { filters },
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
     pub fn get_map(
@@ -465,13 +472,13 @@ impl Session {
         operation_id: Uuid,
         dataset_len: u16,
         range: Option<(u64, u64)>,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
                 operations::OperationKind::Map { dataset_len, range },
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
     pub fn get_values(
@@ -479,40 +486,44 @@ impl Session {
         operation_id: Uuid,
         dataset_len: u16,
         range: Option<RangeInclusive<u64>>,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
                 operations::OperationKind::Values { dataset_len, range },
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
     pub fn get_nearest_to(
         &self,
         operation_id: Uuid,
         position_in_stream: u64,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
                 operations::OperationKind::GetNearestPosition(position_in_stream),
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
-    pub async fn get_attachments(&self) -> Result<Vec<AttachmentInfo>, ComputationError> {
+    pub async fn get_attachments(
+        &self,
+    ) -> Result<stypes::AttachmentList, stypes::ComputationError> {
         self.state
             .get_attachments()
             .await
-            .map_err(ComputationError::NativeError)
+            .map(|v| v.into())
+            .map_err(stypes::ComputationError::NativeError)
     }
 
-    pub async fn get_indexed_ranges(&self) -> Result<Vec<RangeInclusive<u64>>, ComputationError> {
+    pub async fn get_indexed_ranges(&self) -> Result<stypes::Ranges, stypes::ComputationError> {
         self.state
             .get_indexed_ranges()
             .await
-            .map_err(ComputationError::NativeError)
+            .map(|v| v.into())
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     /// Used for debug goals
@@ -521,26 +532,26 @@ impl Session {
         operation_id: Uuid,
         ms: u64,
         ignore_cancellation: bool,
-    ) -> Result<(), ComputationError> {
+    ) -> Result<(), stypes::ComputationError> {
         self.tx_operations
             .send(Operation::new(
                 operation_id,
                 operations::OperationKind::Sleep(ms, ignore_cancellation),
             ))
-            .map_err(|e| ComputationError::Communication(e.to_string()))
+            .map_err(|e| stypes::ComputationError::Communication(e.to_string()))
     }
 
     /// Used for debug goals
-    pub async fn trigger_state_error(&self) -> Result<(), ComputationError> {
+    pub async fn trigger_state_error(&self) -> Result<(), stypes::ComputationError> {
         self.state
             .shutdown_with_error()
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     /// Used for debug goals
-    pub async fn trigger_tracker_error(&self) -> Result<(), ComputationError> {
+    pub async fn trigger_tracker_error(&self) -> Result<(), stypes::ComputationError> {
         self.tracker
             .shutdown_with_error()
-            .map_err(ComputationError::NativeError)
+            .map_err(stypes::ComputationError::NativeError)
     }
 }
diff --git a/application/apps/indexer/session/src/state/api.rs b/application/apps/indexer/session/src/state/api.rs
index e89bcb5a06..d0a0305f44 100644
--- a/application/apps/indexer/session/src/state/api.rs
+++ b/application/apps/indexer/session/src/state/api.rs
@@ -1,12 +1,8 @@
+use super::values::graph::CandlePoint;
 use crate::{
-    events::NativeError,
     state::{
-        indexes::controller::Mode as IndexesMode,
-        observed::Observed,
-        session_file::{GrabbedElement, SessionFileOrigin},
-        source_ids::SourceDefinition,
-        values::ValuesError,
-        AttachmentInfo,
+        indexes::controller::Mode as IndexesMode, observed::Observed,
+        session_file::SessionFileOrigin, values::ValuesError,
     },
     tracker::OperationTrackerAPI,
 };
@@ -14,11 +10,11 @@ use log::error;
 use parsers;
 use processor::{
     grabber::LineRange,
-    map::{FilterMatch, FiltersStats, NearestPosition, ScaledDistribution},
+    map::{FiltersStats, ScaledDistribution},
     search::searchers::{regular::RegularSearchHolder, values::ValueSearchHolder},
 };
-use sources::factory::ObserveOptions;
 use std::{collections::HashMap, fmt::Display, ops::RangeInclusive, path::PathBuf};
+use stypes::GrabbedElement;
 use tokio::sync::{
     mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
     oneshot,
@@ -26,20 +22,29 @@ use tokio::sync::{
 use tokio_util::sync::CancellationToken;
 use uuid::Uuid;
 
-use super::values::graph::CandlePoint;
-
 pub enum Api {
-    SetSessionFile((Option<PathBuf>, oneshot::Sender<Result<(), NativeError>>)),
-    GetSessionFile(oneshot::Sender<Result<PathBuf, NativeError>>),
-    WriteSessionFile((u16, String, oneshot::Sender<Result<(), NativeError>>)),
-    FlushSessionFile(oneshot::Sender<Result<(), NativeError>>),
-    GetSessionFileOrigin(oneshot::Sender<Result<Option<SessionFileOrigin>, NativeError>>),
-    UpdateSession((u16, oneshot::Sender<Result<bool, NativeError>>)),
+    SetSessionFile(
+        (
+            Option<PathBuf>,
+            oneshot::Sender<Result<(), stypes::NativeError>>,
+        ),
+    ),
+    GetSessionFile(oneshot::Sender<Result<PathBuf, stypes::NativeError>>),
+    WriteSessionFile(
+        (
+            u16,
+            String,
+            oneshot::Sender<Result<(), stypes::NativeError>>,
+        ),
+    ),
+    FlushSessionFile(oneshot::Sender<Result<(), stypes::NativeError>>),
+    GetSessionFileOrigin(oneshot::Sender<Result<Option<SessionFileOrigin>, stypes::NativeError>>),
+    UpdateSession((u16, oneshot::Sender<Result<bool, stypes::NativeError>>)),
     AddSource((String, oneshot::Sender<u16>)),
     GetSource((String, oneshot::Sender<Option<u16>>)),
-    GetSourcesDefinitions(oneshot::Sender<Vec<SourceDefinition>>),
+    GetSourcesDefinitions(oneshot::Sender<Vec<stypes::SourceDefinition>>),
     #[allow(clippy::large_enum_variant)]
-    AddExecutedObserve((ObserveOptions, oneshot::Sender<()>)),
+    AddExecutedObserve((stypes::ObserveOptions, oneshot::Sender<()>)),
     GetExecutedHolder(oneshot::Sender<Observed>),
     IsRawExportAvailable(oneshot::Sender<bool>),
     /// Export operation containing parameters for exporting data.
@@ -72,50 +77,55 @@ pub enum Api {
         /// Used to stop export operation
         cancel: CancellationToken,
         /// Used to send operation status result
-        tx_response: oneshot::Sender<Result<bool, NativeError>>,
+        tx_response: oneshot::Sender<Result<bool, stypes::NativeError>>,
     },
     FileRead(oneshot::Sender<()>),
     Grab(
         (
             LineRange,
-            oneshot::Sender<Result<Vec<GrabbedElement>, NativeError>>,
+            oneshot::Sender<Result<Vec<GrabbedElement>, stypes::NativeError>>,
         ),
     ),
     GrabIndexed(
         (
             RangeInclusive<u64>,
-            oneshot::Sender<Result<Vec<GrabbedElement>, NativeError>>,
+            oneshot::Sender<Result<Vec<GrabbedElement>, stypes::NativeError>>,
+        ),
+    ),
+    SetIndexingMode(
+        (
+            IndexesMode,
+            oneshot::Sender<Result<(), stypes::NativeError>>,
         ),
     ),
-    SetIndexingMode((IndexesMode, oneshot::Sender<Result<(), NativeError>>)),
     GetIndexedMapLen(oneshot::Sender<usize>),
     #[allow(clippy::type_complexity)]
     GetDistancesAroundIndex(
         (
             u64,
-            oneshot::Sender<Result<(Option<u64>, Option<u64>), NativeError>>,
+            oneshot::Sender<Result<(Option<u64>, Option<u64>), stypes::NativeError>>,
         ),
     ),
-    AddBookmark((u64, oneshot::Sender<Result<(), NativeError>>)),
-    SetBookmarks((Vec<u64>, oneshot::Sender<Result<(), NativeError>>)),
-    RemoveBookmark((u64, oneshot::Sender<Result<(), NativeError>>)),
+    AddBookmark((u64, oneshot::Sender<Result<(), stypes::NativeError>>)),
+    SetBookmarks((Vec<u64>, oneshot::Sender<Result<(), stypes::NativeError>>)),
+    RemoveBookmark((u64, oneshot::Sender<Result<(), stypes::NativeError>>)),
     ExpandBreadcrumbs {
         seporator: u64,
         offset: u64,
         above: bool,
-        tx_response: oneshot::Sender<Result<(), NativeError>>,
+        tx_response: oneshot::Sender<Result<(), stypes::NativeError>>,
     },
     GrabSearch(
         (
             LineRange,
-            oneshot::Sender<Result<Vec<GrabbedElement>, NativeError>>,
+            oneshot::Sender<Result<Vec<GrabbedElement>, stypes::NativeError>>,
         ),
     ),
     #[allow(clippy::type_complexity)]
     GrabRanges(
         (
             Vec<RangeInclusive<u64>>,
-            oneshot::Sender<Result<Vec<GrabbedElement>, NativeError>>,
+            oneshot::Sender<Result<Vec<GrabbedElement>, stypes::NativeError>>,
         ),
     ),
     GetStreamLen(oneshot::Sender<(u64, u64)>),
@@ -123,22 +133,22 @@ pub enum Api {
     GetSearchHolder(
         (
             Uuid,
-            oneshot::Sender<Result<RegularSearchHolder, NativeError>>,
+            oneshot::Sender<Result<RegularSearchHolder, stypes::NativeError>>,
         ),
     ),
     SetSearchHolder(
         (
             Option<RegularSearchHolder>,
             Uuid,
-            oneshot::Sender<Result<(), NativeError>>,
+            oneshot::Sender<Result<(), stypes::NativeError>>,
         ),
     ),
     DropSearch(oneshot::Sender<bool>),
-    GetNearestPosition((u64, oneshot::Sender<Option<NearestPosition>>)),
+    GetNearestPosition((u64, oneshot::Sender<stypes::ResultNearestPosition>)),
     GetScaledMap((u16, Option<(u64, u64)>, oneshot::Sender<ScaledDistribution>)),
     SetMatches(
         (
-            Option<Vec<FilterMatch>>,
+            Option<Vec<stypes::FilterMatch>>,
             Option<FiltersStats>,
             oneshot::Sender<()>,
         ),
@@ -146,14 +156,14 @@ pub enum Api {
     GetSearchValuesHolder(
         (
             Uuid,
-            oneshot::Sender<Result<ValueSearchHolder, NativeError>>,
+            oneshot::Sender<Result<ValueSearchHolder, stypes::NativeError>>,
         ),
     ),
     SetSearchValuesHolder(
         (
             Option<ValueSearchHolder>,
             Uuid,
-            oneshot::Sender<Result<(), NativeError>>,
+            oneshot::Sender<Result<(), stypes::NativeError>>,
         ),
     ),
     SetSearchValues(HashMap<u8, Vec<(u64, f64)>>, oneshot::Sender<()>),
@@ -172,7 +182,7 @@ pub enum Api {
     NotifyCancelingOperation(Uuid),
     NotifyCanceledOperation(Uuid),
     AddAttachment(parsers::Attachment),
-    GetAttachments(oneshot::Sender<Vec<AttachmentInfo>>),
+    GetAttachments(oneshot::Sender<Vec<stypes::AttachmentInfo>>),
     // Used for tests of error handeling
     ShutdownWithError,
     Shutdown,
@@ -261,17 +271,17 @@ impl SessionStateAPI {
         &self,
         api: Api,
         rx_response: oneshot::Receiver<T>,
-    ) -> Result<T, NativeError> {
+    ) -> Result<T, stypes::NativeError> {
         let api_str = api.to_string();
         self.tx_api.send(api).map_err(|e| {
-            NativeError::channel(&format!("Failed to send to Api::{api_str}; error: {e}"))
+            stypes::NativeError::channel(&format!("Failed to send to Api::{api_str}; error: {e}"))
         })?;
         rx_response.await.map_err(|_| {
-            NativeError::channel(&format!("Failed to get response from Api::{api_str}"))
+            stypes::NativeError::channel(&format!("Failed to get response from Api::{api_str}"))
         })
     }
 
-    pub async fn grab(&self, range: LineRange) -> Result<Vec<GrabbedElement>, NativeError> {
+    pub async fn grab(&self, range: LineRange) -> Result<Vec<GrabbedElement>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::Grab((range.clone(), tx)), rx)
             .await?
@@ -280,19 +290,19 @@ impl SessionStateAPI {
     pub async fn grab_indexed(
         &self,
         range: RangeInclusive<u64>,
-    ) -> Result<Vec<GrabbedElement>, NativeError> {
+    ) -> Result<Vec<GrabbedElement>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GrabIndexed((range, tx)), rx)
             .await?
     }
 
-    pub async fn set_indexing_mode(&self, mode: IndexesMode) -> Result<(), NativeError> {
+    pub async fn set_indexing_mode(&self, mode: IndexesMode) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::SetIndexingMode((mode, tx)), rx)
             .await?
     }
 
-    pub async fn get_indexed_len(&self) -> Result<usize, NativeError> {
+    pub async fn get_indexed_len(&self) -> Result<usize, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetIndexedMapLen(tx), rx).await
     }
@@ -300,24 +310,24 @@ impl SessionStateAPI {
     pub async fn get_around_indexes(
         &self,
         position: u64,
-    ) -> Result<(Option<u64>, Option<u64>), NativeError> {
+    ) -> Result<(Option<u64>, Option<u64>), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetDistancesAroundIndex((position, tx)), rx)
             .await?
     }
 
-    pub async fn add_bookmark(&self, row: u64) -> Result<(), NativeError> {
+    pub async fn add_bookmark(&self, row: u64) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::AddBookmark((row, tx)), rx).await?
     }
 
-    pub async fn set_bookmarks(&self, rows: Vec<u64>) -> Result<(), NativeError> {
+    pub async fn set_bookmarks(&self, rows: Vec<u64>) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::SetBookmarks((rows, tx)), rx)
             .await?
     }
 
-    pub async fn remove_bookmark(&self, row: u64) -> Result<(), NativeError> {
+    pub async fn remove_bookmark(&self, row: u64) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::RemoveBookmark((row, tx)), rx)
             .await?
@@ -328,7 +338,7 @@ impl SessionStateAPI {
         seporator: u64,
         offset: u64,
         above: bool,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(
             Api::ExpandBreadcrumbs {
@@ -342,7 +352,10 @@ impl SessionStateAPI {
         .await?
     }
 
-    pub async fn grab_search(&self, range: LineRange) -> Result<Vec<GrabbedElement>, NativeError> {
+    pub async fn grab_search(
+        &self,
+        range: LineRange,
+    ) -> Result<Vec<GrabbedElement>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GrabSearch((range, tx)), rx)
             .await?
@@ -351,18 +364,18 @@ impl SessionStateAPI {
     pub async fn grab_ranges(
         &self,
         ranges: Vec<RangeInclusive<u64>>,
-    ) -> Result<Vec<GrabbedElement>, NativeError> {
+    ) -> Result<Vec<GrabbedElement>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GrabRanges((ranges, tx)), rx)
             .await?
     }
 
-    pub async fn get_stream_len(&self) -> Result<(u64, u64), NativeError> {
+    pub async fn get_stream_len(&self) -> Result<(u64, u64), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetStreamLen(tx), rx).await
     }
 
-    pub async fn get_search_result_len(&self) -> Result<usize, NativeError> {
+    pub async fn get_search_result_len(&self) -> Result<usize, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetSearchResultLen(tx), rx).await
     }
@@ -370,7 +383,7 @@ impl SessionStateAPI {
     pub async fn get_nearest_position(
         &self,
         position: u64,
-    ) -> Result<Option<NearestPosition>, NativeError> {
+    ) -> Result<stypes::ResultNearestPosition, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetNearestPosition((position, tx)), rx)
             .await
@@ -380,76 +393,90 @@ impl SessionStateAPI {
         &self,
         dataset_len: u16,
         range: Option<(u64, u64)>,
-    ) -> Result<ScaledDistribution, NativeError> {
+    ) -> Result<ScaledDistribution, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetScaledMap((dataset_len, range, tx)), rx)
             .await
     }
 
-    pub async fn set_session_file(&self, filename: Option<PathBuf>) -> Result<(), NativeError> {
+    pub async fn set_session_file(
+        &self,
+        filename: Option<PathBuf>,
+    ) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::SetSessionFile((filename, tx)), rx)
             .await?
     }
 
-    pub async fn get_session_file(&self) -> Result<PathBuf, NativeError> {
+    pub async fn get_session_file(&self) -> Result<PathBuf, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetSessionFile(tx), rx).await?
     }
 
-    pub async fn write_session_file(&self, source_id: u16, msg: String) -> Result<(), NativeError> {
+    pub async fn write_session_file(
+        &self,
+        source_id: u16,
+        msg: String,
+    ) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::WriteSessionFile((source_id, msg, tx)), rx)
             .await?
     }
 
-    pub async fn flush_session_file(&self) -> Result<(), NativeError> {
+    pub async fn flush_session_file(&self) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::FlushSessionFile(tx), rx).await?
     }
 
-    pub async fn get_session_file_origin(&self) -> Result<Option<SessionFileOrigin>, NativeError> {
+    pub async fn get_session_file_origin(
+        &self,
+    ) -> Result<Option<SessionFileOrigin>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetSessionFileOrigin(tx), rx)
             .await?
     }
 
-    pub async fn update_session(&self, source_id: u16) -> Result<bool, NativeError> {
+    pub async fn update_session(&self, source_id: u16) -> Result<bool, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::UpdateSession((source_id, tx)), rx)
             .await?
     }
 
-    pub async fn add_source(&self, uuid: &str) -> Result<u16, NativeError> {
+    pub async fn add_source(&self, uuid: &str) -> Result<u16, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::AddSource((uuid.to_owned(), tx)), rx)
             .await
     }
 
-    pub async fn get_source(&self, uuid: &str) -> Result<Option<u16>, NativeError> {
+    pub async fn get_source(&self, uuid: &str) -> Result<Option<u16>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetSource((uuid.to_owned(), tx)), rx)
             .await
     }
 
-    pub async fn get_sources_definitions(&self) -> Result<Vec<SourceDefinition>, NativeError> {
+    pub async fn get_sources_definitions(
+        &self,
+    ) -> Result<Vec<stypes::SourceDefinition>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetSourcesDefinitions(tx), rx)
             .await
     }
 
-    pub async fn add_executed_observe(&self, options: ObserveOptions) -> Result<(), NativeError> {
+    pub async fn add_executed_observe(
+        &self,
+        options: stypes::ObserveOptions,
+    ) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::AddExecutedObserve((options, tx)), rx)
             .await
     }
 
-    pub async fn get_executed_holder(&self) -> Result<Observed, NativeError> {
+    pub async fn get_executed_holder(&self) -> Result<Observed, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetExecutedHolder(tx), rx).await
     }
 
-    pub async fn is_raw_export_available(&self) -> Result<bool, NativeError> {
+    pub async fn is_raw_export_available(&self) -> Result<bool, stypes::NativeError> {
         let (tx_response, rx) = oneshot::channel();
         self.exec_operation(Api::IsRawExportAvailable(tx_response), rx)
             .await
@@ -481,7 +508,7 @@ impl SessionStateAPI {
         spliter: Option<String>,
         delimiter: Option<String>,
         cancel: CancellationToken,
-    ) -> Result<bool, NativeError> {
+    ) -> Result<bool, stypes::NativeError> {
         let (tx_response, rx) = oneshot::channel();
         self.exec_operation(
             Api::ExportSession {
@@ -498,12 +525,15 @@ impl SessionStateAPI {
         .await?
     }
 
-    pub async fn file_read(&self) -> Result<(), NativeError> {
+    pub async fn file_read(&self) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::FileRead(tx), rx).await
     }
 
-    pub async fn get_search_holder(&self, uuid: Uuid) -> Result<RegularSearchHolder, NativeError> {
+    pub async fn get_search_holder(
+        &self,
+        uuid: Uuid,
+    ) -> Result<RegularSearchHolder, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetSearchHolder((uuid, tx)), rx)
             .await?
@@ -513,42 +543,42 @@ impl SessionStateAPI {
         &self,
         holder: Option<RegularSearchHolder>,
         uuid: Uuid,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::SetSearchHolder((holder, uuid, tx)), rx)
             .await?
     }
 
-    pub async fn drop_search(&self) -> Result<bool, NativeError> {
+    pub async fn drop_search(&self) -> Result<bool, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::DropSearch(tx), rx).await
     }
 
     pub async fn set_matches(
         &self,
-        matches: Option<Vec<FilterMatch>>,
+        matches: Option<Vec<stypes::FilterMatch>>,
         stats: Option<FiltersStats>,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::SetMatches((matches, stats, tx)), rx)
             .await
     }
 
-    pub async fn canceling_operation(&self, uuid: Uuid) -> Result<(), NativeError> {
+    pub async fn canceling_operation(&self, uuid: Uuid) -> Result<(), stypes::NativeError> {
         self.tx_api
             .send(Api::NotifyCancelingOperation(uuid))
             .map_err(|e| {
-                NativeError::channel(&format!(
+                stypes::NativeError::channel(&format!(
                     "fail to send to Api::NotifyCancelingOperation; error: {e}",
                 ))
             })
     }
 
-    pub async fn canceled_operation(&self, uuid: Uuid) -> Result<(), NativeError> {
+    pub async fn canceled_operation(&self, uuid: Uuid) -> Result<(), stypes::NativeError> {
         self.tx_api
             .send(Api::NotifyCanceledOperation(uuid))
             .map_err(|e| {
-                NativeError::channel(&format!(
+                stypes::NativeError::channel(&format!(
                     "Failed to send to Api::NotifyCanceledOperation; error: {e}",
                 ))
             })
@@ -557,7 +587,7 @@ impl SessionStateAPI {
     pub async fn get_search_values_holder(
         &self,
         uuid: Uuid,
-    ) -> Result<ValueSearchHolder, NativeError> {
+    ) -> Result<ValueSearchHolder, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetSearchValuesHolder((uuid, tx)), rx)
             .await?
@@ -567,7 +597,7 @@ impl SessionStateAPI {
         &self,
         holder: Option<ValueSearchHolder>,
         uuid: Uuid,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::SetSearchValuesHolder((holder, uuid, tx)), rx)
             .await?
@@ -576,7 +606,7 @@ impl SessionStateAPI {
     pub async fn set_search_values(
         &self,
         values: HashMap<u8, Vec<(u64, f64)>>,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::SetSearchValues(values, tx), rx)
             .await
@@ -586,24 +616,26 @@ impl SessionStateAPI {
         &self,
         frame: Option<RangeInclusive<u64>>,
         width: u16,
-    ) -> Result<HashMap<u8, Vec<CandlePoint>>, NativeError> {
+    ) -> Result<HashMap<u8, Vec<CandlePoint>>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetSearchValues((frame, width, tx)), rx)
             .await?
             .map_err(|e| e.into())
     }
 
-    pub async fn drop_search_values(&self) -> Result<bool, NativeError> {
+    pub async fn drop_search_values(&self) -> Result<bool, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::DropSearchValues(tx), rx).await
     }
 
-    pub async fn get_indexed_ranges(&self) -> Result<Vec<RangeInclusive<u64>>, NativeError> {
+    pub async fn get_indexed_ranges(
+        &self,
+    ) -> Result<Vec<RangeInclusive<u64>>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetIndexedRanges(tx), rx).await
     }
 
-    pub async fn close_session(&self) -> Result<(), NativeError> {
+    pub async fn close_session(&self) -> Result<(), stypes::NativeError> {
         self.closing_token.cancel();
         if let Err(err) = self.tracker.cancel_all().await {
             error!("Fail to correctly stop tracker: {err:?}");
@@ -612,33 +644,37 @@ impl SessionStateAPI {
         self.exec_operation(Api::CloseSession(tx), rx).await
     }
 
-    pub async fn set_debug(&self, debug: bool) -> Result<(), NativeError> {
+    pub async fn set_debug(&self, debug: bool) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::SetDebugMode((debug, tx)), rx)
             .await
     }
 
-    pub fn shutdown(&self) -> Result<(), NativeError> {
+    pub fn shutdown(&self) -> Result<(), stypes::NativeError> {
         self.tx_api.send(Api::Shutdown).map_err(|e| {
-            NativeError::channel(&format!("fail to send to Api::Shutdown; error: {e}",))
+            stypes::NativeError::channel(&format!("fail to send to Api::Shutdown; error: {e}",))
         })
     }
 
-    pub fn shutdown_with_error(&self) -> Result<(), NativeError> {
+    pub fn shutdown_with_error(&self) -> Result<(), stypes::NativeError> {
         self.tx_api.send(Api::ShutdownWithError).map_err(|e| {
-            NativeError::channel(&format!(
+            stypes::NativeError::channel(&format!(
                 "fail to send to Api::ShutdownWithError; error: {e}",
             ))
         })
     }
 
-    pub fn add_attachment(&self, origin: parsers::Attachment) -> Result<(), NativeError> {
+    pub fn add_attachment(&self, origin: parsers::Attachment) -> Result<(), stypes::NativeError> {
         self.tx_api.send(Api::AddAttachment(origin)).map_err(|e| {
-            NativeError::channel(&format!("fail to send to Api::AddAttachment; error: {e}",))
+            stypes::NativeError::channel(
+                &format!("fail to send to Api::AddAttachment; error: {e}",),
+            )
         })
     }
 
-    pub async fn get_attachments(&self) -> Result<Vec<AttachmentInfo>, NativeError> {
+    pub async fn get_attachments(
+        &self,
+    ) -> Result<Vec<stypes::AttachmentInfo>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(Api::GetAttachments(tx), rx).await
     }
diff --git a/application/apps/indexer/session/src/state/attachments.rs b/application/apps/indexer/session/src/state/attachments.rs
index 3b16d3350f..fa467b359a 100644
--- a/application/apps/indexer/session/src/state/attachments.rs
+++ b/application/apps/indexer/session/src/state/attachments.rs
@@ -1,6 +1,5 @@
 use mime_guess;
 use parsers::{self};
-use serde::{Deserialize, Serialize};
 use std::{
     collections::HashMap,
     fs::{create_dir, File},
@@ -21,17 +20,14 @@ pub enum AttachmentsError {
     SessionNotCreated,
 }
 
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct AttachmentInfo {
-    pub uuid: Uuid,
-    // This entity will be propagated into JS world side, to avoid unusual naming file_path,
-    // would be used filepath instead
-    pub filepath: PathBuf,
-    pub name: String,
-    pub ext: Option<String>,
-    pub size: usize,
-    pub mime: Option<String>,
-    pub messages: Vec<usize>,
+impl From<AttachmentsError> for stypes::NativeError {
+    fn from(err: AttachmentsError) -> Self {
+        stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Io,
+            message: Some(err.to_string()),
+        }
+    }
 }
 
 const FILE_NAME_INDEXES_LIMIT: usize = 1000;
@@ -94,11 +90,24 @@ fn get_valid_file_path(dest: &Path, origin: &str) -> Result<PathBuf, io::Error>
     }
 }
 
-impl AttachmentInfo {
-    pub fn from(
+#[derive(Debug)]
+pub struct Attachments {
+    attachments: HashMap<Uuid, stypes::AttachmentInfo>,
+    dest: Option<PathBuf>,
+}
+
+impl Attachments {
+    pub fn new() -> Self {
+        Attachments {
+            attachments: HashMap::new(),
+            dest: None,
+        }
+    }
+
+    pub fn get_attch_from(
         origin: parsers::Attachment,
         store_folder: &PathBuf,
-    ) -> Result<AttachmentInfo, AttachmentsError> {
+    ) -> Result<stypes::AttachmentInfo, AttachmentsError> {
         if !store_folder.exists() {
             create_dir(store_folder).map_err(AttachmentsError::Io)?;
         }
@@ -107,7 +116,7 @@ impl AttachmentInfo {
             get_valid_file_path(store_folder, &origin.name).map_err(AttachmentsError::Io)?;
         let mut attachment_file = File::create(&attachment_path)?;
         attachment_file.write_all(&origin.data)?;
-        Ok(AttachmentInfo {
+        Ok(stypes::AttachmentInfo {
             uuid,
             filepath: attachment_path,
             name: origin.name.clone(),
@@ -121,21 +130,6 @@ impl AttachmentInfo {
             messages: origin.messages,
         })
     }
-}
-
-#[derive(Debug)]
-pub struct Attachments {
-    attachments: HashMap<Uuid, AttachmentInfo>,
-    dest: Option<PathBuf>,
-}
-
-impl Attachments {
-    pub fn new() -> Self {
-        Attachments {
-            attachments: HashMap::new(),
-            dest: None,
-        }
-    }
 
     pub fn set_dest_path(&mut self, dest: PathBuf) -> bool {
         if let (Some(parent), Some(file_stem)) = (dest.parent(), dest.file_stem()) {
@@ -159,10 +153,10 @@ impl Attachments {
     pub fn add(
         &mut self,
         attachment: parsers::Attachment,
-    ) -> Result<AttachmentInfo, AttachmentsError> {
+    ) -> Result<stypes::AttachmentInfo, AttachmentsError> {
         if let Some(dest) = self.dest.as_ref() {
             let uuid = Uuid::new_v4();
-            let a = AttachmentInfo::from(attachment, dest)?;
+            let a = Self::get_attch_from(attachment, dest)?;
             self.attachments.insert(uuid, a.clone());
             Ok(a)
         } else {
@@ -170,11 +164,11 @@ impl Attachments {
         }
     }
 
-    pub fn get(&self) -> Vec<AttachmentInfo> {
+    pub fn get(&self) -> Vec<stypes::AttachmentInfo> {
         self.attachments
             .values()
             .cloned()
-            .collect::<Vec<AttachmentInfo>>()
+            .collect::<Vec<stypes::AttachmentInfo>>()
     }
 }
 
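
Note: with `AttachmentInfo` moved into `stypes`, attachments.rs keeps only the storage map and the new `From<AttachmentsError> for stypes::NativeError` conversion, which is what lets call sites such as `self.attachments.add(origin)?` propagate with `?`. A compressed sketch of that conversion; the stypes shapes are assumed from the hunks above, not copied from the crate:

use std::{fmt, io};

// Stand-ins for the stypes shapes referenced in the hunk above (assumed).
#[derive(Debug)]
pub enum Severity { ERROR }
#[derive(Debug)]
pub enum NativeErrorKind { Io }
#[derive(Debug)]
pub struct NativeError {
    pub severity: Severity,
    pub kind: NativeErrorKind,
    pub message: Option<String>,
}

#[derive(Debug)]
pub enum AttachmentsError {
    Io(io::Error),
    SessionNotCreated,
}

impl fmt::Display for AttachmentsError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AttachmentsError::Io(e) => write!(f, "IO error: {e}"),
            AttachmentsError::SessionNotCreated => write!(f, "Session was not created"),
        }
    }
}

// Same pattern as the impl introduced above: every attachments failure is
// reported as an Io-kind NativeError carrying the formatted message.
impl From<AttachmentsError> for NativeError {
    fn from(err: AttachmentsError) -> Self {
        NativeError {
            severity: Severity::ERROR,
            kind: NativeErrorKind::Io,
            message: Some(err.to_string()),
        }
    }
}
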
diff --git a/application/apps/indexer/session/src/state/indexes/controller.rs b/application/apps/indexer/session/src/state/indexes/controller.rs
index ac9df029cb..d83286b5c0 100644
--- a/application/apps/indexer/session/src/state/indexes/controller.rs
+++ b/application/apps/indexer/session/src/state/indexes/controller.rs
@@ -1,10 +1,5 @@
 use super::{frame::Frame, map::Map, nature::Nature};
-use crate::{
-    events::{CallbackEvent, NativeError},
-    state::GrabbedElement,
-};
 use log::error;
-use processor::map::FilterMatch;
 use std::ops::RangeInclusive;
 use tokio::sync::mpsc::UnboundedSender;
 
@@ -22,11 +17,11 @@ pub enum Mode {
 pub struct Controller {
     map: Map,
     mode: Mode,
-    tx_callback_events: Option<UnboundedSender<CallbackEvent>>,
+    tx_callback_events: Option<UnboundedSender<stypes::CallbackEvent>>,
 }
 
 impl Controller {
-    pub(crate) fn new(tx_callback_events: Option<UnboundedSender<CallbackEvent>>) -> Self {
+    pub(crate) fn new(tx_callback_events: Option<UnboundedSender<stypes::CallbackEvent>>) -> Self {
         Self {
             map: Map::new(),
             mode: Mode::Regular,
@@ -34,7 +29,7 @@ impl Controller {
         }
     }
 
-    pub(crate) fn set_mode(&mut self, mode: Mode) -> Result<(), NativeError> {
+    pub(crate) fn set_mode(&mut self, mode: Mode) -> Result<(), stypes::NativeError> {
         if self.mode == mode {
             return Ok(());
         }
@@ -57,7 +52,7 @@ impl Controller {
         Ok(())
     }
 
-    pub(crate) fn add_bookmark(&mut self, row: u64) -> Result<(), NativeError> {
+    pub(crate) fn add_bookmark(&mut self, row: u64) -> Result<(), stypes::NativeError> {
         if matches!(self.mode, Mode::Breadcrumbs) {
             self.map.breadcrumbs_insert_and_update(
                 &[row],
@@ -72,7 +67,7 @@ impl Controller {
         Ok(())
     }
 
-    pub(crate) fn remove_bookmark(&mut self, row: u64) -> Result<(), NativeError> {
+    pub(crate) fn remove_bookmark(&mut self, row: u64) -> Result<(), stypes::NativeError> {
         if matches!(self.mode, Mode::Breadcrumbs) {
             self.map
                 .breadcrumbs_drop_and_update(&[row], Nature::BOOKMARK)?;
@@ -84,7 +79,7 @@ impl Controller {
         Ok(())
     }
 
-    pub(crate) fn set_bookmarks(&mut self, rows: Vec<u64>) -> Result<(), NativeError> {
+    pub(crate) fn set_bookmarks(&mut self, rows: Vec<u64>) -> Result<(), stypes::NativeError> {
         if matches!(self.mode, Mode::Breadcrumbs) {
             self.map
                 .breadcrumbs_drop_and_update(&rows, Nature::BOOKMARK)?;
@@ -102,7 +97,7 @@ impl Controller {
         Ok(())
     }
 
-    pub(crate) fn set_stream_len(&mut self, len: u64) -> Result<(), NativeError> {
+    pub(crate) fn set_stream_len(&mut self, len: u64) -> Result<(), stypes::NativeError> {
         self.map.set_stream_len(
             len,
             MIN_BREADCRUMBS_DISTANCE,
@@ -113,7 +108,7 @@ impl Controller {
         Ok(())
     }
 
-    pub(crate) fn drop_search(&mut self) -> Result<(), NativeError> {
+    pub(crate) fn drop_search(&mut self) -> Result<(), stypes::NativeError> {
         self.map.clean(
             Nature::SEARCH
                 .union(Nature::BREADCRUMB)
@@ -129,8 +124,8 @@ impl Controller {
 
     pub(crate) fn set_search_results(
         &mut self,
-        matches: &[FilterMatch],
-    ) -> Result<(), NativeError> {
+        matches: &[stypes::FilterMatch],
+    ) -> Result<(), stypes::NativeError> {
         self.map.clean(
             Nature::SEARCH
                 .union(Nature::BREADCRUMB)
@@ -148,8 +143,8 @@ impl Controller {
 
     pub(crate) fn append_search_results(
         &mut self,
-        matches: &[FilterMatch],
-    ) -> Result<(), NativeError> {
+        matches: &[stypes::FilterMatch],
+    ) -> Result<(), stypes::NativeError> {
         if matches!(self.mode, Mode::Breadcrumbs) {
             self.map.breadcrumbs_insert_and_update(
                 &matches.iter().map(|f| f.index).collect::<Vec<u64>>(),
@@ -170,11 +165,11 @@ impl Controller {
     pub(crate) fn get_around_indexes(
         &mut self,
         position: &u64,
-    ) -> Result<(Option<u64>, Option<u64>), NativeError> {
+    ) -> Result<(Option<u64>, Option<u64>), stypes::NativeError> {
         self.map.get_around_indexes(position)
     }
 
-    pub(crate) fn naturalize(&self, elements: &mut [GrabbedElement]) {
+    pub(crate) fn naturalize(&self, elements: &mut [stypes::GrabbedElement]) {
         self.map.naturalize(elements);
     }
 
@@ -183,13 +178,16 @@ impl Controller {
         seporator: u64,
         offset: u64,
         above: bool,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         self.map.breadcrumbs_expand(seporator, offset, above)?;
         self.notify();
         Ok(())
     }
 
-    pub(crate) fn frame(&mut self, range: &mut RangeInclusive<u64>) -> Result<Frame, NativeError> {
+    pub(crate) fn frame(
+        &mut self,
+        range: &mut RangeInclusive<u64>,
+    ) -> Result<Frame, stypes::NativeError> {
         self.map.frame(range)
     }
 
@@ -208,7 +206,7 @@ impl Controller {
 
     fn notify(&self) {
         if let Some(tx) = self.tx_callback_events.as_ref() {
-            if let Err(err) = tx.send(CallbackEvent::IndexedMapUpdated {
+            if let Err(err) = tx.send(stypes::CallbackEvent::IndexedMapUpdated {
                 len: self.map.len() as u64,
             }) {
                 error!("Fail to send indexed map notification: {err:?}");
diff --git a/application/apps/indexer/session/src/state/indexes/frame.rs b/application/apps/indexer/session/src/state/indexes/frame.rs
index d09850a589..78e126383f 100644
--- a/application/apps/indexer/session/src/state/indexes/frame.rs
+++ b/application/apps/indexer/session/src/state/indexes/frame.rs
@@ -1,9 +1,4 @@
 use super::nature::Nature;
-use crate::{
-    events::{NativeError, NativeErrorKind},
-    progress::Severity,
-    state::GrabbedElement,
-};
 use std::ops::RangeInclusive;
 
 #[derive(Debug, Default)]
@@ -54,11 +49,14 @@ impl Frame {
         ranges
     }
 
-    pub fn naturalize(&self, elements: &mut [GrabbedElement]) -> Result<(), NativeError> {
+    pub fn naturalize(
+        &self,
+        elements: &mut [stypes::GrabbedElement],
+    ) -> Result<(), stypes::NativeError> {
         if elements.len() != self.indexes.len() {
-            return Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            return Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(format!(
                     "Fail to naturalize range. Indexes len: {}; elements len: {}.",
                     self.indexes.len(),
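
Note: the frame.rs change is again type-only, but the length guard it keeps is the important invariant: grabbed elements and frame indexes must pair one-to-one. A sketch of that check; the copy of natures after the guard is an assumption about the unchanged remainder of the function:

// Minimal stand-in with only the field this sketch touches.
struct GrabbedElement {
    nature: u8,
}

// Length-checked pairing, as in Frame::naturalize above: a mismatch is
// reported as an error instead of being silently truncated by zip().
fn naturalize(indexes: &[(u64, u8)], elements: &mut [GrabbedElement]) -> Result<(), String> {
    if elements.len() != indexes.len() {
        return Err(format!(
            "Fail to naturalize range. Indexes len: {}; elements len: {}.",
            indexes.len(),
            elements.len()
        ));
    }
    for (el, (_position, nature)) in elements.iter_mut().zip(indexes.iter()) {
        el.nature = *nature;
    }
    Ok(())
}
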
diff --git a/application/apps/indexer/session/src/state/indexes/keys.rs b/application/apps/indexer/session/src/state/indexes/keys.rs
index 48f3cd5310..a80da27272 100644
--- a/application/apps/indexer/session/src/state/indexes/keys.rs
+++ b/application/apps/indexer/session/src/state/indexes/keys.rs
@@ -1,7 +1,3 @@
-use crate::{
-    events::{NativeError, NativeErrorKind},
-    progress::Severity,
-};
 use std::ops::RangeInclusive;
 
 #[derive(Debug)]
@@ -66,16 +62,16 @@ impl Keys {
         Ok(())
     }
 
-    pub fn remove_from(&mut self, position_from: &u64) -> Result<Vec<u64>, NativeError> {
+    pub fn remove_from(&mut self, position_from: &u64) -> Result<Vec<u64>, stypes::NativeError> {
         self.sort();
-        let from_index = self
-            .keys
-            .binary_search(position_from)
-            .map_err(|_| NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
-                message: Some(format!("Cannot find index for position: {position_from}")),
-            })?;
+        let from_index =
+            self.keys
+                .binary_search(position_from)
+                .map_err(|_| stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Grabber,
+                    message: Some(format!("Cannot find index for position: {position_from}")),
+                })?;
         if from_index + 1 < self.keys.len() {
             Ok(self.keys.drain((from_index + 1)..self.keys.len()).collect())
         } else {
@@ -100,19 +96,21 @@ impl Keys {
         self.sorted = false;
     }
 
-    pub fn get_index(&mut self, position: &u64) -> Result<usize, NativeError> {
+    pub fn get_index(&mut self, position: &u64) -> Result<usize, stypes::NativeError> {
         self.sort();
-        self.keys.binary_search(position).map_err(|_| NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Grabber,
-            message: Some(format!("Cannot find index for position: {position}")),
-        })
-    }
-
-    pub fn get_position(&self, index: usize) -> Result<u64, NativeError> {
-        self.keys.get(index).copied().ok_or(NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Grabber,
+        self.keys
+            .binary_search(position)
+            .map_err(|_| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
+                message: Some(format!("Cannot find index for position: {position}")),
+            })
+    }
+
+    pub fn get_position(&self, index: usize) -> Result<u64, stypes::NativeError> {
+        self.keys.get(index).copied().ok_or(stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Grabber,
             message: Some(format!("Cannot find position for index: {index}")),
         })
     }
@@ -120,15 +118,18 @@ impl Keys {
     pub fn get_positions_around(
         &mut self,
         position: &u64,
-    ) -> Result<(Option<u64>, Option<u64>), NativeError> {
+    ) -> Result<(Option<u64>, Option<u64>), stypes::NativeError> {
         let mut before: Option<u64> = None;
         let mut after: Option<u64> = None;
         self.sort();
-        let key = self.keys.binary_search(position).map_err(|_| NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Grabber,
-            message: Some(format!("Cannot index for position: {position}")),
-        })?;
+        let key = self
+            .keys
+            .binary_search(position)
+            .map_err(|_| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
+                message: Some(format!("Cannot index for position: {position}")),
+            })?;
         if key > 0 {
             before = Some(self.keys[key - 1]);
         }
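
Note: keys.rs is mostly reshaped by the error-type rename; the underlying lookup strategy is a sorted key vector plus `binary_search`, with a miss mapped to an error instead of a panic. A condensed sketch of the two lookups above (the `after` branch is an assumption, since the hunk is truncated right after `before`):

// Resolve a stream position to its index in the sorted keys vector.
fn get_index(keys: &[u64], position: u64) -> Result<usize, String> {
    keys.binary_search(&position)
        .map_err(|_| format!("Cannot find index for position: {position}"))
}

// Positions directly before and after the given one, if any.
fn get_positions_around(keys: &[u64], position: u64) -> Result<(Option<u64>, Option<u64>), String> {
    let key = get_index(keys, position)?;
    let before = if key > 0 { Some(keys[key - 1]) } else { None };
    let after = keys.get(key + 1).copied();
    Ok((before, after))
}
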
diff --git a/application/apps/indexer/session/src/state/indexes/map.rs b/application/apps/indexer/session/src/state/indexes/map.rs
index 07c524a5ac..0de2567e73 100644
--- a/application/apps/indexer/session/src/state/indexes/map.rs
+++ b/application/apps/indexer/session/src/state/indexes/map.rs
@@ -1,9 +1,4 @@
 use super::{frame::Frame, keys::Keys, nature::Nature};
-use crate::{
-    events::{NativeError, NativeErrorKind},
-    progress::Severity,
-    state::GrabbedElement,
-};
 use log::error;
 use rustc_hash::FxHashMap;
 use std::{cmp, ops::RangeInclusive};
@@ -134,7 +129,7 @@ impl Map {
         self.insert(&positions, nature);
     }
 
-    fn remove_from(&mut self, position: &u64) -> Result<(), NativeError> {
+    fn remove_from(&mut self, position: &u64) -> Result<(), stypes::NativeError> {
         let removed = self.keys.remove_from(position)?;
         removed.iter().for_each(|position| {
             self.indexes.remove(position);
@@ -173,7 +168,7 @@ impl Map {
         self.indexes_remove(&mut to_be_removed);
     }
 
-    pub fn naturalize(&self, elements: &mut [GrabbedElement]) {
+    pub fn naturalize(&self, elements: &mut [stypes::GrabbedElement]) {
         elements.iter_mut().for_each(|el| {
             if let Some(nature) = self.indexes.get(&(el.pos as u64)) {
                 el.set_nature(nature.bits());
@@ -186,7 +181,7 @@ impl Map {
     pub fn get_around_indexes(
         &mut self,
         position: &u64,
-    ) -> Result<(Option<u64>, Option<u64>), NativeError> {
+    ) -> Result<(Option<u64>, Option<u64>), stypes::NativeError> {
         self.keys.get_positions_around(position)
     }
 
@@ -195,13 +190,13 @@ impl Map {
         range: RangeInclusive<u64>,
         min_distance: u64,
         min_offset: u64,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         let start_pos = *range.start();
         let end_pos = *range.end();
         if end_pos >= self.stream_len {
-            return Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            return Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(format!(
                     "Out of range. Invalid index: {end_pos}. Map len: {};",
                     self.indexes.len()
@@ -252,7 +247,7 @@ impl Map {
         &mut self,
         min_distance: u64,
         min_offset: u64,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         self.clean(Nature::BREADCRUMB);
         self.clean(Nature::BREADCRUMB_SEPORATOR);
         self.clean(Nature::EXPANDED);
@@ -263,9 +258,9 @@ impl Map {
         if keys.is_empty() {
             return Ok(());
         }
-        let first_postion = *self.keys.first().ok_or(NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Grabber,
+        let first_postion = *self.keys.first().ok_or(stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Grabber,
             message: Some(String::from(
                 "Keys vector is empty. Cannot extract first position",
             )),
@@ -278,9 +273,9 @@ impl Map {
         for pair in keys.windows(2) {
             let [from, to]: [u64; 2] = pair.try_into().unwrap();
             if from >= to {
-                return Err(NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Grabber,
+                return Err(stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Grabber,
                     message: Some(format!("Map map is broken. Fail to compare previous and next elements. Prev: {from}; next: {to}",)),
                 });
             }
@@ -290,9 +285,9 @@ impl Map {
                 min_offset,
             )?;
         }
-        let last_position = *self.keys.last().ok_or(NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Grabber,
+        let last_position = *self.keys.last().ok_or(stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Grabber,
             message: Some(String::from(
                 "Keys vector is empty. Cannot extract last position",
             )),
@@ -311,14 +306,14 @@ impl Map {
         nature: Nature,
         min_distance: u64,
         min_offset: u64,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         if self.stream_len == 0 {
             return Ok(());
         }
         if nature.is_breadcrumb() || nature.is_seporator() {
-            return Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            return Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(String::from("Cannot insert Nature::BREADCRUMB | Nature::BREADCRUMB_SEPORATOR to modify indexed map")),
             });
         }
@@ -359,23 +354,23 @@ impl Map {
         &mut self,
         positions: &[u64],
         nature: Nature,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         if self.stream_len == 0 {
             return Ok(());
         }
         for position in positions.iter() {
             if let Some(index) = self.indexes.get_mut(position) {
                 if !index.contains(&nature) {
-                    return Err(NativeError {
-                        severity: Severity::ERROR,
-                        kind: NativeErrorKind::Grabber,
+                    return Err(stypes::NativeError {
+                        severity: stypes::Severity::ERROR,
+                        kind: stypes::NativeErrorKind::Grabber,
                         message: Some(format!("Index doesn't include target nature {nature:?}")),
                     });
                 }
                 if index.cross(Nature::BREADCRUMB.union(Nature::BREADCRUMB_SEPORATOR)) {
-                    return Err(NativeError {
-                        severity: Severity::ERROR,
-                        kind: NativeErrorKind::Grabber,
+                    return Err(stypes::NativeError {
+                        severity: stypes::Severity::ERROR,
+                        kind: stypes::NativeErrorKind::Grabber,
                         message: Some(String::from("Cannot drop Nature::BREADCRUMB | Nature::BREADCRUMB_SEPORATOR | Nature::Search to modify indexed map")),
                     });
                 }
@@ -383,9 +378,9 @@ impl Map {
                     index.set_if_cross(Nature::EXPANDED, Nature::BREADCRUMB);
                 }
             } else {
-                return Err(NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Grabber,
+                return Err(stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Grabber,
                     message: Some(String::from("Fail to find Index for position {position}")),
                 });
             }
@@ -399,7 +394,7 @@ impl Map {
         to: u64,
         min_distance: u64,
         min_offset: u64,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         self.remove_if(from, Nature::BREADCRUMB);
         self.remove_if(to, Nature::BREADCRUMB);
         // If we already have breadcrumbs, which was expanded before by user, we don't need
@@ -458,16 +453,16 @@ impl Map {
         seporator: u64,
         offset: u64,
         above: bool,
-    ) -> Result<(), NativeError> {
-        let sep_index = self.indexes.get(&seporator).ok_or(NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Grabber,
+    ) -> Result<(), stypes::NativeError> {
+        let sep_index = self.indexes.get(&seporator).ok_or(stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Grabber,
             message: Some(format!("Index {seporator} cannot be found.",)),
         })?;
         if !sep_index.is_seporator() {
-            return Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            return Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(format!(
                     "Index {seporator} isn't Nature::BREADCRUMB_SEPORATOR.",
                 )),
@@ -475,9 +470,9 @@ impl Map {
         }
         let (before, after) = self.get_arround_positions(&seporator)?;
         if before.is_none() && after.is_none() {
-            return Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            return Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(format!(
                     "Fail to find indexes around Nature::BREADCRUMB_SEPORATOR on {seporator}"
                 )),
@@ -504,9 +499,9 @@ impl Map {
             };
             if update_after <= update_before {
                 // Some error during calculation
-                return Err(NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Grabber,
+                return Err(stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Grabber,
                     message: Some(String::from("Error during calculation Nature::BREADCRUMB_SEPORATOR: position before grander position after")),
                 });
             } else if update_after - update_before > 1 {
@@ -523,9 +518,9 @@ impl Map {
             );
             if seporator <= updated {
                 // Some error during calculation
-                return Err(NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Grabber,
+                return Err(stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Grabber,
                     message: Some(String::from("Error during calculation Nature::BREADCRUMB_SEPORATOR: position before grander position after")),
                 });
             } else if seporator - updated > 1 {
@@ -542,9 +537,9 @@ impl Map {
             );
             if seporator <= updated {
                 // Some error during calculation
-                return Err(NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Grabber,
+                return Err(stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Grabber,
                     message: Some(String::from("Error during calculation Nature::BREADCRUMB_SEPORATOR: position before grander position after")),
                 });
             } else if seporator - updated > 1 {
@@ -555,7 +550,7 @@ impl Map {
         Ok(())
     }
 
-    fn breadcrumbs_drop_before(&mut self, from: u64) -> Result<Option<u64>, NativeError> {
+    fn breadcrumbs_drop_before(&mut self, from: u64) -> Result<Option<u64>, stypes::NativeError> {
         let mut cursor: usize = self.keys.get_index(&from)?;
         let mut to_drop: Vec<u64> = vec![];
         let mut before: Option<u64> = None;
@@ -577,7 +572,7 @@ impl Map {
         Ok(before)
     }
 
-    fn breadcrumbs_drop_after(&mut self, from: u64) -> Result<Option<u64>, NativeError> {
+    fn breadcrumbs_drop_after(&mut self, from: u64) -> Result<Option<u64>, stypes::NativeError> {
         let len = self.indexes.keys().len();
         let mut cursor: usize = self.keys.get_index(&from)?;
         let mut to_drop: Vec<u64> = vec![];
@@ -604,7 +599,7 @@ impl Map {
     fn get_arround_positions(
         &mut self,
         position: &u64,
-    ) -> Result<(Option<u64>, Option<u64>), NativeError> {
+    ) -> Result<(Option<u64>, Option<u64>), stypes::NativeError> {
         let mut before: Option<u64> = None;
         let mut after: Option<u64> = None;
         let len = self.indexes.keys().len();
@@ -623,12 +618,12 @@ impl Map {
         from_key_index: usize,
         filter: Nature,
         walk_down: bool,
-    ) -> Result<Option<(&u64, &Nature)>, NativeError> {
+    ) -> Result<Option<(&u64, &Nature)>, stypes::NativeError> {
         let len = self.indexes.keys().len();
         if from_key_index >= len {
-            return Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            return Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(format!(
                     "Target from-key-index {from_key_index} is out of keys(); keys().len = {len}",
                 )),
@@ -691,7 +686,7 @@ impl Map {
         min_distance: u64,
         min_offset: u64,
         update_breadcrumbs: bool,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         self.stream_len = len;
         if self.stream_len == 0 {
             self.indexes.clear();
@@ -699,9 +694,9 @@ impl Map {
             return Ok(());
         }
         if update_breadcrumbs {
-            let last_postion = *self.keys.last().ok_or(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            let last_postion = *self.keys.last().ok_or(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(String::from(
                     "Keys vector is empty. Cannot extract last position",
                 )),
@@ -737,11 +732,11 @@ impl Map {
         self.len() == 0
     }
 
-    pub fn frame(&mut self, range: &mut RangeInclusive<u64>) -> Result<Frame, NativeError> {
+    pub fn frame(&mut self, range: &mut RangeInclusive<u64>) -> Result<Frame, stypes::NativeError> {
         if range.end() >= &(self.indexes.len() as u64) {
-            return Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            return Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(format!(
                     "Out of range. Map len: {}; requested: {range:?}",
                     self.indexes.len()
@@ -752,9 +747,9 @@ impl Map {
         let mut frame = Frame::new();
         for index in range {
             let position = self.keys.get_position(index as usize)?;
-            let nature = self.indexes.get(&position).ok_or(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            let nature = self.indexes.get(&position).ok_or(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(format!("Cannot find nature for {position}")),
             })?;
             frame.insert((position, *nature));
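
Note: the map.rs hunks repeat the literal `stypes::NativeError { severity: ERROR, kind: Grabber, message: Some(..) }` many times. A small constructor on the stypes side would shrink every one of these call sites; no such helper exists in this diff, so the sketch below is purely a suggestion, with stand-in types included to keep it self-contained:

// Stand-ins for the stypes shapes used above (assumed).
pub enum Severity { ERROR }
pub enum NativeErrorKind { Grabber }
pub struct NativeError {
    pub severity: Severity,
    pub kind: NativeErrorKind,
    pub message: Option<String>,
}

impl NativeError {
    // Hypothetical helper in the spirit of the existing NativeError::channel.
    pub fn grabber(message: impl Into<String>) -> Self {
        NativeError {
            severity: Severity::ERROR,
            kind: NativeErrorKind::Grabber,
            message: Some(message.into()),
        }
    }
}

// A call site such as the frame() bounds check above could then read:
//   return Err(NativeError::grabber(format!(
//       "Out of range. Map len: {}; requested: {range:?}",
//       self.indexes.len()
//   )));
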
diff --git a/application/apps/indexer/session/src/state/indexes/nature.rs b/application/apps/indexer/session/src/state/indexes/nature.rs
index ac4601471d..4a1350c435 100644
--- a/application/apps/indexer/session/src/state/indexes/nature.rs
+++ b/application/apps/indexer/session/src/state/indexes/nature.rs
@@ -1,8 +1,3 @@
-use crate::{
-    events::{NativeError, NativeErrorKind},
-    progress::Severity,
-};
-
 #[derive(PartialEq, Eq, Debug, Clone, Copy)]
 pub struct Nature(u8);
 impl Nature {
@@ -18,12 +13,12 @@ impl Nature {
 }
 
 impl TryFrom<u8> for Nature {
-    type Error = NativeError;
+    type Error = stypes::NativeError;
     fn try_from(n: u8) -> Result<Self, Self::Error> {
         if 0b00111100 & n > 0 {
-            return Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            return Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(format!(
                     "Invalid index of Nature u8: {}",
                     Self::as_string(n)
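
Note: nature.rs keeps its `TryFrom<u8>` guard; only the error type changes. The guard rejects any byte with a bit set in the mask `0b0011_1100`. A freestanding sketch of that validation, with the error text trimmed and the mask semantics taken from the hunk alone:

// Bits covered by 0b0011_1100 are not accepted in a raw Nature byte,
// so any input touching them is rejected before construction.
fn validate_nature_bits(n: u8) -> Result<u8, String> {
    if 0b0011_1100 & n > 0 {
        Err(format!("Invalid index of Nature u8: {n:08b}"))
    } else {
        Ok(n)
    }
}

#[cfg(test)]
mod tests {
    use super::validate_nature_bits;

    #[test]
    fn masked_bits_are_rejected() {
        assert!(validate_nature_bits(0b0000_0001).is_ok());
        assert!(validate_nature_bits(0b0000_0100).is_err());
    }
}
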
diff --git a/application/apps/indexer/session/src/state/indexes/tests_controller.rs b/application/apps/indexer/session/src/state/indexes/tests_controller.rs
index e03ac83d2a..baffb51590 100644
--- a/application/apps/indexer/session/src/state/indexes/tests_controller.rs
+++ b/application/apps/indexer/session/src/state/indexes/tests_controller.rs
@@ -3,7 +3,6 @@ use super::{
     frame::Frame,
     nature::Nature,
 };
-use processor::map::FilterMatch;
 use std::ops::RangeInclusive;
 
 lazy_static::lazy_static! {
@@ -116,9 +115,9 @@ enum Action {
     // Checks a len of map usize - expected len
     CheckLen(usize),
     // Add searches into map
-    Search(Vec<FilterMatch>),
+    Search(Vec<stypes::FilterMatch>),
     // Append searches into map
-    AppendSearch(Vec<FilterMatch>),
+    AppendSearch(Vec<stypes::FilterMatch>),
     // Set mode
     SetMode(Mode),
     // Extending breadcrumbs
@@ -160,9 +159,9 @@ fn test() {
             vec![
                 Action::StreamLen(21),
                 Action::Search(vec![
-                    FilterMatch::new(0, vec![]),
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(0, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -190,9 +189,9 @@ fn test() {
             vec![
                 Action::StreamLen(21),
                 Action::Search(vec![
-                    FilterMatch::new(0, vec![]),
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(0, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::AddBookmark(0),
@@ -242,9 +241,9 @@ fn test() {
             vec![
                 Action::StreamLen(30),
                 Action::Search(vec![
-                    FilterMatch::new(0, vec![]),
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(0, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::AddBookmark(23),
@@ -310,9 +309,9 @@ fn test() {
             vec![
                 Action::StreamLen(21),
                 Action::Search(vec![
-                    FilterMatch::new(0, vec![]),
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(0, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::AddBookmark(19),
@@ -360,9 +359,9 @@ fn test() {
             vec![
                 Action::StreamLen(21),
                 Action::Search(vec![
-                    FilterMatch::new(0, vec![]),
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(0, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::AddBookmark(11),
@@ -410,9 +409,9 @@ fn test() {
             vec![
                 Action::StreamLen(31),
                 Action::Search(vec![
-                    FilterMatch::new(0, vec![]),
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(0, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -493,8 +492,8 @@ fn test() {
             vec![
                 Action::StreamLen(21),
                 Action::Search(vec![
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -521,9 +520,9 @@ fn test() {
             vec![
                 Action::StreamLen(21),
                 Action::Search(vec![
-                    FilterMatch::new(5, vec![]),
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(5, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -555,9 +554,9 @@ fn test() {
             vec![
                 Action::StreamLen(25),
                 Action::Search(vec![
-                    FilterMatch::new(5, vec![]),
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(5, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -593,9 +592,9 @@ fn test() {
             vec![
                 Action::StreamLen(22),
                 Action::Search(vec![
-                    FilterMatch::new(1, vec![]),
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(1, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -625,9 +624,9 @@ fn test() {
             vec![
                 Action::StreamLen(21),
                 Action::Search(vec![
-                    FilterMatch::new(0, vec![]),
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(0, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -654,7 +653,7 @@ fn test() {
             "012",
             vec![
                 Action::StreamLen(20),
-                Action::Search(vec![FilterMatch::new(10, vec![])]),
+                Action::Search(vec![stypes::FilterMatch::new(10, vec![])]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
                     None,
@@ -688,7 +687,7 @@ fn test() {
             "014",
             vec![
                 Action::StreamLen(21),
-                Action::Search(vec![FilterMatch::new(20, vec![])]),
+                Action::Search(vec![stypes::FilterMatch::new(20, vec![])]),
                 Action::CheckLen(1),
                 Action::AddBookmark(10),
                 Action::CheckLen(2),
@@ -717,9 +716,9 @@ fn test() {
             vec![
                 Action::StreamLen(51),
                 Action::Search(vec![
-                    FilterMatch::new(10, vec![]),
-                    FilterMatch::new(20, vec![]),
-                    FilterMatch::new(50, vec![]),
+                    stypes::FilterMatch::new(10, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(50, vec![]),
                 ]),
                 Action::CheckLen(3),
                 Action::SetMode(Mode::Breadcrumbs),
@@ -878,7 +877,7 @@ fn test() {
             "016",
             vec![
                 Action::StreamLen(20),
-                Action::Search(vec![FilterMatch::new(10, vec![])]),
+                Action::Search(vec![stypes::FilterMatch::new(10, vec![])]),
                 Action::CheckLen(1),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -971,8 +970,8 @@ fn test() {
             vec![
                 Action::StreamLen(50),
                 Action::Search(vec![
-                    FilterMatch::new(0, vec![]),
-                    FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(0, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -1017,8 +1016,8 @@ fn test() {
                     ],
                 )),
                 Action::AppendSearch(vec![
-                    FilterMatch::new(30, vec![]),
-                    FilterMatch::new(40, vec![]),
+                    stypes::FilterMatch::new(30, vec![]),
+                    stypes::FilterMatch::new(40, vec![]),
                 ]),
                 Action::Frame((
                     None,
@@ -1062,8 +1061,8 @@ fn test() {
             vec![
                 Action::StreamLen(50),
                 Action::Search(vec![
-                    FilterMatch::new(20, vec![]),
-                    FilterMatch::new(40, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(40, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -1124,8 +1123,8 @@ fn test() {
             vec![
                 Action::StreamLen(41),
                 Action::Search(vec![
-                    FilterMatch::new(20, vec![]),
-                    FilterMatch::new(40, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(40, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -1221,8 +1220,8 @@ fn test() {
             vec![
                 Action::StreamLen(41),
                 Action::Search(vec![
-                    FilterMatch::new(20, vec![]),
-                    FilterMatch::new(40, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(40, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -1322,8 +1321,8 @@ fn test() {
             vec![
                 Action::StreamLen(41),
                 Action::Search(vec![
-                    FilterMatch::new(20, vec![]),
-                    FilterMatch::new(40, vec![]),
+                    stypes::FilterMatch::new(20, vec![]),
+                    stypes::FilterMatch::new(40, vec![]),
                 ]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
@@ -1479,7 +1478,7 @@ fn test() {
             "022",
             vec![
                 Action::StreamLen(100),
-                Action::Search(vec![FilterMatch::new(50, vec![])]),
+                Action::Search(vec![stypes::FilterMatch::new(50, vec![])]),
                 Action::SetMode(Mode::Breadcrumbs),
                 Action::Frame((
                     None,
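
Note: the test file only re-points `FilterMatch` to `stypes::FilterMatch::new(index, vec![])`. The shape assumed by both the tests and the controller code above (which reads `f.index`) looks roughly like the stand-in below; the element type of the second field is an assumption:

// Assumed shape of stypes::FilterMatch: the matched row index plus the ids
// of the filters that matched on that row (left empty in the tests above,
// where only positions matter for breadcrumb building).
#[derive(Debug, Clone)]
pub struct FilterMatch {
    pub index: u64,
    pub filters: Vec<u8>,
}

impl FilterMatch {
    pub fn new(index: u64, filters: Vec<u8>) -> Self {
        Self { index, filters }
    }
}
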
diff --git a/application/apps/indexer/session/src/state/mod.rs b/application/apps/indexer/session/src/state/mod.rs
index 2572fe6513..1aa76e3eba 100644
--- a/application/apps/indexer/session/src/state/mod.rs
+++ b/application/apps/indexer/session/src/state/mod.rs
@@ -1,7 +1,3 @@
-use crate::{
-    events::{CallbackEvent, NativeError, NativeErrorKind},
-    progress::Severity,
-};
 use log::{debug, error};
 use parsers;
 use processor::{
@@ -30,7 +26,7 @@ mod source_ids;
 pub(crate) mod values;
 
 pub use api::{Api, SessionStateAPI};
-pub use attachments::{AttachmentInfo, Attachments};
+pub use attachments::{Attachments, AttachmentsError};
 pub use indexes::{
     controller::{Controller as Indexes, Mode as IndexesMode},
     frame::Frame,
@@ -39,9 +35,9 @@ pub use indexes::{
 };
 use observed::Observed;
 use searchers::{SearcherState, Searchers};
-pub use session_file::{GrabbedElement, SessionFile, SessionFileOrigin, SessionFileState};
-pub use source_ids::SourceDefinition;
-pub use values::Values;
+pub use session_file::{SessionFile, SessionFileOrigin, SessionFileState};
+use stypes::GrabbedElement;
+pub use values::{Values, ValuesError};
 
 #[derive(Debug)]
 pub enum Status {
@@ -64,7 +60,7 @@ pub struct SessionState {
 }
 
 impl SessionState {
-    fn new(tx_callback_events: UnboundedSender<CallbackEvent>) -> Self {
+    fn new(tx_callback_events: UnboundedSender<stypes::CallbackEvent>) -> Self {
         Self {
             session_file: SessionFile::new(),
             observed: Observed::new(),
@@ -82,7 +78,10 @@ impl SessionState {
         }
     }
 
-    fn handle_grab(&mut self, range: &LineRange) -> Result<Vec<GrabbedElement>, NativeError> {
+    fn handle_grab(
+        &mut self,
+        range: &LineRange,
+    ) -> Result<Vec<GrabbedElement>, stypes::NativeError> {
         let mut elements = self.session_file.grab(range)?;
         self.indexes.naturalize(&mut elements);
         Ok(elements)
@@ -91,7 +90,7 @@ impl SessionState {
     fn handle_grab_indexed(
         &mut self,
         mut range: RangeInclusive<u64>,
-    ) -> Result<Vec<GrabbedElement>, NativeError> {
+    ) -> Result<Vec<GrabbedElement>, stypes::NativeError> {
         let frame = self.indexes.frame(&mut range)?;
         let mut elements: Vec<GrabbedElement> = vec![];
         for range in frame.ranges().iter() {
@@ -102,13 +101,16 @@ impl SessionState {
         Ok(elements)
     }
 
-    fn handle_grab_search(&mut self, range: LineRange) -> Result<Vec<GrabbedElement>, NativeError> {
+    fn handle_grab_search(
+        &mut self,
+        range: LineRange,
+    ) -> Result<Vec<GrabbedElement>, stypes::NativeError> {
         let indexes = self
             .search_map
             .indexes(&range.range)
-            .map_err(|e| NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            .map_err(|e| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(format!("{e}")),
             })?;
         let mut elements: Vec<GrabbedElement> = vec![];
@@ -140,7 +142,7 @@ impl SessionState {
     fn handle_grab_ranges(
         &mut self,
         ranges: Vec<RangeInclusive<u64>>,
-    ) -> Result<Vec<GrabbedElement>, NativeError> {
+    ) -> Result<Vec<GrabbedElement>, stypes::NativeError> {
         let mut elements: Vec<GrabbedElement> = vec![];
         for range in ranges.iter() {
             let mut session_elements = self.session_file.grab(&LineRange::from(range.clone()))?;
@@ -154,9 +156,9 @@ impl SessionState {
         &mut self,
         source_id: u16,
         state_cancellation_token: CancellationToken,
-        tx_callback_events: UnboundedSender<CallbackEvent>,
+        tx_callback_events: UnboundedSender<stypes::CallbackEvent>,
         msg: String,
-    ) -> Result<(), NativeError> {
+    ) -> Result<(), stypes::NativeError> {
         if matches!(
             self.session_file
                 .write(source_id, state_cancellation_token.clone(), msg)?,
@@ -172,8 +174,8 @@ impl SessionState {
     async fn handle_flush_session_file(
         &mut self,
         state_cancellation_token: CancellationToken,
-        tx_callback_events: UnboundedSender<CallbackEvent>,
-    ) -> Result<(), NativeError> {
+        tx_callback_events: UnboundedSender<stypes::CallbackEvent>,
+    ) -> Result<(), stypes::NativeError> {
         if matches!(
             self.session_file
                 .flush(state_cancellation_token.clone(), true)?,
@@ -189,8 +191,8 @@ impl SessionState {
         &mut self,
         source_id: u16,
         state_cancellation_token: CancellationToken,
-        tx_callback_events: UnboundedSender<CallbackEvent>,
-    ) -> Result<bool, NativeError> {
+        tx_callback_events: UnboundedSender<stypes::CallbackEvent>,
+    ) -> Result<bool, stypes::NativeError> {
         if let SessionFileState::Changed = self
             .session_file
             .update(source_id, state_cancellation_token.clone())?
@@ -206,13 +208,13 @@ impl SessionState {
     async fn update_searchers(
         &mut self,
         state_cancellation_token: CancellationToken,
-        tx_callback_events: UnboundedSender<CallbackEvent>,
-    ) -> Result<(), NativeError> {
+        tx_callback_events: UnboundedSender<stypes::CallbackEvent>,
+    ) -> Result<(), stypes::NativeError> {
         let rows = self.session_file.len();
         let bytes = self.session_file.read_bytes();
         self.search_map.set_stream_len(rows);
         self.indexes.set_stream_len(rows)?;
-        tx_callback_events.send(CallbackEvent::StreamUpdated(rows))?;
+        tx_callback_events.send(stypes::CallbackEvent::StreamUpdated(rows))?;
         match self
             .searchers
             .regular
@@ -220,14 +222,14 @@ impl SessionState {
         {
             Some(Ok((_processed, mut matches, stats))) => {
                 self.indexes.append_search_results(&matches)?;
-                let map_updates = SearchMap::map_as_str(&matches);
+                let updates: stypes::FilterMatchList = (&matches).into();
                 let found = self.search_map.append(&mut matches) as u64;
                 self.search_map.append_stats(stats);
-                tx_callback_events.send(CallbackEvent::search_results(
+                tx_callback_events.send(stypes::CallbackEvent::search_results(
                     found,
                     self.search_map.get_stats(),
                 ))?;
-                tx_callback_events.send(CallbackEvent::SearchMapUpdated(Some(map_updates)))?;
+                tx_callback_events.send(stypes::CallbackEvent::SearchMapUpdated(Some(updates)))?;
             }
             Some(Err(err)) => error!("Fail to append search: {}", err),
             None => (),
@@ -260,10 +262,10 @@ impl SessionState {
     ///
     /// # Returns
     ///
-    /// * `Result<bool, NativeError>`:
+    /// * `Result<bool, stypes::NativeError>`:
     ///     - `Ok(true)` if the export is successful.
     ///     - `Ok(false)` if the export was stopped with `cancel`.
-    ///     - `Err(NativeError)` if an error occurs during the export process.
+    ///     - `Err(stypes::NativeError)` if an error occurs during the export process.
     ///
     async fn handle_export_session(
         &mut self,
@@ -273,16 +275,17 @@ impl SessionState {
         spliter: Option<String>,
         delimiter: Option<String>,
         cancel: CancellationToken,
-    ) -> Result<bool, NativeError> {
-        let mut writer = BufWriter::new(File::create(&out_path).map_err(|e| NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
-            message: Some(format!(
-                "Fail to create writer for {}: {}",
-                out_path.to_string_lossy(),
-                e
-            )),
-        })?);
+    ) -> Result<bool, stypes::NativeError> {
+        let mut writer =
+            BufWriter::new(File::create(&out_path).map_err(|e| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Io,
+                message: Some(format!(
+                    "Fail to create writer for {}: {}",
+                    out_path.to_string_lossy(),
+                    e
+                )),
+            })?);
         for (i, range) in ranges.iter().enumerate() {
             let modifier =
                 if let (Some(spliter), Some(delimiter)) = (spliter.as_ref(), delimiter.as_ref()) {
@@ -303,9 +306,9 @@ impl SessionState {
                 modifier,
             )?;
             if i != ranges.len() - 1 {
-                writer.write(b"\n").map_err(|e| NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Io,
+                writer.write(b"\n").map_err(|e| stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Io,
                     message: Some(format!(
                         "Fail to write to file {}: {}",
                         out_path.to_string_lossy(),
@@ -317,15 +320,18 @@ impl SessionState {
                 return Ok(false);
             }
         }
-        writer.flush().map_err(|e| NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
+        writer.flush().map_err(|e| stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Io,
             message: Some(format!("Fail to write into file: {e:?}")),
         })?;
         Ok(true)
     }
 
-    fn handle_get_search_holder(&mut self, uuid: Uuid) -> Result<RegularSearchHolder, NativeError> {
+    fn handle_get_search_holder(
+        &mut self,
+        uuid: Uuid,
+    ) -> Result<RegularSearchHolder, stypes::NativeError> {
         match self.searchers.regular {
             SearcherState::Available(_) => {
                 use std::mem;
@@ -334,14 +340,14 @@ impl SessionState {
                 {
                     Ok(holder)
                 } else {
-                    Err(NativeError {
-                        severity: Severity::ERROR,
-                        kind: NativeErrorKind::Configuration,
+                    Err(stypes::NativeError {
+                        severity: stypes::Severity::ERROR,
+                        kind: stypes::NativeErrorKind::Configuration,
                         message: Some(String::from("Could not replace search holder in state")),
                     })
                 }
             }
-            SearcherState::InUse => Err(NativeError::channel("Search holder is in use")),
+            SearcherState::InUse => Err(stypes::NativeError::channel("Search holder is in use")),
             SearcherState::NotInited => {
                 let filename = self.session_file.filename()?;
                 self.searchers.regular.in_use();
@@ -353,7 +359,7 @@ impl SessionState {
     fn handle_get_search_values_holder(
         &mut self,
         uuid: Uuid,
-    ) -> Result<ValueSearchHolder, NativeError> {
+    ) -> Result<ValueSearchHolder, stypes::NativeError> {
         match self.searchers.values {
             SearcherState::Available(_) => {
                 use std::mem;
@@ -362,16 +368,18 @@ impl SessionState {
                 {
                     Ok(holder)
                 } else {
-                    Err(NativeError {
-                        severity: Severity::ERROR,
-                        kind: NativeErrorKind::Configuration,
+                    Err(stypes::NativeError {
+                        severity: stypes::Severity::ERROR,
+                        kind: stypes::NativeErrorKind::Configuration,
                         message: Some(String::from(
                             "Could not replace search values holder in state",
                         )),
                     })
                 }
             }
-            SearcherState::InUse => Err(NativeError::channel("Search values holder is in use")),
+            SearcherState::InUse => Err(stypes::NativeError::channel(
+                "Search values holder is in use",
+            )),
             SearcherState::NotInited => {
                 let filename = self.session_file.filename()?;
                 self.searchers.values.in_use();
@@ -383,10 +391,10 @@ impl SessionState {
     fn handle_add_attachment(
         &mut self,
         origin: parsers::Attachment,
-        tx_callback_events: UnboundedSender<CallbackEvent>,
-    ) -> Result<(), NativeError> {
+        tx_callback_events: UnboundedSender<stypes::CallbackEvent>,
+    ) -> Result<(), stypes::NativeError> {
         let attachment = self.attachments.add(origin)?;
-        tx_callback_events.send(CallbackEvent::AttachmentsUpdated {
+        tx_callback_events.send(stypes::CallbackEvent::AttachmentsUpdated {
             len: self.attachments.len() as u64,
             attachment,
         })?;
@@ -396,8 +404,8 @@ impl SessionState {
 
 pub async fn run(
     mut rx_api: UnboundedReceiver<Api>,
-    tx_callback_events: UnboundedSender<CallbackEvent>,
-) -> Result<(), NativeError> {
+    tx_callback_events: UnboundedSender<stypes::CallbackEvent>,
+) -> Result<(), stypes::NativeError> {
     let mut state = SessionState::new(tx_callback_events.clone());
     let state_cancellation_token = CancellationToken::new();
     debug!("task is started");
@@ -411,14 +419,14 @@ pub async fn run(
                     state.attachments.set_dest_path(filename);
                 }
                 tx_response.send(set_session_file_res).map_err(|_| {
-                    NativeError::channel("Failed to response to Api::SetSessionFile")
+                    stypes::NativeError::channel("Failed to response to Api::SetSessionFile")
                 })?;
             }
             Api::GetSessionFile(tx_response) => {
                 tx_response
                     .send(state.session_file.filename())
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::GetSessionFile")
+                        stypes::NativeError::channel("Failed to respond to Api::GetSessionFile")
                     })?;
             }
             Api::WriteSessionFile((source_id, msg, tx_response)) => {
@@ -434,7 +442,7 @@ pub async fn run(
                             .await,
                     )
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::WriteSessionFile")
+                        stypes::NativeError::channel("Failed to respond to Api::WriteSessionFile")
                     })?;
             }
             Api::FlushSessionFile(tx_response) => {
@@ -445,14 +453,16 @@ pub async fn run(
                     )
                     .await;
                 tx_response.send(res).map_err(|_| {
-                    NativeError::channel("Failed to respond to Api::FlushSessionFile")
+                    stypes::NativeError::channel("Failed to respond to Api::FlushSessionFile")
                 })?;
             }
             Api::GetSessionFileOrigin(tx_response) => {
                 tx_response
                     .send(Ok(state.session_file.filename.clone()))
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::GetSessionFileOrigin")
+                        stypes::NativeError::channel(
+                            "Failed to respond to Api::GetSessionFileOrigin",
+                        )
                     })?;
             }
             Api::UpdateSession((source_id, tx_response)) => {
@@ -463,43 +473,51 @@ pub async fn run(
                         tx_callback_events.clone(),
                     )
                     .await;
-                tx_response
-                    .send(res)
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::UpdateSession"))?;
+                tx_response.send(res).map_err(|_| {
+                    stypes::NativeError::channel("Failed to respond to Api::UpdateSession")
+                })?;
             }
             Api::AddSource((uuid, tx_response)) => {
                 tx_response
                     .send(state.session_file.sources.add_source(uuid))
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::AddSource"))?;
+                    .map_err(|_| {
+                        stypes::NativeError::channel("Failed to respond to Api::AddSource")
+                    })?;
             }
             Api::GetSource((uuid, tx_response)) => {
                 tx_response
                     .send(state.session_file.sources.get_source(uuid))
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::AddSource"))?;
+                    .map_err(|_| {
+                        stypes::NativeError::channel("Failed to respond to Api::AddSource")
+                    })?;
             }
             Api::GetSourcesDefinitions(tx_response) => {
                 tx_response
                     .send(state.session_file.sources.get_sources_definitions())
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::GetSourcesDefinitions")
+                        stypes::NativeError::channel(
+                            "Failed to respond to Api::GetSourcesDefinitions",
+                        )
                     })?;
             }
             Api::AddExecutedObserve((options, tx_response)) => {
                 state.observed.add(options);
                 tx_response.send(()).map_err(|_| {
-                    NativeError::channel("Failed to respond to Api::AddExecutedObserve")
+                    stypes::NativeError::channel("Failed to respond to Api::AddExecutedObserve")
                 })?;
             }
             Api::GetExecutedHolder(tx_response) => {
                 tx_response.send(state.observed.clone()).map_err(|_| {
-                    NativeError::channel("Failed to respond to Api::GetExecutedHolder")
+                    stypes::NativeError::channel("Failed to respond to Api::GetExecutedHolder")
                 })?;
             }
             Api::IsRawExportAvailable(tx_response) => {
                 tx_response
                     .send(state.observed.is_file_based_export_possible())
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::IsRawExportAvailable")
+                        stypes::NativeError::channel(
+                            "Failed to respond to Api::IsRawExportAvailable",
+                        )
                     })?;
             }
             Api::ExportSession {
@@ -514,54 +532,60 @@ pub async fn run(
                 let res = state
                     .handle_export_session(out_path, ranges, columns, spliter, delimiter, cancel)
                     .await;
-                tx_response
-                    .send(res)
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::ExportSession"))?;
+                tx_response.send(res).map_err(|_| {
+                    stypes::NativeError::channel("Failed to respond to Api::ExportSession")
+                })?;
             }
             Api::Grab((range, tx_response)) => {
                 tx_response
                     .send(state.handle_grab(&range))
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::Grab"))?;
+                    .map_err(|_| stypes::NativeError::channel("Failed to respond to Api::Grab"))?;
             }
             Api::GrabIndexed((range, tx_response)) => {
                 tx_response
                     .send(state.handle_grab_indexed(range))
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::GrabIndexed"))?;
+                    .map_err(|_| {
+                        stypes::NativeError::channel("Failed to respond to Api::GrabIndexed")
+                    })?;
             }
             Api::SetIndexingMode((mode, tx_response)) => {
                 tx_response
                     .send(state.indexes.set_mode(mode))
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::SetIndexingMode")
+                        stypes::NativeError::channel("Failed to respond to Api::SetIndexingMode")
                     })?;
             }
             Api::GetIndexedMapLen(tx_response) => {
                 tx_response.send(state.indexes.len()).map_err(|_| {
-                    NativeError::channel("Failed to respond to Api::GetIndexedMapLen")
+                    stypes::NativeError::channel("Failed to respond to Api::GetIndexedMapLen")
                 })?;
             }
             Api::GetDistancesAroundIndex((position, tx_response)) => {
                 tx_response
                     .send(state.indexes.get_around_indexes(&position))
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::GetIndexedMapLen")
+                        stypes::NativeError::channel("Failed to respond to Api::GetIndexedMapLen")
                     })?;
             }
             Api::AddBookmark((row, tx_response)) => {
                 tx_response
                     .send(state.indexes.add_bookmark(row))
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::AddBookmark"))?;
+                    .map_err(|_| {
+                        stypes::NativeError::channel("Failed to respond to Api::AddBookmark")
+                    })?;
             }
             Api::SetBookmarks((rows, tx_response)) => {
                 tx_response
                     .send(state.indexes.set_bookmarks(rows))
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::SetBookmarks"))?;
+                    .map_err(|_| {
+                        stypes::NativeError::channel("Failed to respond to Api::SetBookmarks")
+                    })?;
             }
             Api::RemoveBookmark((row, tx_response)) => {
                 tx_response
                     .send(state.indexes.remove_bookmark(row))
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::RemoveBookmark")
+                        stypes::NativeError::channel("Failed to respond to Api::RemoveBookmark")
                     })?;
             }
             Api::ExpandBreadcrumbs {
@@ -573,52 +597,62 @@ pub async fn run(
                 tx_response
                     .send(state.indexes.breadcrumbs_expand(seporator, offset, above))
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::ExpandBreadcrumbs")
+                        stypes::NativeError::channel("Failed to respond to Api::ExpandBreadcrumbs")
                     })?;
             }
             Api::GrabSearch((range, tx_response)) => {
                 tx_response
                     .send(state.handle_grab_search(range))
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::GrabSearch"))?;
+                    .map_err(|_| {
+                        stypes::NativeError::channel("Failed to respond to Api::GrabSearch")
+                    })?;
             }
             Api::GrabRanges((ranges, tx_response)) => {
                 tx_response
                     .send(state.handle_grab_ranges(ranges))
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::GrabSearch"))?;
+                    .map_err(|_| {
+                        stypes::NativeError::channel("Failed to respond to Api::GrabSearch")
+                    })?;
             }
             Api::GetNearestPosition((position, tx_response)) => {
                 tx_response
-                    .send(state.search_map.nearest_to(position))
+                    .send(stypes::ResultNearestPosition(
+                        state.search_map.nearest_to(position),
+                    ))
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::GetNearestPosition")
+                        stypes::NativeError::channel("Failed to respond to Api::GetNearestPosition")
                     })?;
             }
             Api::GetScaledMap((len, range, tx_response)) => {
                 tx_response
                     .send(state.search_map.scaled(len, range))
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::GetScaledMap"))?;
+                    .map_err(|_| {
+                        stypes::NativeError::channel("Failed to respond to Api::GetScaledMap")
+                    })?;
             }
             Api::FileRead(tx_response) => {
-                tx_callback_events.send(CallbackEvent::FileRead)?;
-                tx_response
-                    .send(())
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::FileRead"))?;
+                tx_callback_events.send(stypes::CallbackEvent::FileRead)?;
+                tx_response.send(()).map_err(|_| {
+                    stypes::NativeError::channel("Failed to respond to Api::FileRead")
+                })?;
             }
             Api::GetStreamLen(tx_response) => {
                 tx_response
                     .send((state.session_file.len(), state.session_file.read_bytes()))
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::GetStreamLen"))?;
+                    .map_err(|_| {
+                        stypes::NativeError::channel("Failed to respond to Api::GetStreamLen")
+                    })?;
             }
             Api::GetSearchResultLen(tx_response) => {
                 tx_response.send(state.search_map.len()).map_err(|_| {
-                    NativeError::channel("Failed to respond to Api::GetSearchResultLen")
+                    stypes::NativeError::channel("Failed to respond to Api::GetSearchResultLen")
                 })?;
             }
             Api::GetSearchHolder((uuid, tx_response)) => {
                 tx_response
                     .send(state.handle_get_search_holder(uuid))
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::GetSearchHolder")
+                        stypes::NativeError::channel("Failed to respond to Api::GetSearchHolder")
                     })?;
             }
             Api::SetSearchHolder((mut holder, _uuid_for_debug, tx_response)) => {
@@ -630,12 +664,12 @@ pub async fn run(
                     }
                     Ok(())
                 } else {
-                    Err(NativeError::channel(
+                    Err(stypes::NativeError::channel(
                         "Cannot set search holder - it wasn't in use",
                     ))
                 };
                 tx_response.send(result).map_err(|_| {
-                    NativeError::channel("Failed to respond to Api::SetSearchHolder")
+                    stypes::NativeError::channel("Failed to respond to Api::SetSearchHolder")
                 })?;
             }
             Api::DropSearch(tx_response) => {
@@ -647,34 +681,35 @@ pub async fn run(
                     state.indexes.drop_search()?;
                     true
                 };
-                tx_callback_events.send(CallbackEvent::no_search_results())?;
-                tx_callback_events.send(CallbackEvent::SearchMapUpdated(None))?;
-                tx_response
-                    .send(result)
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::DropSearch"))?;
+                tx_callback_events.send(stypes::CallbackEvent::no_search_results())?;
+                tx_callback_events.send(stypes::CallbackEvent::SearchMapUpdated(None))?;
+                tx_response.send(result).map_err(|_| {
+                    stypes::NativeError::channel("Failed to respond to Api::DropSearch")
+                })?;
             }
             Api::SetMatches((matches, stats, tx_response)) => {
-                let update = matches
-                    .as_ref()
-                    .map(|matches| SearchMap::map_as_str(matches));
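+                // Convert the raw matches into the transferable FilterMatchList passed to SearchMapUpdated.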
+                let update: Option<stypes::FilterMatchList> =
+                    matches.as_ref().map(|matches| matches.into());
                 if let Some(matches) = matches.as_ref() {
                     state.indexes.set_search_results(matches)?;
                 }
                 state.search_map.set(matches, stats);
-                tx_callback_events.send(CallbackEvent::SearchMapUpdated(update))?;
-                tx_callback_events.send(CallbackEvent::search_results(
+                tx_callback_events.send(stypes::CallbackEvent::SearchMapUpdated(update))?;
+                tx_callback_events.send(stypes::CallbackEvent::search_results(
                     state.search_map.len() as u64,
                     state.search_map.get_stats(),
                 ))?;
-                tx_response
-                    .send(())
-                    .map_err(|_| NativeError::channel("Failed to respond to Api::SetMatches"))?;
+                tx_response.send(()).map_err(|_| {
+                    stypes::NativeError::channel("Failed to respond to Api::SetMatches")
+                })?;
             }
             Api::GetSearchValuesHolder((uuid, tx_response)) => {
                 tx_response
                     .send(state.handle_get_search_values_holder(uuid))
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::GetSearchValuesHolder")
+                        stypes::NativeError::channel(
+                            "Failed to respond to Api::GetSearchValuesHolder",
+                        )
                     })?;
             }
             Api::SetSearchValuesHolder((mut holder, _uuid_for_debug, tx_response)) => {
@@ -686,25 +721,27 @@ pub async fn run(
                     }
                     Ok(())
                 } else {
-                    Err(NativeError::channel(
+                    Err(stypes::NativeError::channel(
                         "Cannot set search values holder - it wasn't in use",
                     ))
                 };
                 tx_response.send(result).map_err(|_| {
-                    NativeError::channel("Failed to respond to Api::SetSearchValuesHolder")
+                    stypes::NativeError::channel("Failed to respond to Api::SetSearchValuesHolder")
                 })?;
             }
             Api::GetSearchValues((frame, width, tx_response)) => {
                 tx_response
                     .send(state.values.get(frame, width))
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::SetSearchValuesHolder")
+                        stypes::NativeError::channel("Failed to respond to Api::GetSearchValues")
                     })?;
             }
             Api::SetSearchValues(values, tx_response) => {
                 state.values.set_values(values);
                 tx_response.send(()).map_err(|_| {
-                    NativeError::channel("Failed to respond to Api::SetSearchValues")
+                    stypes::NativeError::channel("Failed to respond to Api::SetSearchValues")
                 })?;
             }
             Api::DropSearchValues(tx_response) => {
@@ -716,14 +753,14 @@ pub async fn run(
                 };
                 state.values.drop();
                 tx_response.send(result).map_err(|_| {
-                    NativeError::channel("Failed to respond to Api::DropSearchValues")
+                    stypes::NativeError::channel("Failed to respond to Api::DropSearchValues")
                 })?;
             }
             Api::GetIndexedRanges(tx_response) => {
                 tx_response
                     .send(state.indexes.get_all_as_ranges())
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::GetIndexedRanges")
+                        stypes::NativeError::channel("Failed to respond to Api::GetIndexedRanges")
                     })?;
             }
             Api::CloseSession(tx_response) => {
@@ -732,7 +769,7 @@ pub async fn run(
                 // Note: all operations would be canceled in close_session of API. We cannot do it here,
                 // because we would lock this loop if some operation needs access to state during cancellation.
                 if tx_response.send(()).is_err() {
-                    return Err(NativeError::channel(
+                    return Err(stypes::NativeError::channel(
                         "fail to response to Api::CloseSession",
                     ));
                 }
@@ -740,7 +777,7 @@ pub async fn run(
             Api::SetDebugMode((debug, tx_response)) => {
                 state.debug = debug;
                 if tx_response.send(()).is_err() {
-                    return Err(NativeError::channel(
+                    return Err(stypes::NativeError::channel(
                         "fail to response to Api::SetDebugMode",
                     ));
                 }
@@ -761,7 +798,7 @@ pub async fn run(
             }
             Api::GetAttachments(tx_response) => {
                 tx_response.send(state.attachments.get()).map_err(|_| {
-                    NativeError::channel("Failed to respond to Api::GetAttachments")
+                    stypes::NativeError::channel("Failed to respond to Api::GetAttachments")
                 })?;
             }
             Api::Shutdown => {
@@ -771,9 +808,9 @@ pub async fn run(
             }
             Api::ShutdownWithError => {
                 debug!("shutdown state loop with error for testing");
-                return Err(NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Io,
+                return Err(stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Io,
                     message: Some(String::from("Shutdown state loop with error for testing")),
                 });
             }
diff --git a/application/apps/indexer/session/src/state/observed.rs b/application/apps/indexer/session/src/state/observed.rs
index 3eed2707f2..58e106c5a6 100644
--- a/application/apps/indexer/session/src/state/observed.rs
+++ b/application/apps/indexer/session/src/state/observed.rs
@@ -1,9 +1,8 @@
-use sources::factory::{FileFormat, ObserveOptions, ObserveOrigin, ParserType};
 use std::path::PathBuf;
 
 #[derive(Debug, Clone)]
 pub struct Observed {
-    pub executed: Vec<ObserveOptions>,
+    pub executed: Vec<stypes::ObserveOptions>,
 }
 
 impl Observed {
@@ -11,34 +10,34 @@ impl Observed {
         Self { executed: vec![] }
     }
 
-    pub fn add(&mut self, options: ObserveOptions) {
+    pub fn add(&mut self, options: stypes::ObserveOptions) {
         self.executed.push(options);
     }
 
     pub fn is_file_based_export_possible(&self) -> bool {
         let mut possibility = true;
         self.executed.iter().for_each(|opt| {
-            if matches!(opt.origin, ObserveOrigin::Stream(..)) {
+            if matches!(opt.origin, stypes::ObserveOrigin::Stream(..)) {
                 possibility = false;
             }
         });
         possibility
     }
 
-    pub fn get_files(&self) -> Vec<(ParserType, FileFormat, PathBuf)> {
-        let mut files: Vec<(ParserType, FileFormat, PathBuf)> = vec![];
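+    /// Collects (parser, format, path) triples from File and Concat origins; stream origins are skipped.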
+    pub fn get_files(&self) -> Vec<(stypes::ParserType, stypes::FileFormat, PathBuf)> {
+        let mut files: Vec<(stypes::ParserType, stypes::FileFormat, PathBuf)> = vec![];
         self.executed.iter().for_each(|opt| match &opt.origin {
-            ObserveOrigin::File(_, file_format, filename) => {
+            stypes::ObserveOrigin::File(_, file_format, filename) => {
                 files.push((opt.parser.clone(), file_format.clone(), filename.clone()))
             }
-            ObserveOrigin::Concat(list) => {
+            stypes::ObserveOrigin::Concat(list) => {
                 files.append(
                     &mut list
                         .iter()
                         .map(|(_, file_format, filename)| {
                             (opt.parser.clone(), file_format.clone(), filename.clone())
                         })
-                        .collect::<Vec<(ParserType, FileFormat, PathBuf)>>(),
+                        .collect::<Vec<(stypes::ParserType, stypes::FileFormat, PathBuf)>>(),
                 );
             }
             _ => {}
diff --git a/application/apps/indexer/session/src/state/session_file.rs b/application/apps/indexer/session/src/state/session_file.rs
index 85853a99a5..2a7c119111 100644
--- a/application/apps/indexer/session/src/state/session_file.rs
+++ b/application/apps/indexer/session/src/state/session_file.rs
@@ -1,45 +1,23 @@
 use super::source_ids::SourceIDs;
-use crate::{
-    events::{NativeError, NativeErrorKind},
-    paths,
-    progress::Severity,
-};
+use crate::paths;
 use log::debug;
 use processor::{
     grabber::{Grabber, LineRange},
     text_source::TextFileSource,
 };
-use serde::{Deserialize, Serialize};
 use std::{
     fs::File,
     io::{BufWriter, Write},
     path::PathBuf,
     time::Instant,
 };
+use stypes::GrabbedElement;
 use tokio_util::sync::CancellationToken;
 use uuid::Uuid;
 
 pub const FLUSH_DATA_IN_MS: u128 = 500;
 pub const SESSION_FILE_EXTENSION: &str = "session";
 
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
-pub struct GrabbedElement {
-    #[serde(rename = "id")]
-    pub source_id: u16,
-    #[serde(rename = "c")]
-    pub content: String,
-    #[serde(rename = "p")]
-    pub pos: usize,
-    #[serde(rename = "n")]
-    pub nature: u8,
-}
-
-impl GrabbedElement {
-    pub fn set_nature(&mut self, nature: u8) {
-        self.nature = nature;
-    }
-}
-
 #[derive(Debug)]
 pub enum SessionFileState {
     Changed,
@@ -87,7 +65,7 @@ impl SessionFile {
         }
     }
 
-    pub fn init(&mut self, mut filename: Option<PathBuf>) -> Result<(), NativeError> {
+    pub fn init(&mut self, mut filename: Option<PathBuf>) -> Result<(), stypes::NativeError> {
         if self.grabber.is_none() {
             let filename = if let Some(filename) = filename.take() {
                 self.filename = Some(SessionFileOrigin::Linked(filename.clone()));
@@ -97,9 +75,9 @@ impl SessionFile {
                 let filename = streams.join(format!("{}.{SESSION_FILE_EXTENSION}", Uuid::new_v4()));
                 debug!("Session file setup: {}", filename.to_string_lossy());
                 self.writer = Some(BufWriter::new(File::create(&filename).map_err(|e| {
-                    NativeError {
-                        severity: Severity::ERROR,
-                        kind: NativeErrorKind::Io,
+                    stypes::NativeError {
+                        severity: stypes::Severity::ERROR,
+                        kind: stypes::NativeErrorKind::Io,
                         message: Some(format!(
                             "Fail to create session writer for {}: {}",
                             filename.to_string_lossy(),
@@ -152,7 +130,7 @@ impl SessionFile {
         source_id: u16,
         state_cancellation_token: CancellationToken,
         msg: String,
-    ) -> Result<SessionFileState, NativeError> {
+    ) -> Result<SessionFileState, stypes::NativeError> {
         if !self.sources.is_source_same(source_id) {
             self.flush(state_cancellation_token.clone(), false)?;
         }
@@ -165,9 +143,9 @@ impl SessionFile {
                 Ok(SessionFileState::MaybeChanged)
             }
         } else {
-            Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(String::from(
                     "Session file isn't assigned yet, cannot flush",
                 )),
@@ -179,7 +157,7 @@ impl SessionFile {
         &mut self,
         state_cancellation_token: CancellationToken,
         drop_timestamp: bool,
-    ) -> Result<SessionFileState, NativeError> {
+    ) -> Result<SessionFileState, stypes::NativeError> {
         if drop_timestamp {
             self.last_message_timestamp = Instant::now();
         }
@@ -190,9 +168,9 @@ impl SessionFile {
                 state_cancellation_token,
             )
         } else {
-            Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(String::from(
                     "Session file isn't assigned yet, cannot flush",
                 )),
@@ -204,10 +182,10 @@ impl SessionFile {
         &mut self,
         source_id: u16,
         state_cancellation_token: CancellationToken,
-    ) -> Result<SessionFileState, NativeError> {
-        let grabber = &mut (self.grabber.as_mut().ok_or(NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Grabber,
+    ) -> Result<SessionFileState, stypes::NativeError> {
+        let grabber = &mut (self.grabber.as_mut().ok_or(stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Grabber,
             message: Some(String::from("Grabber isn't inited")),
         })?);
         let prev = grabber.log_entry_count().unwrap_or(0) as u64;
@@ -222,17 +200,19 @@ impl SessionFile {
         })
     }
 
-    pub fn grab(&self, range: &LineRange) -> Result<Vec<GrabbedElement>, NativeError> {
-        let grabber = &mut (self.grabber.as_ref().ok_or(NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Grabber,
+    pub fn grab(&self, range: &LineRange) -> Result<Vec<GrabbedElement>, stypes::NativeError> {
+        let grabber = &mut (self.grabber.as_ref().ok_or(stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Grabber,
             message: Some(String::from("Grabber isn't inited")),
         })?);
-        let rows = grabber.grab_content(range).map_err(|e| NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Grabber,
-            message: Some(format!("{e}")),
-        })?;
+        let rows = grabber
+            .grab_content(range)
+            .map_err(|e| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
+                message: Some(format!("{e}")),
+            })?;
         let mapped_ranges = self.sources.get_mapped_ranges(&range.range);
         let from = *range.range.start() as usize;
         Ok(rows
@@ -250,13 +230,13 @@ impl SessionFile {
             .collect())
     }
 
-    pub fn filename(&self) -> Result<PathBuf, NativeError> {
+    pub fn filename(&self) -> Result<PathBuf, stypes::NativeError> {
         if let Some(origin) = self.filename.as_ref() {
             Ok(origin.filename())
         } else {
-            Err(NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            Err(stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(String::from("Session file isn't created yet")),
             })
         }
@@ -278,33 +258,33 @@ impl SessionFile {
     ///
     /// # Returns
     ///
-    /// * `Result<(), NativeError>`:
+    /// * `Result<(), stypes::NativeError>`:
     ///     * `Ok(())` if the content is copied successfully.
-    ///     * `Err(NativeError)` if an error occurs during the copying process.
+    ///     * `Err(stypes::NativeError)` if an error occurs during the copying process.
     ///
     pub fn copy_content<W: std::io::Write>(
         &mut self,
         writer: &mut W,
         line_range: &LineRange,
         modifier: Option<impl Fn(String) -> String>,
-    ) -> Result<(), NativeError> {
-        let grabber = &mut (self.grabber.as_ref().ok_or(NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Grabber,
+    ) -> Result<(), stypes::NativeError> {
+        let grabber = &mut (self.grabber.as_ref().ok_or(stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Grabber,
             message: Some(String::from("Grabber isn't inited")),
         })?);
         grabber
             .copy_content(writer, line_range, modifier)
-            .map_err(|e| NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Grabber,
+            .map_err(|e| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Grabber,
                 message: Some(format!("{e}")),
             })
     }
 
     /// Cleans up the temporarily generated files of the session and its attachments, if they exist,
     /// for non-linked sessions.
-    pub fn cleanup(&mut self) -> Result<(), NativeError> {
+    pub fn cleanup(&mut self) -> Result<(), stypes::NativeError> {
         if self.writer.is_none() {
             // Session is linked. No temporary files have been generated.
             return Ok(());
@@ -314,9 +294,9 @@ impl SessionFile {
         let filename = self.filename()?;
         debug!("cleaning up files: {:?}", filename);
         if filename.exists() {
-            std::fs::remove_file(&filename).map_err(|e| NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Io,
+            std::fs::remove_file(&filename).map_err(|e| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Io,
                 message: Some(format!(
                     "Removing session main file fialed. Error: {e}. Path: {}",
                     filename.display()
@@ -329,9 +309,9 @@ impl SessionFile {
             .to_str()
             .and_then(|file| file.strip_suffix(&format!(".{SESSION_FILE_EXTENSION}")))
             .map(PathBuf::from)
-            .ok_or_else(|| NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Io,
+            .ok_or_else(|| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Io,
                 message: Some("Session file name isn't UTF-8 valid".into()),
             })?;
 
@@ -341,9 +321,9 @@ impl SessionFile {
                 attachments_dir.display()
             );
 
-            std::fs::remove_dir_all(&attachments_dir).map_err(|err| NativeError {
-                severity: Severity::ERROR,
-                kind: NativeErrorKind::Io,
+            std::fs::remove_dir_all(&attachments_dir).map_err(|err| stypes::NativeError {
+                severity: stypes::Severity::ERROR,
+                kind: stypes::NativeErrorKind::Io,
                 message: Some(format!(
                     "Removing attachments directory failed. Error: {err}, Path: {}",
                     attachments_dir.display()
diff --git a/application/apps/indexer/session/src/state/source_ids.rs b/application/apps/indexer/session/src/state/source_ids.rs
index 6e1c712987..3eaeaf4106 100644
--- a/application/apps/indexer/session/src/state/source_ids.rs
+++ b/application/apps/indexer/session/src/state/source_ids.rs
@@ -1,5 +1,5 @@
-use serde::{Deserialize, Serialize};
 use std::{collections::HashMap, ops::RangeInclusive};
+
 pub struct MappedRanges<'a> {
     ranges: Vec<&'a (RangeInclusive<u64>, u16)>,
 }
@@ -20,12 +20,6 @@ impl<'a> MappedRanges<'a> {
     }
 }
 
-#[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct SourceDefinition {
-    pub id: u16,
-    pub alias: String,
-}
-
 #[derive(Debug)]
 pub struct SourceIDs {
     pub sources: HashMap<u16, String>,
@@ -81,14 +75,14 @@ impl SourceIDs {
         }
     }
 
-    pub fn get_sources_definitions(&self) -> Vec<SourceDefinition> {
+    pub fn get_sources_definitions(&self) -> Vec<stypes::SourceDefinition> {
         self.sources
             .iter()
-            .map(|(id, alias)| SourceDefinition {
+            .map(|(id, alias)| stypes::SourceDefinition {
                 id: *id,
                 alias: alias.to_string(),
             })
-            .collect::<Vec<SourceDefinition>>()
+            .collect::<Vec<stypes::SourceDefinition>>()
     }
 
     pub fn add_range(&mut self, range: RangeInclusive<u64>, source_id: u16) {
diff --git a/application/apps/indexer/session/src/state/values/graph.rs b/application/apps/indexer/session/src/state/values/graph.rs
index 2e6e834a4d..334d8eacce 100644
--- a/application/apps/indexer/session/src/state/values/graph.rs
+++ b/application/apps/indexer/session/src/state/values/graph.rs
@@ -13,6 +13,18 @@ pub struct CandlePoint {
     y_value: f64,
 }
 
+impl From<CandlePoint> for stypes::Point {
+    fn from(v: CandlePoint) -> Self {
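+        // Without candle aggregation, collapse min and max to the single y value.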
+        let (min, max) = v.min_max_y.unwrap_or((v.y_value, v.y_value));
+        stypes::Point {
+            row: v.row,
+            min,
+            max,
+            y_value: v.y_value,
+        }
+    }
+}
+
 impl Serialize for CandlePoint {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
diff --git a/application/apps/indexer/session/src/state/values/mod.rs b/application/apps/indexer/session/src/state/values/mod.rs
index 6e66d9aea8..80c58d7df1 100644
--- a/application/apps/indexer/session/src/state/values/mod.rs
+++ b/application/apps/indexer/session/src/state/values/mod.rs
@@ -1,4 +1,3 @@
-use crate::events::CallbackEvent;
 use log::{debug, error};
 use std::{collections::HashMap, ops::RangeInclusive};
 use thiserror::Error;
@@ -14,17 +13,27 @@ pub enum ValuesError {
     InvalidFrame(String),
 }
 
+impl From<ValuesError> for stypes::NativeError {
+    fn from(err: ValuesError) -> Self {
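+        // Surface values errors as Io-kind native errors, carrying the error text as the message.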
+        stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Io,
+            message: Some(err.to_string()),
+        }
+    }
+}
+
 #[derive(Debug)]
 pub struct Values {
     #[allow(clippy::type_complexity)]
     /// maps the dataset id to (min_y, max_y, list of data-points)
     values: HashMap<u8, (f64, f64, Vec<CandlePoint>)>,
     errors: HashMap<u64, Vec<(u8, String)>>,
-    tx_callback_events: Option<UnboundedSender<CallbackEvent>>,
+    tx_callback_events: Option<UnboundedSender<stypes::CallbackEvent>>,
 }
 
 impl Values {
-    pub fn new(tx_callback_events: Option<UnboundedSender<CallbackEvent>>) -> Self {
+    pub fn new(tx_callback_events: Option<UnboundedSender<stypes::CallbackEvent>>) -> Self {
         Values {
             values: HashMap::new(),
             errors: HashMap::new(),
@@ -204,7 +213,10 @@ impl Values {
                 });
                 Some(map)
             };
-            if tx.send(CallbackEvent::SearchValuesUpdated(map)).is_err() {
+            if tx
+                .send(stypes::CallbackEvent::SearchValuesUpdated(map))
+                .is_err()
+            {
                 error!("Fail to emit event CallbackEvent::SearchValuesUpdated");
             }
         }
diff --git a/application/apps/indexer/session/src/tracker.rs b/application/apps/indexer/session/src/tracker.rs
index dad4b59632..e887cb9c84 100644
--- a/application/apps/indexer/session/src/tracker.rs
+++ b/application/apps/indexer/session/src/tracker.rs
@@ -1,9 +1,4 @@
-use crate::{
-    events::{NativeError, NativeErrorKind},
-    operations::OperationStat,
-    progress::{ProgressProviderAPI, Severity},
-    state::SessionStateAPI,
-};
+use crate::{operations::OperationStat, progress::ProgressProviderAPI, state::SessionStateAPI};
 use log::{debug, error};
 use sources::producer::SdeSender;
 use std::collections::{hash_map::Entry, HashMap};
@@ -33,7 +28,7 @@ pub enum TrackerCommand {
     RemoveOperation((Uuid, oneshot::Sender<bool>)),
     CancelOperation((Uuid, oneshot::Sender<bool>)),
     SetDebugMode((bool, oneshot::Sender<()>)),
-    GetOperationsStat(oneshot::Sender<Result<String, NativeError>>),
+    GetOperationsStat(oneshot::Sender<Result<String, stypes::NativeError>>),
     GetSdeSender((Uuid, oneshot::Sender<Option<SdeSender>>)),
     CancelAll(oneshot::Sender<()>),
     Shutdown,
@@ -83,13 +78,13 @@ impl OperationTrackerAPI {
         &self,
         command: TrackerCommand,
         rx_response: oneshot::Receiver<T>,
-    ) -> Result<T, NativeError> {
+    ) -> Result<T, stypes::NativeError> {
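+        // Forward the command to the tracker loop and await its oneshot response.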
         let api_str = format!("{command}");
         self.tx_api.send(command).map_err(|e| {
-            NativeError::channel(&format!("Failed to send to Api::{api_str}; error: {e}"))
+            stypes::NativeError::channel(&format!("Failed to send to Api::{api_str}; error: {e}"))
         })?;
         rx_response.await.map_err(|_| {
-            NativeError::channel(&format!("Failed to get response from Api::{api_str}"))
+            stypes::NativeError::channel(&format!("Failed to get response from Api::{api_str}"))
         })
     }
 
@@ -100,7 +95,7 @@ impl OperationTrackerAPI {
         tx_sde: Option<SdeSender>,
         canceler: CancellationToken,
         done: CancellationToken,
-    ) -> Result<bool, NativeError> {
+    ) -> Result<bool, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(
             TrackerCommand::AddOperation((uuid, name, tx_sde, canceler, done, tx)),
@@ -109,53 +104,56 @@ impl OperationTrackerAPI {
         .await
     }
 
-    pub async fn remove_operation(&self, uuid: Uuid) -> Result<bool, NativeError> {
+    pub async fn remove_operation(&self, uuid: Uuid) -> Result<bool, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(TrackerCommand::RemoveOperation((uuid, tx)), rx)
             .await
     }
 
-    pub async fn cancel_operation(&self, uuid: Uuid) -> Result<bool, NativeError> {
+    pub async fn cancel_operation(&self, uuid: Uuid) -> Result<bool, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(TrackerCommand::CancelOperation((uuid, tx)), rx)
             .await
     }
 
-    pub async fn cancel_all(&self) -> Result<(), NativeError> {
+    pub async fn cancel_all(&self) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(TrackerCommand::CancelAll(tx), rx).await
     }
 
-    pub async fn set_debug(&self, debug: bool) -> Result<(), NativeError> {
+    pub async fn set_debug(&self, debug: bool) -> Result<(), stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(TrackerCommand::SetDebugMode((debug, tx)), rx)
             .await?;
         Ok(())
     }
 
-    pub async fn get_operations_stat(&self) -> Result<String, NativeError> {
+    pub async fn get_operations_stat(&self) -> Result<String, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(TrackerCommand::GetOperationsStat(tx), rx)
             .await?
     }
 
-    pub async fn get_sde_sender(&self, uuid: Uuid) -> Result<Option<SdeSender>, NativeError> {
+    pub async fn get_sde_sender(
+        &self,
+        uuid: Uuid,
+    ) -> Result<Option<SdeSender>, stypes::NativeError> {
         let (tx, rx) = oneshot::channel();
         self.exec_operation(TrackerCommand::GetSdeSender((uuid, tx)), rx)
             .await
     }
 
-    pub fn shutdown(&self) -> Result<(), NativeError> {
+    pub fn shutdown(&self) -> Result<(), stypes::NativeError> {
         self.tx_api.send(TrackerCommand::Shutdown).map_err(|e| {
-            NativeError::channel(&format!("fail to send to Api::Shutdown; error: {e}",))
+            stypes::NativeError::channel(&format!("fail to send to Api::Shutdown; error: {e}",))
         })
     }
 
-    pub fn shutdown_with_error(&self) -> Result<(), NativeError> {
+    pub fn shutdown_with_error(&self) -> Result<(), stypes::NativeError> {
         self.tx_api
             .send(TrackerCommand::ShutdownWithError)
             .map_err(|e| {
-                NativeError::channel(&format!(
+                stypes::NativeError::channel(&format!(
                     "fail to send to Api::ShutdownWithError; error: {e}",
                 ))
             })
@@ -165,7 +163,7 @@ impl OperationTrackerAPI {
 pub async fn run(
     state: SessionStateAPI,
     mut rx_api: UnboundedReceiver<TrackerCommand>,
-) -> Result<(), NativeError> {
+) -> Result<(), stypes::NativeError> {
     let mut tracker = OperationTracker {
         operations: HashMap::new(),
         stat: vec![],
@@ -198,7 +196,7 @@ pub async fn run(
                     })
                     .is_err()
                 {
-                    return Err(NativeError::channel(
+                    return Err(stypes::NativeError::channel(
                         "fail to response to Api::AddOperation",
                     ));
                 } else {
@@ -225,7 +223,7 @@ pub async fn run(
                     .send(tracker.operations.remove(&uuid).is_some())
                     .is_err()
                 {
-                    return Err(NativeError::channel(
+                    return Err(stypes::NativeError::channel(
                         "fail to response to Api::RemoveOperation",
                     ));
                 }
@@ -260,7 +258,7 @@ pub async fn run(
                         },
                     )
                     .map_err(|_| {
-                        NativeError::channel("Failed to respond to Api::CancelOperation")
+                        stypes::NativeError::channel("Failed to respond to Api::CancelOperation")
                     })?;
             }
             TrackerCommand::CancelAll(tx_response) => {
@@ -287,7 +285,7 @@ pub async fn run(
                 }
                 tracker.operations.clear();
                 if tx_response.send(()).is_err() {
-                    return Err(NativeError::channel(
+                    return Err(stypes::NativeError::channel(
                         "fail to response to Api::CloseSession",
                     ));
                 }
@@ -295,7 +293,7 @@ pub async fn run(
             TrackerCommand::SetDebugMode((debug, tx_response)) => {
                 tracker.debug = debug;
                 if tx_response.send(()).is_err() {
-                    return Err(NativeError::channel(
+                    return Err(stypes::NativeError::channel(
                         "fail to response to Api::SetDebugMode",
                     ));
                 }
@@ -304,15 +302,15 @@ pub async fn run(
                 if tx_response
                     .send(match serde_json::to_string(&tracker.stat) {
                         Ok(serialized) => Ok(serialized),
-                        Err(err) => Err(NativeError {
-                            severity: Severity::ERROR,
-                            kind: NativeErrorKind::ComputationFailed,
+                        Err(err) => Err(stypes::NativeError {
+                            severity: stypes::Severity::ERROR,
+                            kind: stypes::NativeErrorKind::ComputationFailed,
                             message: Some(format!("{err}")),
                         }),
                     })
                     .is_err()
                 {
-                    return Err(NativeError::channel(
+                    return Err(stypes::NativeError::channel(
                         "fail to response to Api::GetOperationsStat",
                     ));
                 }
@@ -328,7 +326,7 @@ pub async fn run(
                     )
                     .is_err()
                 {
-                    return Err(NativeError::channel(
+                    return Err(stypes::NativeError::channel(
                         "fail to response to Api::GetSdeSender",
                     ));
                 }
@@ -339,9 +337,9 @@ pub async fn run(
             }
             TrackerCommand::ShutdownWithError => {
                 debug!("shutdown tracker loop with error for testing");
-                return Err(NativeError {
-                    severity: Severity::ERROR,
-                    kind: NativeErrorKind::Io,
+                return Err(stypes::NativeError {
+                    severity: stypes::Severity::ERROR,
+                    kind: stypes::NativeErrorKind::Io,
                     message: Some(String::from("Shutdown tracker loop with error for testing")),
                 });
             }
diff --git a/application/apps/indexer/session/src/unbound/api.rs b/application/apps/indexer/session/src/unbound/api.rs
index 46228d31e6..11c8f94e09 100644
--- a/application/apps/indexer/session/src/unbound/api.rs
+++ b/application/apps/indexer/session/src/unbound/api.rs
@@ -1,9 +1,8 @@
-use crate::events::ComputationError;
 use processor::search::filter::SearchFilter;
-use serde::Serialize;
+use serde::{de::DeserializeOwned, Serialize};
 use tokio::sync::{mpsc::UnboundedSender, oneshot};
 
-use super::commands::{Command, CommandOutcome};
+use super::commands::Command;
 
 #[derive(Debug)]
 pub enum API {
@@ -24,42 +23,42 @@ impl UnboundSessionAPI {
         Self { tx }
     }
 
-    pub async fn shutdown(&self) -> Result<(), ComputationError> {
+    pub async fn shutdown(&self) -> Result<(), stypes::ComputationError> {
         let (tx, rx): (oneshot::Sender<()>, oneshot::Receiver<()>) = oneshot::channel();
         self.tx.send(API::Shutdown(tx)).map_err(|_| {
-            ComputationError::Communication(String::from("Fail to send API::Shutdown"))
+            stypes::ComputationError::Communication(String::from("Fail to send API::Shutdown"))
         })?;
         rx.await.map_err(|e| {
-            ComputationError::Communication(format!(
+            stypes::ComputationError::Communication(format!(
                 "Fail to get response from API::Shutdown: {e:?}"
             ))
         })
     }
 
-    pub async fn cancel_job(&self, operation_id: &u64) -> Result<(), ComputationError> {
+    pub async fn cancel_job(&self, operation_id: &u64) -> Result<(), stypes::ComputationError> {
         self.tx.send(API::CancelJob(*operation_id)).map_err(|_| {
-            ComputationError::Communication(String::from("Fail to send API::CancelJob"))
+            stypes::ComputationError::Communication(String::from("Fail to send API::CancelJob"))
         })
     }
 
-    async fn process_command<T: Serialize>(
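+    /// Sends the command to the unbound session loop and awaits its outcome on the oneshot channel.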
+    async fn process_command<T: Serialize + DeserializeOwned>(
         &self,
         id: u64,
-        rx_results: oneshot::Receiver<Result<CommandOutcome<T>, ComputationError>>,
+        rx_results: oneshot::Receiver<Result<stypes::CommandOutcome<T>, stypes::ComputationError>>,
         command: Command,
-    ) -> Result<CommandOutcome<T>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<T>, stypes::ComputationError> {
         let cmd = command.to_string();
-        self.tx
-            .send(API::Run(command, id))
-            .map_err(|_| ComputationError::Communication(format!("Fail to send call {cmd}")))?;
+        self.tx.send(API::Run(command, id)).map_err(|_| {
+            stypes::ComputationError::Communication(format!("Fail to send call {cmd}"))
+        })?;
         rx_results
             .await
-            .map_err(|e| ComputationError::Communication(format!("channel error: {e}")))?
+            .map_err(|e| stypes::ComputationError::Communication(format!("channel error: {e}")))?
     }
 
-    pub(crate) fn remove_command(&self, id: u64) -> Result<(), ComputationError> {
+    pub(crate) fn remove_command(&self, id: u64) -> Result<(), stypes::ComputationError> {
         self.tx.send(API::Remove(id)).map_err(|_| {
-            ComputationError::Communication(format!("Fail to remove command id={id}"))
+            stypes::ComputationError::Communication(format!("Fail to remove command id={id}"))
         })?;
         Ok(())
     }
@@ -69,7 +68,7 @@ impl UnboundSessionAPI {
         id: u64,
         custom_arg_a: i64,
         custom_arg_b: i64,
-    ) -> Result<CommandOutcome<i64>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<i64>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(
             id,
@@ -87,7 +86,8 @@ impl UnboundSessionAPI {
         paths: Vec<String>,
         include_files: bool,
         include_folders: bool,
-    ) -> Result<CommandOutcome<String>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<stypes::FoldersScanningResult>, stypes::ComputationError>
+    {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(
             id,
@@ -108,7 +108,7 @@ impl UnboundSessionAPI {
         &self,
         id: u64,
         file_path: String,
-    ) -> Result<CommandOutcome<bool>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<bool>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(id, rx_results, Command::IsFileBinary(file_path, tx_results))
             .await
@@ -119,7 +119,7 @@ impl UnboundSessionAPI {
         id: u64,
         path: String,
         args: Vec<String>,
-    ) -> Result<CommandOutcome<()>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<()>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(
             id,
@@ -133,7 +133,7 @@ impl UnboundSessionAPI {
         &self,
         id: u64,
         path: String,
-    ) -> Result<CommandOutcome<String>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<String>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(id, rx_results, Command::Checksum(path, tx_results))
             .await
@@ -143,7 +143,7 @@ impl UnboundSessionAPI {
         &self,
         id: u64,
         files: Vec<String>,
-    ) -> Result<CommandOutcome<String>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<stypes::DltStatisticInfo>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(id, rx_results, Command::GetDltStats(files, tx_results))
             .await
@@ -153,7 +153,7 @@ impl UnboundSessionAPI {
         &self,
         id: u64,
         files: Vec<String>,
-    ) -> Result<CommandOutcome<String>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<String>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(
             id,
@@ -166,7 +166,7 @@ impl UnboundSessionAPI {
     pub async fn get_shell_profiles(
         &self,
         id: u64,
-    ) -> Result<CommandOutcome<String>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<stypes::ProfileList>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(id, rx_results, Command::GetShellProfiles(tx_results))
             .await
@@ -175,7 +175,7 @@ impl UnboundSessionAPI {
     pub async fn get_context_envvars(
         &self,
         id: u64,
-    ) -> Result<CommandOutcome<String>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<stypes::MapKeyValue>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(id, rx_results, Command::GetContextEnvvars(tx_results))
             .await
@@ -184,7 +184,7 @@ impl UnboundSessionAPI {
     pub async fn get_serial_ports_list(
         &self,
         id: u64,
-    ) -> Result<CommandOutcome<Vec<String>>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<stypes::SerialPortsList>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(id, rx_results, Command::SerialPortsList(tx_results))
             .await
@@ -194,13 +194,17 @@ impl UnboundSessionAPI {
         &self,
         id: u64,
         filter: SearchFilter,
-    ) -> Result<CommandOutcome<Option<String>>, ComputationError> {
+    ) -> Result<stypes::CommandOutcome<Option<String>>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(id, rx_results, Command::GetRegexError(filter, tx_results))
             .await
     }
 
-    pub async fn sleep(&self, id: u64, ms: u64) -> Result<CommandOutcome<()>, ComputationError> {
+    pub async fn sleep(
+        &self,
+        id: u64,
+        ms: u64,
+    ) -> Result<stypes::CommandOutcome<()>, stypes::ComputationError> {
         let (tx_results, rx_results) = oneshot::channel();
         self.process_command(id, rx_results, Command::Sleep(ms, tx_results))
             .await
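
For orientation, a minimal usage sketch of the refactored API: results now arrive as typed `stypes` values rather than JSON strings, so the caller can pattern-match directly. It assumes an `UnboundSessionAPI` handle (`api`) and the `stypes` crate are in scope; the request id is arbitrary.

    async fn print_serial_ports(api: &UnboundSessionAPI) -> Result<(), stypes::ComputationError> {
        match api.get_serial_ports_list(42).await? {
            // Typed payload: no JSON parsing on the consumer side anymore.
            stypes::CommandOutcome::Finished(stypes::SerialPortsList(ports)) => {
                for port in ports {
                    println!("serial port: {port}");
                }
            }
            stypes::CommandOutcome::Cancelled => println!("request was cancelled"),
        }
        Ok(())
    }
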
diff --git a/application/apps/indexer/session/src/unbound/cleanup.rs b/application/apps/indexer/session/src/unbound/cleanup.rs
index e6c39ab100..9744a84e0a 100644
--- a/application/apps/indexer/session/src/unbound/cleanup.rs
+++ b/application/apps/indexer/session/src/unbound/cleanup.rs
@@ -4,23 +4,19 @@ use std::{
     time::{Duration, SystemTime},
 };
 
-use crate::{
-    events::{NativeError, NativeErrorKind},
-    paths::get_streams_dir,
-    progress::Severity,
-};
+use crate::paths::get_streams_dir;
 
 /// Iterates through the chipmunk temporary directory and removes the entries which are older
 /// than two months.
-pub fn cleanup_temp_dir() -> Result<(), NativeError> {
+pub fn cleanup_temp_dir() -> Result<(), stypes::NativeError> {
     let tmp_dir = get_streams_dir()?;
 
     const TWO_MONTHS_SECONDS: u64 = 60 * 60 * 24 * 60;
     let modified_limit = SystemTime::now()
         .checked_sub(Duration::from_secs(TWO_MONTHS_SECONDS))
-        .ok_or_else(|| NativeError {
-            severity: Severity::ERROR,
-            kind: NativeErrorKind::Io,
+        .ok_or_else(|| stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::Io,
             message: Some(String::from(
                 "Error while calculating modification time limit",
             )),
@@ -88,13 +84,13 @@ mod tests {
         let past = SystemTime::now()
             .checked_sub(Duration::from_secs(3600))
             .unwrap();
-        cleanup_dir(&temp_path, past).unwrap();
+        cleanup_dir(temp_path, past).unwrap();
         for entry in &entries {
             assert!(entry.exists());
         }
 
         // Cleaning up with the current time must remove all files and directories.
-        cleanup_dir(&temp_path, SystemTime::now()).unwrap();
+        cleanup_dir(temp_path, SystemTime::now()).unwrap();
 
         // Temp directory itself shouldn't be removed.
         assert!(temp_path.exists());
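
The age check behind this cleanup is a plain `SystemTime` comparison; a small self-contained sketch (same two-month constant as above, the helper itself is illustrative):

    use std::{fs, path::Path, time::{Duration, SystemTime}};

    const TWO_MONTHS_SECONDS: u64 = 60 * 60 * 24 * 60;

    /// Illustrative helper: true when `path` was last modified before the two-month cutoff.
    fn is_older_than_two_months(path: &Path) -> std::io::Result<bool> {
        let cutoff = SystemTime::now()
            .checked_sub(Duration::from_secs(TWO_MONTHS_SECONDS))
            .expect("system clock is earlier than the two-month window");
        Ok(fs::metadata(path)?.modified()? < cutoff)
    }
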
diff --git a/application/apps/indexer/session/src/unbound/commands/cancel_test.rs b/application/apps/indexer/session/src/unbound/commands/cancel_test.rs
index 592570bb38..cb5186426a 100644
--- a/application/apps/indexer/session/src/unbound/commands/cancel_test.rs
+++ b/application/apps/indexer/session/src/unbound/commands/cancel_test.rs
@@ -1,7 +1,4 @@
-use crate::{
-    events::ComputationError,
-    unbound::{commands::CommandOutcome, signal::Signal},
-};
+use crate::unbound::signal::Signal;
 use tokio::{
     select,
     time::{sleep, Duration},
@@ -11,13 +8,13 @@ pub async fn cancel_test(
     custom_arg_a: i64,
     custom_arg_b: i64,
     signal: Signal,
-) -> Result<CommandOutcome<i64>, ComputationError> {
+) -> Result<stypes::CommandOutcome<i64>, stypes::ComputationError> {
     Ok(select! {
         _ = signal.cancelled() => {
-            CommandOutcome::Cancelled
+            stypes::CommandOutcome::Cancelled
         }
         _ = sleep(Duration::from_millis(500)) => {
-            CommandOutcome::Finished(custom_arg_a + custom_arg_b)
+            stypes::CommandOutcome::Finished(custom_arg_a + custom_arg_b)
         }
     })
 }
diff --git a/application/apps/indexer/session/src/unbound/commands/checksum.rs b/application/apps/indexer/session/src/unbound/commands/checksum.rs
index 3273fdfd36..a5d6a495fb 100644
--- a/application/apps/indexer/session/src/unbound/commands/checksum.rs
+++ b/application/apps/indexer/session/src/unbound/commands/checksum.rs
@@ -1,5 +1,4 @@
-use super::CommandOutcome;
-use crate::{events::ComputationError, unbound::signal::Signal};
+use crate::unbound::signal::Signal;
 use blake3;
 use std::{
     fs::File,
@@ -9,9 +8,9 @@ use std::{
 pub fn checksum(
     filename: &str,
     _signal: Signal,
-) -> Result<CommandOutcome<String>, ComputationError> {
+) -> Result<stypes::CommandOutcome<String>, stypes::ComputationError> {
     let mut file =
-        File::open(filename).map_err(|e| ComputationError::IoOperation(e.to_string()))?;
+        File::open(filename).map_err(|e| stypes::ComputationError::IoOperation(e.to_string()))?;
     let mut hasher = blake3::Hasher::new();
     let mut buffer = [0; 65536];
     loop {
@@ -21,8 +20,10 @@ pub fn checksum(
                 hasher.update(&buffer[..n]);
             }
             Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue,
-            Err(e) => return Err(ComputationError::IoOperation(e.to_string())),
+            Err(e) => return Err(stypes::ComputationError::IoOperation(e.to_string())),
         }
     }
-    Ok(CommandOutcome::Finished(hasher.finalize().to_string()))
+    Ok(stypes::CommandOutcome::Finished(
+        hasher.finalize().to_string(),
+    ))
 }
diff --git a/application/apps/indexer/session/src/unbound/commands/dlt.rs b/application/apps/indexer/session/src/unbound/commands/dlt.rs
index 7217fa4745..75a34d2ff7 100644
--- a/application/apps/indexer/session/src/unbound/commands/dlt.rs
+++ b/application/apps/indexer/session/src/unbound/commands/dlt.rs
@@ -1,12 +1,11 @@
-use super::CommandOutcome;
-use crate::{events::ComputationError, unbound::signal::Signal};
+use crate::unbound::signal::Signal;
 use dlt_core::statistics::{collect_dlt_stats, StatisticInfo};
 use std::path::Path;
 
 pub fn stats(
     files: Vec<String>,
     _signal: Signal,
-) -> Result<CommandOutcome<String>, ComputationError> {
+) -> Result<stypes::CommandOutcome<stypes::DltStatisticInfo>, stypes::ComputationError> {
     let mut stat = StatisticInfo::new();
     let mut error: Option<String> = None;
     files.iter().for_each(|file| {
@@ -23,9 +22,7 @@ pub fn stats(
         }
     });
     if let Some(err) = error {
-        return Err(ComputationError::IoOperation(err));
+        return Err(stypes::ComputationError::IoOperation(err));
     }
-    Ok(CommandOutcome::Finished(
-        serde_json::to_string(&stat).map_err(|e| ComputationError::IoOperation(e.to_string()))?,
-    ))
+    Ok(stypes::CommandOutcome::Finished(stat.into()))
 }
diff --git a/application/apps/indexer/session/src/unbound/commands/file.rs b/application/apps/indexer/session/src/unbound/commands/file.rs
index 503f886367..dddc205421 100644
--- a/application/apps/indexer/session/src/unbound/commands/file.rs
+++ b/application/apps/indexer/session/src/unbound/commands/file.rs
@@ -1,9 +1,9 @@
-use super::{CommandOutcome, CommandOutcome::Finished};
-use crate::events::{ComputationError, ComputationError::OperationNotSupported};
 use file_tools::is_binary;
 
-pub fn is_file_binary(file_path: String) -> Result<CommandOutcome<bool>, ComputationError> {
+pub fn is_file_binary(
+    file_path: String,
+) -> Result<stypes::CommandOutcome<bool>, stypes::ComputationError> {
     is_binary(file_path)
-        .map(Finished)
-        .map_err(|err| OperationNotSupported(err.to_string()))
+        .map(stypes::CommandOutcome::Finished)
+        .map_err(|err| stypes::ComputationError::OperationNotSupported(err.to_string()))
 }
diff --git a/application/apps/indexer/session/src/unbound/commands/folder.rs b/application/apps/indexer/session/src/unbound/commands/folder.rs
index da6cdab732..27341e86f6 100644
--- a/application/apps/indexer/session/src/unbound/commands/folder.rs
+++ b/application/apps/indexer/session/src/unbound/commands/folder.rs
@@ -1,103 +1,6 @@
-use std::{ffi::OsStr, fs::Metadata};
+use crate::unbound::signal::Signal;
 use walkdir::{DirEntry, WalkDir};
 
-use serde::{Deserialize, Serialize};
-
-use crate::{events::ComputationError, unbound::signal::Signal};
-
-use super::CommandOutcome;
-
-#[allow(clippy::upper_case_acronyms)]
-#[derive(Serialize, Deserialize)]
-enum EntityType {
-    BlockDevice = 0,
-    CharacterDevice = 1,
-    Directory = 2,
-    FIFO = 3,
-    File = 4,
-    Socket = 5,
-    SymbolicLink = 6,
-}
-
-#[derive(Serialize, Deserialize)]
-struct EntityDetails {
-    filename: String,
-    full: String,
-    path: String,
-    basename: String,
-    ext: String,
-}
-
-impl EntityDetails {
-    pub fn from(entity: &DirEntry) -> Option<EntityDetails> {
-        entity.path().parent().map(|parent| EntityDetails {
-            full: entity.path().to_string_lossy().to_string(),
-            filename: entity.file_name().to_string_lossy().to_string(),
-            path: parent.to_string_lossy().to_string(),
-            basename: entity.file_name().to_string_lossy().to_string(),
-            ext: entity
-                .path()
-                .extension()
-                .unwrap_or(OsStr::new(""))
-                .to_string_lossy()
-                .to_string(),
-        })
-    }
-}
-
-#[derive(Serialize, Deserialize)]
-struct ScanningResult {
-    pub list: Vec<Entity>,
-    pub max_len_reached: bool,
-}
-
-#[derive(Serialize, Deserialize)]
-struct Entity {
-    name: String,
-    fullname: String,
-    kind: EntityType,
-    details: Option<EntityDetails>,
-}
-
-impl Entity {
-    pub fn from(entity: &DirEntry, md: &Metadata) -> Option<Entity> {
-        if md.is_dir() {
-            Entity::dir(entity)
-        } else if md.is_symlink() {
-            Entity::symlink(entity)
-        } else {
-            Entity::file(entity)
-        }
-    }
-
-    fn dir(entity: &DirEntry) -> Option<Entity> {
-        entity.path().file_name().map(|filename| Entity {
-            name: filename.to_string_lossy().to_string(),
-            fullname: entity.path().to_string_lossy().to_string(),
-            kind: EntityType::Directory,
-            details: None,
-        })
-    }
-
-    fn file(entity: &DirEntry) -> Option<Entity> {
-        entity.path().file_name().map(|filename| Entity {
-            name: filename.to_string_lossy().to_string(),
-            fullname: entity.path().to_string_lossy().to_string(),
-            kind: EntityType::File,
-            details: EntityDetails::from(entity),
-        })
-    }
-
-    fn symlink(entity: &DirEntry) -> Option<Entity> {
-        entity.path().file_name().map(|filename| Entity {
-            name: filename.to_string_lossy().to_string(),
-            fullname: entity.path().to_string_lossy().to_string(),
-            kind: EntityType::SymbolicLink,
-            details: EntityDetails::from(entity),
-        })
-    }
-}
-
 /// Find all files and/or folders in a folder
 /// We first consider all elements on the same level before
 /// descending into the next level. Kind of what you would get with BFS but
@@ -116,8 +19,8 @@ pub fn get_folder_content(
     include_files: bool,
     include_folders: bool,
     signal: Signal,
-) -> Result<CommandOutcome<String>, ComputationError> {
-    let mut list: Vec<Entity> = vec![];
+) -> Result<stypes::CommandOutcome<stypes::FoldersScanningResult>, stypes::ComputationError> {
+    let mut list: Vec<stypes::FolderEntity> = vec![];
     let mut max_len_reached: bool = false;
     for depth in 1..=max_depth {
         if max_len_reached {
@@ -135,10 +38,10 @@ pub fn get_folder_content(
                 .filter(|e| check_file_or_folder(e, include_files, include_folders))
             {
                 if signal.is_cancelling() {
-                    return Ok(CommandOutcome::Cancelled);
+                    return Ok(stypes::CommandOutcome::Cancelled);
                 }
                 if let Some(entity) = if let Ok(md) = dir_entry.metadata() {
-                    Entity::from(&dir_entry, &md)
+                    stypes::FolderEntity::from(&dir_entry, &md)
                 } else {
                     None
                 } {
@@ -151,15 +54,12 @@ pub fn get_folder_content(
             }
         }
     }
-    let results = ScanningResult {
-        list,
-        max_len_reached,
-    };
-    serde_json::to_string(&results)
-        .map(CommandOutcome::Finished)
-        .map_err(|e| -> ComputationError {
-            ComputationError::Process(format!("Could not produce json: {e}"))
-        })
+    Ok(stypes::CommandOutcome::Finished(
+        stypes::FoldersScanningResult {
+            list,
+            max_len_reached,
+        },
+    ))
 }
 
 fn check_file_or_folder(e: &DirEntry, include_files: bool, include_folders: bool) -> bool {
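
The level-by-level traversal described in the doc comment above (one `WalkDir` pass per depth, which yields a BFS-like order without an explicit queue) reduces to a short sketch; the result-length cap and the cancellation signal of the real implementation are omitted here:

    use walkdir::WalkDir;

    /// Illustrative only: collect entry paths level by level up to `max_depth`.
    fn list_level_by_level(root: &str, max_depth: usize) -> Vec<String> {
        let mut found = Vec::new();
        for depth in 1..=max_depth {
            // min_depth == max_depth restricts each pass to exactly one directory level.
            for entry in WalkDir::new(root)
                .min_depth(depth)
                .max_depth(depth)
                .into_iter()
                .filter_map(Result::ok)
            {
                found.push(entry.path().to_string_lossy().to_string());
            }
        }
        found
    }
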
diff --git a/application/apps/indexer/session/src/unbound/commands/mod.rs b/application/apps/indexer/session/src/unbound/commands/mod.rs
index 5e0f7d92e0..278c33a2a2 100644
--- a/application/apps/indexer/session/src/unbound/commands/mod.rs
+++ b/application/apps/indexer/session/src/unbound/commands/mod.rs
@@ -10,43 +10,19 @@ mod shells;
 mod sleep;
 mod someip;
 
-use crate::{events::ComputationError, unbound::commands::someip::get_someip_statistic};
+use crate::unbound::commands::someip::get_someip_statistic;
 
+use super::signal::Signal;
 use log::{debug, error};
 use processor::search::filter::SearchFilter;
-use serde::{Deserialize, Serialize};
 use tokio::sync::oneshot;
-use uuid::Uuid;
-
-use super::signal::Signal;
-
-#[derive(Debug, Serialize, Deserialize)]
-pub enum CommandOutcome<T> {
-    Finished(T),
-    Cancelled,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub enum UuidCommandOutcome<T: Serialize> {
-    Finished((Uuid, T)),
-    Cancelled(Uuid),
-}
-
-impl<T: Serialize> CommandOutcome<T> {
-    pub fn as_command_result(self, uuid: Uuid) -> UuidCommandOutcome<T> {
-        match self {
-            CommandOutcome::Cancelled => UuidCommandOutcome::Cancelled(uuid),
-            CommandOutcome::Finished(c) => UuidCommandOutcome::Finished((uuid, c)),
-        }
-    }
-}
 
 #[derive(Debug)]
 pub enum Command {
     // This command is used only for testing/debug goals
     Sleep(
         u64,
-        oneshot::Sender<Result<CommandOutcome<()>, ComputationError>>,
+        oneshot::Sender<Result<stypes::CommandOutcome<()>, stypes::ComputationError>>,
     ),
     FolderContent(
         Vec<String>,
@@ -54,40 +30,56 @@ pub enum Command {
         usize,
         bool,
         bool,
-        oneshot::Sender<Result<CommandOutcome<String>, ComputationError>>,
+        oneshot::Sender<
+            Result<stypes::CommandOutcome<stypes::FoldersScanningResult>, stypes::ComputationError>,
+        >,
     ),
     SpawnProcess(
         String,
         Vec<String>,
-        oneshot::Sender<Result<CommandOutcome<()>, ComputationError>>,
+        oneshot::Sender<Result<stypes::CommandOutcome<()>, stypes::ComputationError>>,
     ),
     GetRegexError(
         SearchFilter,
-        oneshot::Sender<Result<CommandOutcome<Option<String>>, ComputationError>>,
+        oneshot::Sender<Result<stypes::CommandOutcome<Option<String>>, stypes::ComputationError>>,
     ),
     Checksum(
         String,
-        oneshot::Sender<Result<CommandOutcome<String>, ComputationError>>,
+        oneshot::Sender<Result<stypes::CommandOutcome<String>, stypes::ComputationError>>,
     ),
     GetDltStats(
         Vec<String>,
-        oneshot::Sender<Result<CommandOutcome<String>, ComputationError>>,
+        oneshot::Sender<
+            Result<stypes::CommandOutcome<stypes::DltStatisticInfo>, stypes::ComputationError>,
+        >,
     ),
     GetSomeipStatistic(
         Vec<String>,
-        oneshot::Sender<Result<CommandOutcome<String>, ComputationError>>,
+        oneshot::Sender<Result<stypes::CommandOutcome<String>, stypes::ComputationError>>,
+    ),
+    GetShellProfiles(
+        oneshot::Sender<
+            Result<stypes::CommandOutcome<stypes::ProfileList>, stypes::ComputationError>,
+        >,
+    ),
+    GetContextEnvvars(
+        oneshot::Sender<
+            Result<stypes::CommandOutcome<stypes::MapKeyValue>, stypes::ComputationError>,
+        >,
+    ),
+    SerialPortsList(
+        oneshot::Sender<
+            Result<stypes::CommandOutcome<stypes::SerialPortsList>, stypes::ComputationError>,
+        >,
     ),
-    GetShellProfiles(oneshot::Sender<Result<CommandOutcome<String>, ComputationError>>),
-    GetContextEnvvars(oneshot::Sender<Result<CommandOutcome<String>, ComputationError>>),
-    SerialPortsList(oneshot::Sender<Result<CommandOutcome<Vec<String>>, ComputationError>>),
     IsFileBinary(
         String,
-        oneshot::Sender<Result<CommandOutcome<bool>, ComputationError>>,
+        oneshot::Sender<Result<stypes::CommandOutcome<bool>, stypes::ComputationError>>,
     ),
     CancelTest(
         i64,
         i64,
-        oneshot::Sender<Result<CommandOutcome<i64>, ComputationError>>,
+        oneshot::Sender<Result<stypes::CommandOutcome<i64>, stypes::ComputationError>>,
     ),
 }
 
@@ -152,7 +144,7 @@ pub async fn process(command: Command, signal: Signal) {
     }
 }
 
-pub fn err(command: Command, err: ComputationError) {
+pub fn err(command: Command, err: stypes::ComputationError) {
     let cmd = command.to_string();
     if match command {
         Command::Sleep(_, tx) => tx.send(Err(err)).is_err(),
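
Every `Command` variant still carries a oneshot sender for its (now typed) result. A hedged sketch of the round-trip for `Sleep`, where `dispatch` stands in for whatever hands commands to the worker loop:

    use tokio::sync::oneshot;

    // Illustrative round-trip; `dispatch` is a stand-in for the real command queue.
    async fn sleep_roundtrip(
        dispatch: impl FnOnce(Command),
    ) -> Result<stypes::CommandOutcome<()>, stypes::ComputationError> {
        let (tx, rx) = oneshot::channel();
        dispatch(Command::Sleep(250, tx));
        rx.await
            .map_err(|e| stypes::ComputationError::Communication(format!("channel error: {e}")))?
    }
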
diff --git a/application/apps/indexer/session/src/unbound/commands/process.rs b/application/apps/indexer/session/src/unbound/commands/process.rs
index 9eda3658e0..3146ae607f 100644
--- a/application/apps/indexer/session/src/unbound/commands/process.rs
+++ b/application/apps/indexer/session/src/unbound/commands/process.rs
@@ -1,5 +1,4 @@
-use super::CommandOutcome;
-use crate::{events::ComputationError, unbound::signal::Signal};
+use crate::unbound::signal::Signal;
 #[cfg(target_os = "windows")]
 use std::os::windows::process::CommandExt;
 use std::{
@@ -30,10 +29,10 @@ pub fn execute(
     exe: String,
     args: Vec<String>,
     _signal: Signal,
-) -> Result<CommandOutcome<()>, ComputationError> {
-    Ok(CommandOutcome::Finished(
+) -> Result<stypes::CommandOutcome<()>, stypes::ComputationError> {
+    Ok(stypes::CommandOutcome::Finished(
         spawn(Path::new(&exe), args)
-            .map_err(ComputationError::IoOperation)
+            .map_err(stypes::ComputationError::IoOperation)
             .map(|_c| ())?,
     ))
 }
diff --git a/application/apps/indexer/session/src/unbound/commands/regex.rs b/application/apps/indexer/session/src/unbound/commands/regex.rs
index 3860ea9b66..53ea894ee7 100644
--- a/application/apps/indexer/session/src/unbound/commands/regex.rs
+++ b/application/apps/indexer/session/src/unbound/commands/regex.rs
@@ -1,10 +1,9 @@
-use super::CommandOutcome;
-use crate::{events::ComputationError, unbound::signal::Signal};
+use crate::unbound::signal::Signal;
 use processor::search::filter::{get_filter_error as validator, SearchFilter};
 
 pub fn get_filter_error(
     filter: SearchFilter,
     _signal: Signal,
-) -> Result<CommandOutcome<Option<String>>, ComputationError> {
-    Ok(CommandOutcome::Finished(validator(&filter)))
+) -> Result<stypes::CommandOutcome<Option<String>>, stypes::ComputationError> {
+    Ok(stypes::CommandOutcome::Finished(validator(&filter)))
 }
diff --git a/application/apps/indexer/session/src/unbound/commands/serial.rs b/application/apps/indexer/session/src/unbound/commands/serial.rs
index 8571ed1f90..ac55909d90 100644
--- a/application/apps/indexer/session/src/unbound/commands/serial.rs
+++ b/application/apps/indexer/session/src/unbound/commands/serial.rs
@@ -1,15 +1,16 @@
-use super::CommandOutcome;
-use crate::{events::ComputationError, unbound::signal::Signal};
+use crate::unbound::signal::Signal;
 
-pub fn available_ports(_signal: Signal) -> Result<CommandOutcome<Vec<String>>, ComputationError> {
+pub fn available_ports(
+    _signal: Signal,
+) -> Result<stypes::CommandOutcome<stypes::SerialPortsList>, stypes::ComputationError> {
     serialport::available_ports()
-        .map_err(|e| ComputationError::IoOperation(e.to_string()))
+        .map_err(|e| stypes::ComputationError::IoOperation(e.to_string()))
         .map(|ports| {
-            CommandOutcome::Finished(
+            stypes::CommandOutcome::Finished(stypes::SerialPortsList(
                 ports
                     .into_iter()
                     .map(|p| p.port_name)
                     .collect::<Vec<String>>(),
-            )
+            ))
         })
 }
diff --git a/application/apps/indexer/session/src/unbound/commands/shells.rs b/application/apps/indexer/session/src/unbound/commands/shells.rs
index fdf733427a..bb62e106ee 100644
--- a/application/apps/indexer/session/src/unbound/commands/shells.rs
+++ b/application/apps/indexer/session/src/unbound/commands/shells.rs
@@ -1,27 +1,25 @@
-use super::CommandOutcome;
-use crate::{events::ComputationError, unbound::signal::Signal};
+use crate::unbound::signal::Signal;
 use envvars;
-use serde_json;
 
-pub fn get_valid_profiles(_signal: Signal) -> Result<CommandOutcome<String>, ComputationError> {
-    let mut profiles =
-        envvars::get_profiles().map_err(|e| ComputationError::IoOperation(e.to_string()))?;
+pub fn get_valid_profiles(
+    _signal: Signal,
+) -> Result<stypes::CommandOutcome<stypes::ProfileList>, stypes::ComputationError> {
+    let mut profiles = envvars::get_profiles()
+        .map_err(|e| stypes::ComputationError::IoOperation(e.to_string()))?;
     for profile in &mut profiles {
         if let Err(e) = profile.load() {
             log::warn!("Fail to load envvars for \"{}\": {e}", profile.name);
         }
     }
-    Ok(CommandOutcome::Finished(
-        serde_json::to_string(&profiles)
-            .map_err(|e| ComputationError::IoOperation(e.to_string()))?,
-    ))
+    Ok(stypes::CommandOutcome::Finished(stypes::ProfileList(
+        profiles.into_iter().map(|p| p.into()).collect(),
+    )))
 }
 
-pub fn get_context_envvars(_signal: Signal) -> Result<CommandOutcome<String>, ComputationError> {
-    let envvars =
-        envvars::get_context_envvars().map_err(|e| ComputationError::IoOperation(e.to_string()))?;
-    Ok(CommandOutcome::Finished(
-        serde_json::to_string(&envvars)
-            .map_err(|e| ComputationError::IoOperation(e.to_string()))?,
-    ))
+pub fn get_context_envvars(
+    _signal: Signal,
+) -> Result<stypes::CommandOutcome<stypes::MapKeyValue>, stypes::ComputationError> {
+    let envvars = envvars::get_context_envvars()
+        .map_err(|e| stypes::ComputationError::IoOperation(e.to_string()))?;
+    Ok(stypes::CommandOutcome::Finished(envvars.into()))
 }
diff --git a/application/apps/indexer/session/src/unbound/commands/sleep.rs b/application/apps/indexer/session/src/unbound/commands/sleep.rs
index c629d6d217..1002648696 100644
--- a/application/apps/indexer/session/src/unbound/commands/sleep.rs
+++ b/application/apps/indexer/session/src/unbound/commands/sleep.rs
@@ -1,10 +1,12 @@
-use super::CommandOutcome;
-use crate::{events::ComputationError, unbound::signal::Signal};
+use crate::unbound::signal::Signal;
 use tokio::time;
 
 // This command is used for testing/debug purposes only. It should ignore the signal, i.e.
 // ignore cancellation.
-pub async fn sleep(ms: u64, _signal: Signal) -> Result<CommandOutcome<()>, ComputationError> {
+pub async fn sleep(
+    ms: u64,
+    _signal: Signal,
+) -> Result<stypes::CommandOutcome<()>, stypes::ComputationError> {
     let _ = time::sleep(time::Duration::from_millis(ms)).await;
-    Ok(CommandOutcome::Finished(()))
+    Ok(stypes::CommandOutcome::Finished(()))
 }
diff --git a/application/apps/indexer/session/src/unbound/commands/someip.rs b/application/apps/indexer/session/src/unbound/commands/someip.rs
index d98bb2fc48..bffc3f7c6c 100644
--- a/application/apps/indexer/session/src/unbound/commands/someip.rs
+++ b/application/apps/indexer/session/src/unbound/commands/someip.rs
@@ -1,11 +1,12 @@
-use super::CommandOutcome;
-use crate::{events::ComputationError, unbound::signal::Signal};
+use crate::unbound::signal::Signal;
 
 pub fn get_someip_statistic(
     _files: Vec<String>,
     _signal: Signal,
-) -> Result<CommandOutcome<String>, ComputationError> {
-    Err(ComputationError::OperationNotSupported("NYI".into()))
+) -> Result<stypes::CommandOutcome<String>, stypes::ComputationError> {
+    Err(stypes::ComputationError::OperationNotSupported(
+        "NYI".into(),
+    ))
     // use parsers::someip::{read_someip_statistic_from_pcapng, SomeipStatistic};
     // use log::{error, warn};
     // use std::path::Path;
@@ -31,14 +32,14 @@ pub fn get_someip_statistic(
     // });
     // if let Some(err) = error {
     //     error!("Fail to get statistic for: {files:?}");
-    //     return Err(ComputationError::IoOperation(err));
+    //     return Err(stypes::ComputationError::IoOperation(err));
     // }
     // if signal.is_cancelling() {
     //     warn!("Operation of geting statistic for: {files:?} has been cancelled");
-    //     return Ok(CommandOutcome::Cancelled);
+    //     return Ok(stypes::CommandOutcome::Cancelled);
     // }
-    // Ok(CommandOutcome::Finished(
+    // Ok(stypes::CommandOutcome::Finished(
     //     serde_json::to_string(&statistic)
-    //         .map_err(|e| ComputationError::IoOperation(e.to_string()))?,
+    //         .map_err(|e| stypes::ComputationError::IoOperation(e.to_string()))?,
     // ))
 }
diff --git a/application/apps/indexer/session/src/unbound/mod.rs b/application/apps/indexer/session/src/unbound/mod.rs
index 205ec7fb0c..5d06770e5b 100644
--- a/application/apps/indexer/session/src/unbound/mod.rs
+++ b/application/apps/indexer/session/src/unbound/mod.rs
@@ -4,7 +4,6 @@ pub mod commands;
 mod signal;
 
 use crate::{
-    events::ComputationError,
     progress::ProgressProviderAPI,
     unbound::{
         api::{UnboundSessionAPI, API},
@@ -43,9 +42,12 @@ impl UnboundSession {
         )
     }
 
-    pub async fn init(&mut self) -> Result<(), ComputationError> {
+    pub async fn init(&mut self) -> Result<(), stypes::ComputationError> {
         let finished = self.finished.clone();
-        let mut rx = self.rx.take().ok_or(ComputationError::SessionUnavailable)?; // Error: session already running
+        let mut rx = self
+            .rx
+            .take()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?; // Error: session already running
         let progress = ProgressProviderAPI::new()?;
         let session_api = self.session_api.clone();
         tokio::spawn(async move {
@@ -65,7 +67,7 @@ impl UnboundSession {
                         if jobs.contains_key(&id) {
                             commands::err(
                                 job,
-                                ComputationError::InvalidArgs(String::from(
+                                stypes::ComputationError::InvalidArgs(String::from(
                                     "Job has invalid id. Id already exists.",
                                 )),
                             );
diff --git a/application/apps/indexer/session/tests/snapshot_tests/mod.rs b/application/apps/indexer/session/tests/snapshot_tests/mod.rs
index 3489d3ee7e..f14b98fb08 100644
--- a/application/apps/indexer/session/tests/snapshot_tests/mod.rs
+++ b/application/apps/indexer/session/tests/snapshot_tests/mod.rs
@@ -18,18 +18,16 @@
 mod utls;
 
 use std::path::PathBuf;
-
-use sources::factory::{DltParserSettings, FileFormat, ParserType, SomeIpParserSettings};
 use utls::*;
 
 #[tokio::test]
 async fn observe_dlt_session() {
     let input = "../../../developing/resources/attachments.dlt";
-    let parser_settings = DltParserSettings::default();
+    let parser_settings = stypes::DltParserSettings::default();
     let session_main_file = run_observe_session(
         input,
-        FileFormat::Binary,
-        ParserType::Dlt(parser_settings.clone()),
+        stypes::FileFormat::Binary,
+        stypes::ParserType::Dlt(parser_settings.clone()),
     )
     .await;
 
@@ -55,15 +53,15 @@ async fn observe_dlt_with_someip_session() {
         "Fibex file path doesn't exist. Path: {fibex_file}"
     );
 
-    let parser_settings = DltParserSettings {
+    let parser_settings = stypes::DltParserSettings {
         fibex_file_paths: Some(vec![String::from(fibex_file)]),
         ..Default::default()
     };
 
     let session_main_file = run_observe_session(
         input,
-        FileFormat::Binary,
-        ParserType::Dlt(parser_settings.clone()),
+        stypes::FileFormat::Binary,
+        stypes::ParserType::Dlt(parser_settings.clone()),
     )
     .await;
 
@@ -89,14 +87,14 @@ async fn observe_someip_bcapng_session() {
         "Fibex file path doesn't exist. Path: {fibex_file}"
     );
 
-    let parser_settings = SomeIpParserSettings {
+    let parser_settings = stypes::SomeIpParserSettings {
         fibex_file_paths: Some(vec![String::from(fibex_file)]),
     };
 
     let session_main_file = run_observe_session(
         input,
-        FileFormat::PcapNG,
-        ParserType::SomeIp(parser_settings.clone()),
+        stypes::FileFormat::PcapNG,
+        stypes::ParserType::SomeIp(parser_settings.clone()),
     )
     .await;
 
@@ -122,14 +120,14 @@ async fn observe_someip_legacy_session() {
         "Fibex file path doesn't exist. Path: {fibex_file}"
     );
 
-    let parser_settings = SomeIpParserSettings {
+    let parser_settings = stypes::SomeIpParserSettings {
         fibex_file_paths: Some(vec![String::from(fibex_file)]),
     };
 
     let session_main_file = run_observe_session(
         input,
-        FileFormat::PcapLegacy,
-        ParserType::SomeIp(parser_settings.clone()),
+        stypes::FileFormat::PcapLegacy,
+        stypes::ParserType::SomeIp(parser_settings.clone()),
     )
     .await;
 
diff --git a/application/apps/indexer/session/tests/snapshot_tests/utls.rs b/application/apps/indexer/session/tests/snapshot_tests/utls.rs
index 4978b77fa2..4e58501c72 100644
--- a/application/apps/indexer/session/tests/snapshot_tests/utls.rs
+++ b/application/apps/indexer/session/tests/snapshot_tests/utls.rs
@@ -1,6 +1,5 @@
 use serde::{Deserialize, Serialize};
-use session::{events::CallbackEvent, session::Session};
-use sources::factory::{FileFormat, ObserveOptions, ParserType};
+use session::session::Session;
 use std::path::{Path, PathBuf};
 use uuid::Uuid;
 
@@ -92,8 +91,8 @@ fn session_dir_form_file(session_file: &Path) -> PathBuf {
 /// This function is made for test purposes with snapshots.
 pub async fn run_observe_session<P: Into<PathBuf>>(
     input: P,
-    file_format: FileFormat,
-    parser_type: ParserType,
+    file_format: stypes::FileFormat,
+    parser_type: stypes::ParserType,
 ) -> PathBuf {
     let input: PathBuf = input.into();
 
@@ -109,15 +108,15 @@ pub async fn run_observe_session<P: Into<PathBuf>>(
     session
         .observe(
             uuid,
-            ObserveOptions::file(input.clone(), file_format, parser_type),
+            stypes::ObserveOptions::file(input.clone(), file_format, parser_type),
         )
         .unwrap();
 
     while let Some(feedback) = receiver.recv().await {
         match feedback {
-            CallbackEvent::FileRead | CallbackEvent::SessionDestroyed => break,
-            CallbackEvent::SessionError(err) => panic!("Received session error: {err:#?}"),
-            CallbackEvent::OperationError { error, .. } => {
+            stypes::CallbackEvent::FileRead | stypes::CallbackEvent::SessionDestroyed => break,
+            stypes::CallbackEvent::SessionError(err) => panic!("Received session error: {err:#?}"),
+            stypes::CallbackEvent::OperationError { error, .. } => {
                 panic!("Received operation error: {error:#?}")
             }
             _ => {}
diff --git a/application/apps/indexer/sources/Cargo.toml b/application/apps/indexer/sources/Cargo.toml
index 132738d07c..aeeb5b7b0b 100644
--- a/application/apps/indexer/sources/Cargo.toml
+++ b/application/apps/indexer/sources/Cargo.toml
@@ -24,6 +24,7 @@ uuid = { workspace = true , features = ["serde", "v4"] }
 regex.workspace = true
 lazy_static.workspace = true
 shellexpand = "3.1"
+stypes = { path = "../stypes", features=["rustcore"] }
 
 [dev-dependencies]
 env_logger.workspace = true
diff --git a/application/apps/indexer/sources/benches/dlt_producer.rs b/application/apps/indexer/sources/benches/dlt_producer.rs
index 6fc622d4bb..c284033a82 100644
--- a/application/apps/indexer/sources/benches/dlt_producer.rs
+++ b/application/apps/indexer/sources/benches/dlt_producer.rs
@@ -36,11 +36,9 @@ fn dlt_producer(c: &mut Criterion) {
                 || {
                     let parser = DltParser::new(None, fibex.as_ref(), None, None, true);
                     let source = create_binary_bytesource(data);
-                    let producer = MessageProducer::new(parser, source, black_box(None));
-
-                    producer
+                    MessageProducer::new(parser, source, black_box(None))
                 },
-                |p| run_producer(p),
+                run_producer,
                 BatchSize::SmallInput,
             )
     });
diff --git a/application/apps/indexer/sources/benches/someip_legacy_producer.rs b/application/apps/indexer/sources/benches/someip_legacy_producer.rs
index 5b5cb01f81..2014cd516c 100644
--- a/application/apps/indexer/sources/benches/someip_legacy_producer.rs
+++ b/application/apps/indexer/sources/benches/someip_legacy_producer.rs
@@ -32,11 +32,9 @@ fn someip_legacy_producer(c: &mut Criterion) {
                 || {
                     let parser = create_someip_parser(fibex_path.as_ref());
                     let source = PcapLegacyByteSource::new(Cursor::new(data)).unwrap();
-                    let producer = MessageProducer::new(parser, source, black_box(None));
-
-                    producer
+                    MessageProducer::new(parser, source, black_box(None))
                 },
-                |p| run_producer(p),
+                run_producer,
                 BatchSize::SmallInput,
             )
     });
diff --git a/application/apps/indexer/sources/benches/someip_producer.rs b/application/apps/indexer/sources/benches/someip_producer.rs
index dfe999279b..8b6e44803b 100644
--- a/application/apps/indexer/sources/benches/someip_producer.rs
+++ b/application/apps/indexer/sources/benches/someip_producer.rs
@@ -32,11 +32,9 @@ fn someip_producer(c: &mut Criterion) {
                 || {
                     let parser = create_someip_parser(fibex_path.as_ref());
                     let source = PcapngByteSource::new(Cursor::new(data)).unwrap();
-                    let producer = MessageProducer::new(parser, source, black_box(None));
-
-                    producer
+                    MessageProducer::new(parser, source, black_box(None))
                 },
-                |p| run_producer(p),
+                run_producer,
                 BatchSize::SmallInput,
             )
     });
diff --git a/application/apps/indexer/sources/benches/text_producer.rs b/application/apps/indexer/sources/benches/text_producer.rs
index d5ab7a0658..6288889613 100644
--- a/application/apps/indexer/sources/benches/text_producer.rs
+++ b/application/apps/indexer/sources/benches/text_producer.rs
@@ -20,11 +20,9 @@ fn text_producer(c: &mut Criterion) {
                 || {
                     let parser = StringTokenizer {};
                     let source = create_binary_bytesource(data);
-                    let producer = MessageProducer::new(parser, source, black_box(None));
-
-                    producer
+                    MessageProducer::new(parser, source, black_box(None))
                 },
-                |p| run_producer(p),
+                run_producer,
                 BatchSize::SmallInput,
             )
     });
diff --git a/application/apps/indexer/sources/src/command/process.rs b/application/apps/indexer/sources/src/command/process.rs
index 8758caf857..759b0e1818 100644
--- a/application/apps/indexer/sources/src/command/process.rs
+++ b/application/apps/indexer/sources/src/command/process.rs
@@ -1,4 +1,4 @@
-use crate::{sde, ByteSource, Error as SourceError, ReloadInfo, SourceFilter};
+use crate::{ByteSource, Error as SourceError, ReloadInfo, SourceFilter};
 use buf_redux::Buffer;
 use regex::{Captures, Regex};
 use shellexpand::tilde;
@@ -210,13 +210,16 @@ impl ByteSource for ProcessSource {
         self.len() == 0
     }
 
-    async fn income(&mut self, request: sde::SdeRequest) -> Result<sde::SdeResponse, SourceError> {
+    async fn income(
+        &mut self,
+        request: stypes::SdeRequest,
+    ) -> Result<stypes::SdeResponse, SourceError> {
         let bytes = match request {
-            sde::SdeRequest::WriteText(ref str) => str.as_bytes(),
-            sde::SdeRequest::WriteBytes(ref bytes) => bytes,
+            stypes::SdeRequest::WriteText(ref str) => str.as_bytes(),
+            stypes::SdeRequest::WriteBytes(ref bytes) => bytes,
         };
         self.stdin.write_all(bytes).await.map_err(SourceError::Io)?;
-        Ok(sde::SdeResponse { bytes: bytes.len() })
+        Ok(stypes::SdeResponse { bytes: bytes.len() })
     }
 }
 
diff --git a/application/apps/indexer/sources/src/factory.rs b/application/apps/indexer/sources/src/factory.rs
deleted file mode 100644
index dcecb7160a..0000000000
--- a/application/apps/indexer/sources/src/factory.rs
+++ /dev/null
@@ -1,136 +0,0 @@
-use indexer_base::config::MulticastInfo;
-use parsers::dlt;
-use serde::{Deserialize, Serialize};
-use std::{collections::HashMap, path::PathBuf};
-use uuid::Uuid;
-
-#[allow(clippy::large_enum_variant)]
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub enum ParserType {
-    Dlt(DltParserSettings),
-    SomeIp(SomeIpParserSettings),
-    Text,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct DltParserSettings {
-    pub filter_config: Option<dlt::DltFilterConfig>,
-    pub fibex_file_paths: Option<Vec<String>>,
-    pub with_storage_header: bool,
-    pub tz: Option<String>,
-    #[serde(skip)]
-    pub fibex_metadata: Option<dlt::FibexDltMetadata>,
-}
-
-impl Default for DltParserSettings {
-    fn default() -> Self {
-        Self {
-            filter_config: None,
-            fibex_file_paths: None,
-            with_storage_header: true,
-            tz: None,
-            fibex_metadata: None,
-        }
-    }
-}
-
-impl DltParserSettings {
-    pub fn new_including_storage_headers(
-        filter_config: Option<dlt::DltFilterConfig>,
-        fibex_file_paths: Option<Vec<String>>,
-    ) -> Self {
-        Self {
-            filter_config,
-            fibex_file_paths,
-            with_storage_header: true,
-            tz: None,
-            fibex_metadata: None,
-        }
-    }
-
-    pub fn load_fibex_metadata(&mut self) {
-        if self.fibex_metadata.is_some() {
-            return;
-        }
-        self.fibex_metadata = if let Some(paths) = self.fibex_file_paths.as_ref() {
-            dlt::gather_fibex_data(dlt::FibexConfig {
-                fibex_file_paths: paths.clone(),
-            })
-        } else {
-            None
-        };
-    }
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct SomeIpParserSettings {
-    pub fibex_file_paths: Option<Vec<String>>,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub enum Transport {
-    Process(ProcessTransportConfig),
-    TCP(TCPTransportConfig),
-    UDP(UDPTransportConfig),
-    Serial(SerialTransportConfig),
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct ProcessTransportConfig {
-    pub cwd: PathBuf,
-    pub command: String,
-    pub envs: HashMap<String, String>,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct SerialTransportConfig {
-    pub path: String,
-    pub baud_rate: u32,
-    pub data_bits: u8,
-    pub flow_control: u8,
-    pub parity: u8,
-    pub stop_bits: u8,
-    pub send_data_delay: u8,
-    pub exclusive: bool,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct TCPTransportConfig {
-    pub bind_addr: String,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct UDPTransportConfig {
-    pub bind_addr: String,
-    pub multicast: Vec<MulticastInfo>,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub enum FileFormat {
-    PcapNG,
-    PcapLegacy,
-    Text,
-    Binary,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub enum ObserveOrigin {
-    File(String, FileFormat, PathBuf),
-    Concat(Vec<(String, FileFormat, PathBuf)>),
-    Stream(String, Transport),
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct ObserveOptions {
-    pub origin: ObserveOrigin,
-    pub parser: ParserType,
-}
-
-impl ObserveOptions {
-    pub fn file(filename: PathBuf, file_origin: FileFormat, parser: ParserType) -> Self {
-        ObserveOptions {
-            origin: ObserveOrigin::File(Uuid::new_v4().to_string(), file_origin, filename),
-            parser,
-        }
-    }
-}
diff --git a/application/apps/indexer/sources/src/lib.rs b/application/apps/indexer/sources/src/lib.rs
index f90cfca90d..efa791c426 100644
--- a/application/apps/indexer/sources/src/lib.rs
+++ b/application/apps/indexer/sources/src/lib.rs
@@ -14,7 +14,6 @@ mod tests;
 
 pub mod binary;
 pub mod command;
-pub mod factory;
 pub mod producer;
 pub mod sde;
 pub mod serial;
@@ -78,6 +77,16 @@ pub enum Error {
     NotSupported,
 }
 
+impl From<Error> for stypes::NativeError {
+    fn from(err: Error) -> Self {
+        stypes::NativeError {
+            severity: stypes::Severity::ERROR,
+            kind: stypes::NativeErrorKind::ComputationFailed,
+            message: Some(format!("Fail create source: {err}")),
+        }
+    }
+}
+
 pub(crate) const DEFAULT_READER_CAPACITY: usize = 10 * 1024 * 1024;
 pub(crate) const DEFAULT_MIN_BUFFER_SPACE: usize = 10 * 1024;
 
@@ -121,7 +130,7 @@ pub trait ByteSource: Send + Sync {
     ///
     /// # Note:
     ///
-    /// This function must be **Cancel-Safe** if for structs which support [`sde::SdeRequest`] by
+    /// This function must be **Cancel-Safe** for structs which support [`stypes::SdeRequest`] by
     /// implementing the method [`ByteSource::income()`].
     async fn load(&mut self, filter: Option<&SourceFilter>) -> Result<Option<ReloadInfo>, Error>;
 
@@ -138,7 +147,7 @@ pub trait ByteSource: Send + Sync {
     ///
     /// The method [`ByteSource::reload()`] must be **Cancel-Safe** for structs that support this
     /// method
-    async fn income(&mut self, _msg: sde::SdeRequest) -> Result<sde::SdeResponse, Error> {
+    async fn income(&mut self, _msg: stypes::SdeRequest) -> Result<stypes::SdeResponse, Error> {
         Err(Error::NotSupported)
     }
 }
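
The new `From<Error> for stypes::NativeError` impl allows source-layer errors to bubble up with plain `?`; a minimal sketch, with a faked fallible constructor standing in for any real byte-source setup:

    // Illustrative stand-in for any fallible source constructor in this crate.
    fn open_source() -> Result<(), sources::Error> {
        Err(sources::Error::NotSupported)
    }

    fn setup() -> Result<(), stypes::NativeError> {
        // `?` converts sources::Error into stypes::NativeError via the impl above.
        open_source()?;
        Ok(())
    }
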
diff --git a/application/apps/indexer/sources/src/producer/tests/mock_byte_source.rs b/application/apps/indexer/sources/src/producer/tests/mock_byte_source.rs
index 48cc6a8739..f8754056f1 100644
--- a/application/apps/indexer/sources/src/producer/tests/mock_byte_source.rs
+++ b/application/apps/indexer/sources/src/producer/tests/mock_byte_source.rs
@@ -1,7 +1,6 @@
 use std::collections::VecDeque;
 use std::time::Duration;
 
-use crate::sde;
 use crate::ByteSource;
 use crate::Error;
 use crate::ReloadInfo;
@@ -101,13 +100,13 @@ impl ByteSource for MockByteSource {
         Ok(Some(reload_info))
     }
 
-    async fn income(&mut self, msg: sde::SdeRequest) -> Result<sde::SdeResponse, Error> {
+    async fn income(&mut self, msg: stypes::SdeRequest) -> Result<stypes::SdeResponse, Error> {
         // Read the input for now and return its length
         let bytes = match &msg {
-            sde::SdeRequest::WriteText(text) => text.as_bytes(),
-            sde::SdeRequest::WriteBytes(bytes) => bytes,
+            stypes::SdeRequest::WriteText(text) => text.as_bytes(),
+            stypes::SdeRequest::WriteBytes(bytes) => bytes,
         };
-        Ok(sde::SdeResponse { bytes: bytes.len() })
+        Ok(stypes::SdeResponse { bytes: bytes.len() })
     }
 }
 
@@ -189,26 +188,26 @@ async fn test_mock_byte_source_income() {
 
     const BYTES_LEN: usize = 5;
 
-    let byte_msg = sde::SdeRequest::WriteBytes(vec![b'a'; BYTES_LEN]);
+    let byte_msg = stypes::SdeRequest::WriteBytes(vec![b'a'; BYTES_LEN]);
 
     let byte_income_res = source.income(byte_msg).await;
     // Byte income should succeed producing a response with the length of the provided bytes.
     assert!(matches!(
         byte_income_res,
-        Ok(sde::SdeResponse { bytes: BYTES_LEN })
+        Ok(stypes::SdeResponse { bytes: BYTES_LEN })
     ));
 
     // *** Text Tests ***
     const TEXT: &str = "income text";
     const TEXT_LEN: usize = TEXT.len();
 
-    let text_msg = sde::SdeRequest::WriteText(TEXT.into());
+    let text_msg = stypes::SdeRequest::WriteText(TEXT.into());
 
     let text_income_res = source.income(text_msg).await;
 
     // Text income should succeed producing a response with the length of the provided text bytes.
     assert!(matches!(
         text_income_res,
-        Ok(sde::SdeResponse { bytes: TEXT_LEN })
+        Ok(stypes::SdeResponse { bytes: TEXT_LEN })
     ));
 }
diff --git a/application/apps/indexer/sources/src/producer/tests/multi_parse.rs b/application/apps/indexer/sources/src/producer/tests/multi_parse.rs
index 418b6c200c..8d43fc6508 100644
--- a/application/apps/indexer/sources/src/producer/tests/multi_parse.rs
+++ b/application/apps/indexer/sources/src/producer/tests/multi_parse.rs
@@ -10,7 +10,7 @@ use futures::{pin_mut, StreamExt};
 use parsers::{Error as ParseError, ParseYield};
 use tokio::sync::{mpsc::unbounded_channel, oneshot};
 
-use crate::{producer::MessageProducer, sde::SdeRequest};
+use crate::producer::MessageProducer;
 
 #[tokio::test]
 async fn parse_items_then_skip() {
@@ -407,7 +407,7 @@ async fn sde_communication() {
     const SDE_TEXT: &str = "sde_msg";
     tx_sde
         .send((
-            SdeRequest::WriteText(String::from(SDE_TEXT)),
+            stypes::SdeRequest::WriteText(String::from(SDE_TEXT)),
             tx_sde_response,
         ))
         .unwrap();
diff --git a/application/apps/indexer/sources/src/producer/tests/single_parse.rs b/application/apps/indexer/sources/src/producer/tests/single_parse.rs
index 470b050340..885252a870 100644
--- a/application/apps/indexer/sources/src/producer/tests/single_parse.rs
+++ b/application/apps/indexer/sources/src/producer/tests/single_parse.rs
@@ -10,7 +10,7 @@ use futures::{pin_mut, StreamExt};
 use parsers::{Error as ParseError, ParseYield};
 use tokio::sync::{mpsc::unbounded_channel, oneshot};
 
-use crate::{producer::MessageProducer, sde::SdeRequest, Error};
+use crate::{producer::MessageProducer, Error};
 
 #[tokio::test]
 async fn empty_byte_source() {
@@ -464,7 +464,7 @@ async fn sde_communication() {
     const SDE_TEXT: &str = "sde_msg";
     tx_sde
         .send((
-            SdeRequest::WriteText(String::from(SDE_TEXT)),
+            stypes::SdeRequest::WriteText(String::from(SDE_TEXT)),
             tx_sde_response,
         ))
         .unwrap();
diff --git a/application/apps/indexer/sources/src/sde.rs b/application/apps/indexer/sources/src/sde.rs
index f50c6d7a45..c582ff6d04 100644
--- a/application/apps/indexer/sources/src/sde.rs
+++ b/application/apps/indexer/sources/src/sde.rs
@@ -1,17 +1,8 @@
-use serde::{Deserialize, Serialize};
 use tokio::sync::oneshot;
 
 // SourceDataExchange - Sde
 // Channel that allows sending messages into a ByteSource implementation at run-time
-pub type SdeMsg = (SdeRequest, oneshot::Sender<Result<SdeResponse, String>>);
-
-#[derive(Deserialize, Serialize)]
-pub enum SdeRequest {
-    WriteText(String),
-    WriteBytes(Vec<u8>),
-}
-
-#[derive(Deserialize, Serialize)]
-pub struct SdeResponse {
-    pub bytes: usize,
-}
+pub type SdeMsg = (
+    stypes::SdeRequest,
+    oneshot::Sender<Result<stypes::SdeResponse, String>>,
+);
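
To show the `SdeMsg` alias in use, a hedged sketch of the sending side; the running producer is assumed to own the receiving half and to answer through the oneshot sender:

    use sources::sde::SdeMsg;
    use tokio::sync::{mpsc, oneshot};

    // Illustrative sender side of the SDE channel.
    async fn send_text(
        tx_sde: &mpsc::UnboundedSender<SdeMsg>,
        text: &str,
    ) -> Result<stypes::SdeResponse, String> {
        let (tx_response, rx_response) = oneshot::channel();
        tx_sde
            .send((stypes::SdeRequest::WriteText(text.to_owned()), tx_response))
            .map_err(|e| format!("sde channel is closed: {e}"))?;
        rx_response
            .await
            .map_err(|e| format!("no response from the byte source: {e}"))?
    }
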
diff --git a/application/apps/indexer/sources/src/serial/serialport.rs b/application/apps/indexer/sources/src/serial/serialport.rs
index 6f23382eef..b5f9b8a20f 100644
--- a/application/apps/indexer/sources/src/serial/serialport.rs
+++ b/application/apps/indexer/sources/src/serial/serialport.rs
@@ -1,6 +1,4 @@
-use crate::{
-    factory::SerialTransportConfig, sde, ByteSource, Error as SourceError, ReloadInfo, SourceFilter,
-};
+use crate::{ByteSource, Error as SourceError, ReloadInfo, SourceFilter};
 use buf_redux::Buffer;
 use bytes::{BufMut, BytesMut};
 use futures::{
@@ -94,7 +92,7 @@ pub struct SerialSource {
 // }
 
 impl SerialSource {
-    pub fn new(config: &SerialTransportConfig) -> Result<Self, SourceError> {
+    pub fn new(config: &stypes::SerialTransportConfig) -> Result<Self, SourceError> {
         match tokio_serial::new(config.path.as_str(), config.baud_rate)
             .data_bits(data_bits(&config.data_bits))
             .flow_control(flow_control(&config.flow_control))
@@ -176,9 +174,12 @@ impl ByteSource for SerialSource {
         self.len() == 0
     }
 
-    async fn income(&mut self, request: sde::SdeRequest) -> Result<sde::SdeResponse, SourceError> {
+    async fn income(
+        &mut self,
+        request: stypes::SdeRequest,
+    ) -> Result<stypes::SdeResponse, SourceError> {
         Ok(match request {
-            sde::SdeRequest::WriteText(mut str) => {
+            stypes::SdeRequest::WriteText(mut str) => {
                 let len = str.len();
                 if self.send_data_delay == 0 {
                     self.write_stream
@@ -194,9 +195,9 @@ impl ByteSource for SerialSource {
                         sleep(Duration::from_millis(self.send_data_delay as u64)).await;
                     }
                 }
-                sde::SdeResponse { bytes: len }
+                stypes::SdeResponse { bytes: len }
             }
-            sde::SdeRequest::WriteBytes(mut bytes) => {
+            stypes::SdeRequest::WriteBytes(mut bytes) => {
                 let len = bytes.len();
                 if self.send_data_delay == 0 {
                     self.write_stream
@@ -212,7 +213,7 @@ impl ByteSource for SerialSource {
                         sleep(Duration::from_millis(self.send_data_delay as u64)).await;
                     }
                 }
-                sde::SdeResponse { bytes: len }
+                stypes::SdeResponse { bytes: len }
             }
         })
     }
diff --git a/application/apps/indexer/sources/src/socket/udp.rs b/application/apps/indexer/sources/src/socket/udp.rs
index ae559b4330..6e32000d5b 100644
--- a/application/apps/indexer/sources/src/socket/udp.rs
+++ b/application/apps/indexer/sources/src/socket/udp.rs
@@ -1,6 +1,5 @@
 use crate::{ByteSource, Error as SourceError, ReloadInfo, SourceFilter};
 use buf_redux::Buffer;
-use indexer_base::config::MulticastInfo;
 use log::trace;
 use std::net::{IpAddr, Ipv4Addr};
 use thiserror::Error;
@@ -17,7 +16,7 @@ pub enum UdpSourceError {
     #[error("Invalid number: {0}")]
     ParseNum(std::num::ParseIntError),
     #[error("Config: {0}")]
-    Config(indexer_base::config::Error),
+    Config(stypes::NetError),
 }
 
 pub struct UdpSource {
@@ -31,7 +30,7 @@ const MAX_DATAGRAM_SIZE: usize = 65_507;
 impl UdpSource {
     pub async fn new<A: ToSocketAddrs>(
         addr: A,
-        multicast: Vec<MulticastInfo>,
+        multicast: Vec<stypes::MulticastInfo>,
     ) -> Result<Self, UdpSourceError> {
         let socket = UdpSocket::bind(addr).await.map_err(UdpSourceError::Io)?;
         for multicast_info in &multicast {
diff --git a/application/apps/indexer/stypes/.gitignore b/application/apps/indexer/stypes/.gitignore
new file mode 100644
index 0000000000..ea8c4bf7f3
--- /dev/null
+++ b/application/apps/indexer/stypes/.gitignore
@@ -0,0 +1 @@
+/target
diff --git a/application/apps/indexer/stypes/Cargo.lock b/application/apps/indexer/stypes/Cargo.lock
new file mode 100644
index 0000000000..f0e5af2c42
--- /dev/null
+++ b/application/apps/indexer/stypes/Cargo.lock
@@ -0,0 +1,2075 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "addr2line"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "android-tzdata"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+
+[[package]]
+name = "android_system_properties"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
+
+[[package]]
+name = "arrayref"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb"
+
+[[package]]
+name = "arrayvec"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
+
+[[package]]
+name = "async-stream"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
+dependencies = [
+ "async-stream-impl",
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-stream-impl"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
+
+[[package]]
+name = "backtrace"
+version = "0.3.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a"
+dependencies = [
+ "addr2line",
+ "cc",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+]
+
+[[package]]
+name = "bincode"
+version = "1.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "bitflags"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
+
+[[package]]
+name = "blake3"
+version = "1.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e"
+dependencies = [
+ "arrayref",
+ "arrayvec",
+ "cc",
+ "cfg-if",
+ "constant_time_eq",
+]
+
+[[package]]
+name = "bstr"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22"
+dependencies = [
+ "memchr",
+ "regex-automata",
+ "serde",
+]
+
+[[package]]
+name = "buf_redux"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f"
+dependencies = [
+ "memchr",
+ "safemem",
+ "slice-deque",
+]
+
+[[package]]
+name = "buf_redux"
+version = "0.8.4"
+source = "git+https://github.com/DmitryAstafyev/buf_redux.git#595d13446d3d90eb4834a3cee67c0f79e28f01d8"
+dependencies = [
+ "memchr",
+ "safemem",
+ "slice-deque",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
+
+[[package]]
+name = "bytecount"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce"
+
+[[package]]
+name = "byteorder"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+
+[[package]]
+name = "bytes"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da"
+
+[[package]]
+name = "cc"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47"
+dependencies = [
+ "shlex",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "chrono"
+version = "0.4.38"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401"
+dependencies = [
+ "android-tzdata",
+ "iana-time-zone",
+ "js-sys",
+ "num-traits",
+ "wasm-bindgen",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "chrono-tz"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d59ae0466b83e838b81a54256c39d5d7c20b9d7daa10510a242d9b75abd5936e"
+dependencies = [
+ "chrono",
+ "chrono-tz-build",
+ "phf",
+]
+
+[[package]]
+name = "chrono-tz-build"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "433e39f13c9a060046954e0592a8d0a4bcb1040125cbf91cb8ee58964cfb350f"
+dependencies = [
+ "parse-zoneinfo",
+ "phf",
+ "phf_codegen",
+]
+
+[[package]]
+name = "circular"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0fc239e0f6cb375d2402d48afb92f76f5404fd1df208a41930ec81eda078bea"
+
+[[package]]
+name = "constant_time_eq"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
+
+[[package]]
+name = "convert_case"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
+
+[[package]]
+name = "convert_case"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "core-foundation"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "core-foundation-sys"
+version = "0.8.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2"
+dependencies = [
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
+
+[[package]]
+name = "darling"
+version = "0.20.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989"
+dependencies = [
+ "darling_core",
+ "darling_macro",
+]
+
+[[package]]
+name = "darling_core"
+version = "0.20.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5"
+dependencies = [
+ "fnv",
+ "ident_case",
+ "proc-macro2",
+ "quote",
+ "strsim",
+ "syn",
+]
+
+[[package]]
+name = "darling_macro"
+version = "0.20.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
+dependencies = [
+ "darling_core",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "derive_builder"
+version = "0.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947"
+dependencies = [
+ "derive_builder_macro",
+]
+
+[[package]]
+name = "derive_builder_core"
+version = "0.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8"
+dependencies = [
+ "darling",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "derive_builder_macro"
+version = "0.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c"
+dependencies = [
+ "derive_builder_core",
+ "syn",
+]
+
+[[package]]
+name = "derive_more"
+version = "0.99.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce"
+dependencies = [
+ "convert_case 0.4.0",
+ "proc-macro2",
+ "quote",
+ "rustc_version",
+ "syn",
+]
+
+[[package]]
+name = "dirs"
+version = "5.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
+dependencies = [
+ "dirs-sys",
+]
+
+[[package]]
+name = "dirs-sys"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
+dependencies = [
+ "libc",
+ "option-ext",
+ "redox_users",
+ "windows-sys 0.48.0",
+]
+
+[[package]]
+name = "dlt-core"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa52d43b97a134644192c66296e5d3e7ed8b3d409b117c62203047bb42c6b9f1"
+dependencies = [
+ "buf_redux 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "byteorder",
+ "bytes",
+ "derive_more",
+ "lazy_static",
+ "log",
+ "memchr",
+ "nom",
+ "quick-xml 0.29.0",
+ "rustc-hash",
+ "serde",
+ "serde_json",
+ "thiserror",
+]
+
+[[package]]
+name = "either"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
+
+[[package]]
+name = "encoding_rs"
+version = "0.8.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "encoding_rs_io"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1cc3c5651fb62ab8aa3103998dade57efdd028544bd300516baa31840c252a83"
+dependencies = [
+ "encoding_rs",
+]
+
+[[package]]
+name = "envvars"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f62cb1fd7910188b23784a60e0738f3e85925e863617d61d1d9c9d7c59d99289"
+dependencies = [
+ "blake3",
+ "fs_extra",
+ "home",
+ "is-terminal",
+ "lazy_static",
+ "log",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "uuid",
+]
+
+[[package]]
+name = "equivalent"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
+
+[[package]]
+name = "etherparse"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "827292ea592108849932ad8e30218f8b1f21c0dfd0696698a18b5d0aed62d990"
+dependencies = [
+ "arrayvec",
+]
+
+[[package]]
+name = "extend"
+version = "0.1.0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "file-tools"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "fs_extra"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"
+
+[[package]]
+name = "futures"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
+
+[[package]]
+name = "futures-executor"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
+
+[[package]]
+name = "futures-macro"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "futures-sink"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
+
+[[package]]
+name = "futures-task"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
+
+[[package]]
+name = "futures-util"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-macro",
+ "futures-sink",
+ "futures-task",
+ "memchr",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "gimli"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd"
+
+[[package]]
+name = "grep-matcher"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47a3141a10a43acfedc7c98a60a834d7ba00dfe7bec9071cbfc19b55b292ac02"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "grep-regex"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9edd147c7e3296e7a26bd3a81345ce849557d5a8e48ed88f736074e760f91f7e"
+dependencies = [
+ "bstr",
+ "grep-matcher",
+ "log",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "grep-searcher"
+version = "0.1.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9b6c14b3fc2e0a107d6604d3231dec0509e691e62447104bc385a46a7892cda"
+dependencies = [
+ "bstr",
+ "encoding_rs",
+ "encoding_rs_io",
+ "grep-matcher",
+ "log",
+ "memchr",
+ "memmap2",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.14.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
+
+[[package]]
+name = "hermit-abi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
+
+[[package]]
+name = "hermit-abi"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc"
+
+[[package]]
+name = "home"
+version = "0.5.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
+dependencies = [
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "humantime"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
+
+[[package]]
+name = "iana-time-zone"
+version = "0.1.61"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220"
+dependencies = [
+ "android_system_properties",
+ "core-foundation-sys",
+ "iana-time-zone-haiku",
+ "js-sys",
+ "wasm-bindgen",
+ "windows-core",
+]
+
+[[package]]
+name = "iana-time-zone-haiku"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "ident_case"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
+
+[[package]]
+name = "indexer_base"
+version = "0.1.0"
+dependencies = [
+ "log",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "indexmap"
+version = "2.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
+dependencies = [
+ "equivalent",
+ "hashbrown",
+]
+
+[[package]]
+name = "io-kit-sys"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "617ee6cf8e3f66f3b4ea67a4058564628cde41901316e19f559e14c7c72c5e7b"
+dependencies = [
+ "core-foundation-sys",
+ "mach2",
+]
+
+[[package]]
+name = "is-terminal"
+version = "0.4.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b"
+dependencies = [
+ "hermit-abi 0.4.0",
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "540654e97a3f4470a492cd30ff187bc95d89557a903a2bbf112e2fae98104ef2"
+
+[[package]]
+name = "js-sys"
+version = "0.3.72"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+
+[[package]]
+name = "libc"
+version = "0.2.155"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
+
+[[package]]
+name = "libredox"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
+dependencies = [
+ "bitflags 2.6.0",
+ "libc",
+]
+
+[[package]]
+name = "libudev"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78b324152da65df7bb95acfcaab55e3097ceaab02fb19b228a9eb74d55f135e0"
+dependencies = [
+ "libc",
+ "libudev-sys",
+]
+
+[[package]]
+name = "libudev-sys"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c8469b4a23b962c1396b9b451dda50ef5b283e8dd309d69033475fa9b334324"
+dependencies = [
+ "libc",
+ "pkg-config",
+]
+
+[[package]]
+name = "lock_api"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
+dependencies = [
+ "autocfg",
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
+
+[[package]]
+name = "mach"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86dd2487cdfea56def77b88438a2c915fb45113c5319bfe7e14306ca4cd0b0e1"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "mach2"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19b955cdeb2a02b9117f121ce63aa52d08ade45de53e48fe6a38b39c10f6f709"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "memchr"
+version = "2.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
+
+[[package]]
+name = "memmap2"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "memoffset"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "merging"
+version = "0.1.0"
+dependencies = [
+ "log",
+ "serde",
+ "serde_json",
+ "thiserror",
+]
+
+[[package]]
+name = "mime"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+
+[[package]]
+name = "mime_guess"
+version = "2.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e"
+dependencies = [
+ "mime",
+ "unicase",
+]
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "mio"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
+dependencies = [
+ "libc",
+ "log",
+ "wasi",
+ "windows-sys 0.48.0",
+]
+
+[[package]]
+name = "mio"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec"
+dependencies = [
+ "hermit-abi 0.3.9",
+ "libc",
+ "wasi",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "mio-serial"
+version = "5.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "20a4c60ca5c9c0e114b3bd66ff4aa5f9b2b175442be51ca6c4365d687a97a2ac"
+dependencies = [
+ "log",
+ "mio 0.8.11",
+ "nix",
+ "serialport",
+ "winapi",
+]
+
+[[package]]
+name = "nix"
+version = "0.26.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
+dependencies = [
+ "bitflags 1.3.2",
+ "cfg-if",
+ "libc",
+ "memoffset",
+ "pin-utils",
+]
+
+[[package]]
+name = "nom"
+version = "7.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "object"
+version = "0.36.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
+
+[[package]]
+name = "option-ext"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
+
+[[package]]
+name = "parking_lot"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "parse-zoneinfo"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f2a05b18d44e2957b88f96ba460715e295bc1d7510468a2f3d3b44535d26c24"
+dependencies = [
+ "regex",
+]
+
+[[package]]
+name = "parsers"
+version = "0.1.0"
+dependencies = [
+ "byteorder",
+ "chrono",
+ "chrono-tz",
+ "dlt-core",
+ "humantime",
+ "lazy_static",
+ "log",
+ "memchr",
+ "rand",
+ "regex",
+ "serde",
+ "someip-messages",
+ "someip-payload",
+ "someip-tools",
+ "thiserror",
+]
+
+[[package]]
+name = "pcap-parser"
+version = "0.14.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b79dfb40aef938ed2082c9ae9443f4eba21b79c1a9d6cfa071f5c2bd8d829491"
+dependencies = [
+ "circular",
+ "nom",
+ "rusticata-macros",
+]
+
+[[package]]
+name = "phf"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc"
+dependencies = [
+ "phf_shared",
+]
+
+[[package]]
+name = "phf_codegen"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a"
+dependencies = [
+ "phf_generator",
+ "phf_shared",
+]
+
+[[package]]
+name = "phf_generator"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0"
+dependencies = [
+ "phf_shared",
+ "rand",
+]
+
+[[package]]
+name = "phf_shared"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b"
+dependencies = [
+ "siphasher",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "pkg-config"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
+dependencies = [
+ "zerocopy",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.85"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "processor"
+version = "0.2.0"
+dependencies = [
+ "bincode",
+ "buf_redux 0.8.4 (git+https://github.com/DmitryAstafyev/buf_redux.git)",
+ "bytecount",
+ "futures",
+ "grep-regex",
+ "grep-searcher",
+ "indexer_base",
+ "itertools",
+ "lazy_static",
+ "log",
+ "parsers",
+ "regex",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "tokio-util",
+ "uuid",
+]
+
+[[package]]
+name = "protocol"
+version = "0.1.0"
+dependencies = [
+ "bincode",
+ "dlt-core",
+ "extend",
+ "processor",
+ "serde",
+ "session",
+ "sources",
+ "tokio",
+ "tslink",
+ "uuid",
+]
+
+[[package]]
+name = "quick-xml"
+version = "0.23.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11bafc859c6815fbaffbbbf4229ecb767ac913fecb27f9ad4343662e9ef099ea"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "quick-xml"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81b9228215d82c7b61490fec1de287136b5de6f5700f6e58ea9ad61a7964ca51"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f"
+dependencies = [
+ "bitflags 2.6.0",
+]
+
+[[package]]
+name = "redox_users"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43"
+dependencies = [
+ "getrandom",
+ "libredox",
+ "thiserror",
+]
+
+[[package]]
+name = "regex"
+version = "1.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
+
+[[package]]
+name = "rustc-hash"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+
+[[package]]
+name = "rustc_version"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
+dependencies = [
+ "semver",
+]
+
+[[package]]
+name = "rusticata-macros"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632"
+dependencies = [
+ "nom",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
+
+[[package]]
+name = "safemem"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
+[[package]]
+name = "semver"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
+
+[[package]]
+name = "serde"
+version = "1.0.204"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.204"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.133"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377"
+dependencies = [
+ "itoa",
+ "memchr",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_spanned"
+version = "0.6.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serialport"
+version = "4.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7331eefcaafaa382c0df95bcd84068f0b3e3c215c300750dde2316e9b8806ed5"
+dependencies = [
+ "bitflags 2.6.0",
+ "cfg-if",
+ "core-foundation",
+ "core-foundation-sys",
+ "io-kit-sys",
+ "libudev",
+ "mach2",
+ "nix",
+ "scopeguard",
+ "unescaper",
+ "winapi",
+]
+
+[[package]]
+name = "session"
+version = "0.1.0"
+dependencies = [
+ "blake3",
+ "crossbeam-channel",
+ "dirs",
+ "dlt-core",
+ "envvars",
+ "file-tools",
+ "futures",
+ "indexer_base",
+ "lazy_static",
+ "log",
+ "merging",
+ "mime_guess",
+ "parsers",
+ "processor",
+ "rustc-hash",
+ "serde",
+ "serde_json",
+ "serialport",
+ "sources",
+ "thiserror",
+ "tokio",
+ "tokio-stream",
+ "tokio-util",
+ "uuid",
+ "walkdir",
+]
+
+[[package]]
+name = "shellexpand"
+version = "3.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da03fa3b94cc19e3ebfc88c4229c49d8f08cdbd1228870a45f0ffdf84988e14b"
+dependencies = [
+ "dirs",
+]
+
+[[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
+[[package]]
+name = "signal-hook-registry"
+version = "1.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "siphasher"
+version = "0.3.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
+
+[[package]]
+name = "slab"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "slice-deque"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ffddf594f5f597f63533d897427a570dbaa9feabaaa06595b74b71b7014507d7"
+dependencies = [
+ "libc",
+ "mach",
+ "winapi",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
+
+[[package]]
+name = "socket2"
+version = "0.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "someip-messages"
+version = "0.3.1"
+source = "git+https://github.com/esrlabs/someip#59b27a6689d72948c4569bc6037c2387c1a661ed"
+dependencies = [
+ "byteorder",
+ "derive_builder",
+ "thiserror",
+]
+
+[[package]]
+name = "someip-payload"
+version = "0.1.5"
+source = "git+https://github.com/esrlabs/someip-payload#b9aa52c96fcbdb0e5aa1ee5258299f0ff7e3a094"
+dependencies = [
+ "byteorder",
+ "log",
+ "quick-xml 0.23.1",
+ "regex",
+ "thiserror",
+ "ux",
+ "voca_rs",
+]
+
+[[package]]
+name = "someip-tools"
+version = "0.1.0"
+dependencies = [
+ "nom",
+ "thiserror",
+]
+
+[[package]]
+name = "sources"
+version = "0.1.0"
+dependencies = [
+ "async-stream",
+ "buf_redux 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes",
+ "etherparse",
+ "futures",
+ "indexer_base",
+ "lazy_static",
+ "log",
+ "parsers",
+ "pcap-parser",
+ "regex",
+ "serde",
+ "shellexpand",
+ "thiserror",
+ "tokio",
+ "tokio-serial",
+ "tokio-stream",
+ "tokio-util",
+ "uuid",
+]
+
+[[package]]
+name = "stfu8"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e51f1e89f093f99e7432c491c382b88a6860a5adbe6bf02574bf0a08efff1978"
+
+[[package]]
+name = "strsim"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
+
+[[package]]
+name = "syn"
+version = "2.0.66"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.63"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.63"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tokio"
+version = "1.41.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33"
+dependencies = [
+ "backtrace",
+ "bytes",
+ "libc",
+ "mio 1.0.2",
+ "parking_lot",
+ "pin-project-lite",
+ "signal-hook-registry",
+ "socket2",
+ "tokio-macros",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "tokio-macros"
+version = "2.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tokio-serial"
+version = "5.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa6e2e4cf0520a99c5f87d5abb24172b5bd220de57c3181baaaa5440540c64aa"
+dependencies = [
+ "cfg-if",
+ "futures",
+ "log",
+ "mio-serial",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-stream"
+version = "0.1.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1"
+dependencies = [
+ "futures-core",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-util"
+version = "0.7.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "futures-io",
+ "futures-sink",
+ "futures-util",
+ "hashbrown",
+ "pin-project-lite",
+ "slab",
+ "tokio",
+]
+
+[[package]]
+name = "toml"
+version = "0.8.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e"
+dependencies = [
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+ "toml_edit",
+]
+
+[[package]]
+name = "toml_datetime"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "toml_edit"
+version = "0.22.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d"
+dependencies = [
+ "indexmap",
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+ "winnow",
+]
+
+[[package]]
+name = "tslink"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7af33803ca4ffb62ac69c1230f7610ac0bc8a5b5a28dfb4d584bea0674ec416"
+dependencies = [
+ "convert_case 0.6.0",
+ "lazy_static",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "syn",
+ "thiserror",
+ "toml",
+ "uuid",
+]
+
+[[package]]
+name = "unescaper"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c878a167baa8afd137494101a688ef8c67125089ff2249284bd2b5f9bfedb815"
+dependencies = [
+ "thiserror",
+]
+
+[[package]]
+name = "unicase"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
+
+[[package]]
+name = "uuid"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a"
+dependencies = [
+ "getrandom",
+ "serde",
+]
+
+[[package]]
+name = "ux"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b59fc5417e036e53226bbebd90196825d358624fd5577432c4e486c95b1b096"
+
+[[package]]
+name = "voca_rs"
+version = "1.15.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e44efbf25e32768d5ecd22244feacc3d3b3eca72d318f5ef0a4764c2c158e18"
+dependencies = [
+ "regex",
+ "stfu8",
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "walkdir"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
+dependencies = [
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows-core"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+dependencies = [
+ "windows-targets 0.48.5",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
+dependencies = [
+ "windows_aarch64_gnullvm 0.48.5",
+ "windows_aarch64_msvc 0.48.5",
+ "windows_i686_gnu 0.48.5",
+ "windows_i686_msvc 0.48.5",
+ "windows_x86_64_gnu 0.48.5",
+ "windows_x86_64_gnullvm 0.48.5",
+ "windows_x86_64_msvc 0.48.5",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
+dependencies = [
+ "windows_aarch64_gnullvm 0.52.6",
+ "windows_aarch64_msvc 0.52.6",
+ "windows_i686_gnu 0.52.6",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc 0.52.6",
+ "windows_x86_64_gnu 0.52.6",
+ "windows_x86_64_gnullvm 0.52.6",
+ "windows_x86_64_msvc 0.52.6",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
+
+[[package]]
+name = "winnow"
+version = "0.6.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "zerocopy"
+version = "0.7.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
+dependencies = [
+ "byteorder",
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.7.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
diff --git a/application/apps/indexer/stypes/Cargo.toml b/application/apps/indexer/stypes/Cargo.toml
new file mode 100644
index 0000000000..1f4191580d
--- /dev/null
+++ b/application/apps/indexer/stypes/Cargo.toml
@@ -0,0 +1,42 @@
+[package]
+name = "stypes"
+description = "Shared types"
+version = "0.1.0"
+edition = "2021"
+
+[features]
+test_and_gen = []
+rustcore = [
+    "dep:tokio",
+    "dep:walkdir",
+    "dep:regex",
+    "dep:envvars",
+    "dlt-core/statistics",
+    "dlt-core/serde-support",
+]
+nodejs = [
+    "dep:node-bindgen"
+]
+
+[dependencies]
+serde = { workspace = true, features = ["derive"] }
+dlt-core = { workspace = true, features = ["serde-support"] }
+regex = { workspace = true, optional = true }
+bincode = "1.3"
+extend = { path = "../tools/extend" }
+uuid = { workspace = true, features = ["serde"] }
+tokio = { workspace = true, optional = true }
+node-bindgen = { git = "https://github.com/infinyon/node-bindgen.git", branch = "master", optional = true }
+thiserror.workspace = true
+walkdir = { workspace = true, optional = true }
+envvars = { workspace = true, optional = true }
+
+[dev-dependencies]
+tokio = { workspace = true }
+walkdir = { workspace = true }
+node-bindgen = { git = "https://github.com/infinyon/node-bindgen.git", branch = "master" }
+proptest = "1.5"
+paste = "1.0"
+uuid = { workspace = true, features = ["serde", "v4"] }
+remove_dir_all = "1.0"
+ts-rs = { version = "10.1", features = ["uuid-impl"] }
diff --git a/application/apps/indexer/stypes/bindings/attachment.ts b/application/apps/indexer/stypes/bindings/attachment.ts
new file mode 100644
index 0000000000..44e19feb97
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/attachment.ts
@@ -0,0 +1,42 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Describes the content of attached data found in the `payload` of a `dlt` message.
+ */
+export type AttachmentInfo = { 
+/**
+ * A unique identifier for the attachment.
+ */
+uuid: string, 
+/**
+ * The full path to the file. Note that `chipmunk` serializes the file name to ensure proper
+ * saving to disk, so the actual file name may differ from the value in the `name` field.
+ */
+filepath: string, 
+/**
+ * The name of the application, usually corresponding to the file name.
+ */
+name: string, 
+/**
+ * The file extension, if available.
+ */
+ext: string | null, 
+/**
+ * The size of the file in bytes.
+ */
+size: number, 
+/**
+ * The `mime` type of the file, if it could be determined.
+ */
+mime: string | null, 
+/**
+ * The log entry numbers containing the application data. Note that the application
+ * data may be contained in a single log entry or split into parts distributed
+ * across sequential log entries.
+ */
+messages: number[], };
+
+/**
+ * A list of attachments.
+ */
+export type AttachmentList = Array<AttachmentInfo>;
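
For orientation, here is a minimal sketch of how client code might consume the `AttachmentInfo` / `AttachmentList` bindings above. It is illustrative only and not part of this change set: the `renderAttachment` and `imageAttachments` helpers, the size formatting, and the relative import path are assumptions, while the field names and types come from the generated file.

```ts
import type { AttachmentInfo, AttachmentList } from "./attachment";

// Hypothetical helper: build a short human-readable label for one attachment,
// using only fields defined in the generated binding.
function renderAttachment(info: AttachmentInfo): string {
    const size =
        info.size >= 1024 * 1024
            ? `${(info.size / (1024 * 1024)).toFixed(1)} MB`
            : `${(info.size / 1024).toFixed(1)} KB`;
    const mime = info.mime ?? "unknown type";
    return `${info.name} (${size}, ${mime})`;
}

// Hypothetical usage: pick out image attachments from a received list.
export function imageAttachments(list: AttachmentList): string[] {
    return list
        .filter((info) => info.mime?.startsWith("image/"))
        .map(renderAttachment);
}
```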
diff --git a/application/apps/indexer/stypes/bindings/callback.ts b/application/apps/indexer/stypes/bindings/callback.ts
new file mode 100644
index 0000000000..96d5e7d479
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/callback.ts
@@ -0,0 +1,61 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { AttachmentInfo } from "./attachment";
+import type { FilterMatchList } from "./miscellaneous";
+import type { NativeError } from "./error";
+import type { Progress } from "./progress";
+
+/**
+ * Represents events sent to the client.
+ */
+export type CallbackEvent = { "StreamUpdated": number } | "FileRead" | { "SearchUpdated": { 
+/**
+ * The number of logs with matches. Can be `0` if the search is reset on the client side.
+ */
+found: number, 
+/**
+ * A map of search conditions and their global match counts within the session.
+ * - `String`: The search condition.
+ * - `u64`: The count of matches.
+ */
+stat: Map<string, number>, } } | { "IndexedMapUpdated": { 
+/**
+ * The number of log entries from search results available for reading.
+ */
+len: number, } } | { "SearchMapUpdated": FilterMatchList | null } | { "SearchValuesUpdated": Map<number, [number, number]> } | { "AttachmentsUpdated": { 
+/**
+ * The size of the attachment in bytes.
+ */
+len: number, 
+/**
+ * The description of the attachment.
+ */
+attachment: AttachmentInfo, } } | { "Progress": { 
+/**
+ * The unique identifier of the operation.
+ */
+uuid: string, 
+/**
+ * Information about the progress.
+ */
+progress: Progress, } } | { "SessionError": NativeError } | { "OperationError": { 
+/**
+ * The unique identifier of the operation that caused the error.
+ */
+uuid: string, 
+/**
+ * The error details.
+ */
+error: NativeError, } } | { "OperationStarted": string } | { "OperationProcessing": string } | { "OperationDone": OperationDone } | "SessionDestroyed";
+
+/**
+ * Contains the results of an operation.
+ */
+export type OperationDone = { 
+/**
+ * The unique identifier of the operation.
+ */
+uuid: string, 
+/**
+ * The results of the operation, if available.
+ */
+result: Array<number> | null, };
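
Since `CallbackEvent` mixes bare string variants with single-key object variants, client code has to distinguish the two shapes before reading any payload. The sketch below shows one hedged way to dispatch on the union; the `onEvent` function and the console logging are illustrative assumptions, not part of the generated API.

```ts
import type { CallbackEvent } from "./callback";

// Illustrative dispatcher over the generated union. Bare string variants
// ("FileRead", "SessionDestroyed") are compared directly; object variants
// are detected via their single discriminating key.
export function onEvent(event: CallbackEvent): void {
    if (event === "FileRead") {
        console.log("initial file read finished");
    } else if (event === "SessionDestroyed") {
        console.log("session destroyed");
    } else if ("StreamUpdated" in event) {
        console.log(`stream length: ${event.StreamUpdated}`);
    } else if ("SearchUpdated" in event) {
        const { found, stat } = event.SearchUpdated;
        console.log(`search matches: ${found} across ${stat.size} conditions`);
    } else if ("OperationDone" in event) {
        console.log(`operation ${event.OperationDone.uuid} finished`);
    } else {
        // The remaining object variants (Progress, SessionError, OperationError,
        // AttachmentsUpdated, ...) would be handled the same way in real code.
    }
}
```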
diff --git a/application/apps/indexer/stypes/bindings/command.ts b/application/apps/indexer/stypes/bindings/command.ts
new file mode 100644
index 0000000000..2a03017a7e
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/command.ts
@@ -0,0 +1,170 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeBool = { "Finished": boolean } | "Cancelled";
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeDltStatisticInfoResult = { "Finished": DltStatisticInfo } | "Cancelled";
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeFoldersScanningResult = { "Finished": FoldersScanningResult } | "Cancelled";
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeOptionalString = { "Finished": string | null } | "Cancelled";
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeProfilesResult = { "Finished": ProfileList } | "Cancelled";
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeSerialPortsList = { "Finished": SerialPortsList } | "Cancelled";
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeString = { "Finished": string } | "Cancelled";
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeVoid = "Finished" | "Cancelled";
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomei64 = { "Finished": number } | "Cancelled";
+
+export type DltLevelDistribution = { non_log: number, log_fatal: number, log_error: number, log_warning: number, log_info: number, log_debug: number, log_verbose: number, log_invalid: number, };
+
+export type DltStatisticInfo = { app_ids: Array<[string, DltLevelDistribution]>, context_ids: Array<[string, DltLevelDistribution]>, ecu_ids: Array<[string, DltLevelDistribution]>, contained_non_verbose: boolean, };
+
+/**
+ * Represents a folder entity in the file system.
+ */
+export type FolderEntity = { 
+/**
+ * The name of the entity (file or folder).
+ */
+name: string, 
+/**
+ * The full path of the entity.
+ */
+fullname: string, 
+/**
+ * The type of the entity (e.g., file, directory, symbolic link).
+ */
+kind: FolderEntityType, 
+/**
+ * Optional detailed information about the entity.
+ */
+details: FolderEntityDetails | null, };
+
+/**
+ * Contains detailed information about a folder entity.
+ */
+export type FolderEntityDetails = { 
+/**
+ * The name of the file or folder.
+ */
+filename: string, 
+/**
+ * The full path to the file or folder.
+ */
+full: string, 
+/**
+ * The directory path containing the file or folder.
+ */
+path: string, 
+/**
+ * The base name of the file or folder.
+ */
+basename: string, 
+/**
+ * The file extension, if applicable.
+ */
+ext: string, };
+
+/**
+ * Represents the type of a folder entity in the file system.
+ */
+export type FolderEntityType = "BlockDevice" | "CharacterDevice" | "Directory" | "FIFO" | "File" | "Socket" | "SymbolicLink";
+
+/**
+ * Represents the result of scanning a folder.
+ */
+export type FoldersScanningResult = { 
+/**
+ * A list of folder entities found during the scan.
+ */
+list: Array<FolderEntity>, 
+/**
+ * Indicates whether the maximum length of results was reached.
+ */
+max_len_reached: boolean, };
+
+export type Profile = { 
+/**
+ * The suggested name of the shell. On Unix-based systems this is the name of the
+ * executable file, e.g. "bash" or "fish". On Windows it is a name like "GitBash"
+ * or "PowerShell".
+ */
+name: string, 
+/**
+ * The path to the shell's executable file.
+ */
+path: string, 
+/**
+ * The list of environment variables. Because extracting them can take some time,
+ * `envvars = None` by default. To load the data, use the `load` method, which
+ * attempts to detect the environment variables.
+ */
+envvars: Map<string, string>, 
+/**
+ * `true` if the path to the shell's executable file is a symlink to another location.
+ */
+symlink: boolean, };
+
+/**
+ * Represents a list of shell profiles.
+ *
+ * This structure contains a vector of `Profile` entries, one for each shell
+ * profile detected on the system.
+ */
+export type ProfileList = Array<Profile>;
+
+/**
+ * Represents a list of serial ports.
+ *
+ * This structure contains a vector of strings, where each string represents the name
+ * or identifier of a serial port available on the system.
+ */
+export type SerialPortsList = Array<string>;
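
All of the `CommandOutcome*` aliases above share the same shape: `"Cancelled"` as a bare string and `{ "Finished": … }` as a single-key object. A hedged sketch of unwrapping the folder-scanning variant; the helper name is made up for illustration.

```ts
import type { CommandOutcomeFoldersScanningResult, FoldersScanningResult } from "./command";

// Returns the payload of a finished scan, or undefined if the command was cancelled.
export function unwrapFoldersOutcome(
    outcome: CommandOutcomeFoldersScanningResult,
): FoldersScanningResult | undefined {
    return outcome === "Cancelled" ? undefined : outcome.Finished;
}
```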
diff --git a/application/apps/indexer/stypes/bindings/dlt.ts b/application/apps/indexer/stypes/bindings/dlt.ts
new file mode 100644
index 0000000000..4db3b1d986
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/dlt.ts
@@ -0,0 +1,28 @@
+/**
+ * ATTENTION:
+ * THIS FILE IS MANUALLY CREATED BECAUSE `ts_rs` CANNOT BE APPLIED
+ * TO FOREIGN TYPES (`DltFilterConfig` comes from the `dlt-core` crate).
+ * DO NOT REMOVE THIS FILE.
+ */
+export interface DltFilterConfig {
+    /// only select log entries with level MIN_LEVEL and more severe
+    ///
+    /// ``` text
+    ///  1 => FATAL
+    ///  2 => ERROR
+    ///  3 => WARN
+    ///  4 => INFO
+    ///  5 => DEBUG
+    ///  6 => VERBOSE
+    /// ```
+    min_log_level?: number;
+    /// what app ids should be allowed.
+    app_ids?: string[];
+    /// what ecu ids should be allowed
+    ecu_ids?: string[];
+    /// what context ids should be allowed
+    context_ids?: string[];
+    /// how many app ids exist in total
+    app_id_count: number;
+    /// how many context ids exist in total
+    context_id_count: number;
+}
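
For illustration only, a `DltFilterConfig` literal that keeps entries of level WARN and more severe from two made-up application ids; the counter values are placeholders, not something the crate prescribes.

```ts
import { DltFilterConfig } from './dlt';

export const warnAndAbove: DltFilterConfig = {
    min_log_level: 3, // 3 => WARN on the scale documented above
    app_ids: ['APP1', 'APP2'], // hypothetical application ids
    // ecu_ids and context_ids are omitted: no restriction on those dimensions
    app_id_count: 2,
    context_id_count: 0,
};
```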
diff --git a/application/apps/indexer/stypes/bindings/error.ts b/application/apps/indexer/stypes/bindings/error.ts
new file mode 100644
index 0000000000..0c77c44b0a
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/error.ts
@@ -0,0 +1,33 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Describes the type and details of an error.
+ */
+export type ComputationError = "DestinationPath" | "SessionCreatingFail" | { "Communication": string } | { "OperationNotSupported": string } | { "IoOperation": string } | "InvalidData" | { "InvalidArgs": string } | { "Process": string } | { "Protocol": string } | { "SearchError": string } | "MultipleInitCall" | "SessionUnavailable" | { "NativeError": NativeError } | { "Grabbing": string } | { "Sde": string } | { "Decoding": string } | { "Encoding": string };
+
+/**
+ * Describes the details of an error.
+ */
+export type NativeError = { 
+/**
+ * The severity level of the error.
+ */
+severity: Severity, 
+/**
+ * The type or source of the error.
+ */
+kind: NativeErrorKind, 
+/**
+ * A detailed message describing the error.
+ */
+message: string | null, };
+
+/**
+ * Defines the source or type of an error.
+ */
+export type NativeErrorKind = "FileNotFound" | "UnsupportedFileType" | "ComputationFailed" | "Configuration" | "Interrupted" | "OperationSearch" | "NotYetImplemented" | "ChannelError" | "Io" | "Grabber";
+
+/**
+ * Indicates the severity level of an error.
+ */
+export type Severity = "WARNING" | "ERROR";
diff --git a/application/apps/indexer/stypes/bindings/index.ts b/application/apps/indexer/stypes/bindings/index.ts
new file mode 100644
index 0000000000..d63e3ee873
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/index.ts
@@ -0,0 +1,15 @@
+/**
+ * ATTENTION:
+ * THIS FILE IS MANUALLY CREATED TO MANAGE TYPE EXPORTS.
+ * DO NOT DELETE. ADD EXPORT STATEMENTS FOR ANY NEW TYPES.
+ */
+export * from './attachment';
+export * from './callback';
+export * from './command';
+export * from './error';
+export * from './lf_transition';
+export * from './miscellaneous';
+export * from './observe';
+export * from './progress';
+export * from './dlt';
+export * from './operations';
diff --git a/application/apps/indexer/stypes/bindings/lf_transition.ts b/application/apps/indexer/stypes/bindings/lf_transition.ts
new file mode 100644
index 0000000000..de4a4003ee
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/lf_transition.ts
@@ -0,0 +1,23 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { Ticks } from "./progress";
+
+/**
+ * Describes the progress of an operation.
+ */
+export type LifecycleTransition = { "Started": { 
+/**
+ * The unique identifier of the operation.
+ */
+uuid: string, 
+/**
+ * A user-friendly name of the operation for display purposes.
+ */
+alias: string, } } | { "Ticks": { 
+/**
+ * The unique identifier of the operation.
+ */
+uuid: string, 
+/**
+ * The progress data associated with the operation.
+ */
+ticks: Ticks, } } | { "Stopped": string };
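
All three `LifecycleTransition` variants are single-key objects, so a consumer can dispatch with `in` checks. A small sketch; the label format and function name are arbitrary.

```ts
import type { LifecycleTransition } from "./lf_transition";

// Produces a one-line description of a lifecycle transition.
export function transitionLabel(transition: LifecycleTransition): string {
    if ("Started" in transition) {
        return `started ${transition.Started.uuid} ("${transition.Started.alias}")`;
    }
    if ("Ticks" in transition) {
        const { uuid, ticks } = transition.Ticks;
        return `progress ${uuid}: ${ticks.count}/${ticks.total ?? "?"}`;
    }
    // "Stopped" carries the uuid of the finished operation.
    return `stopped ${transition.Stopped}`;
}
```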
diff --git a/application/apps/indexer/stypes/bindings/miscellaneous.ts b/application/apps/indexer/stypes/bindings/miscellaneous.ts
new file mode 100644
index 0000000000..59ab592e7d
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/miscellaneous.ts
@@ -0,0 +1,105 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Data about indices (log entry numbers). Used to provide information about
+ * the nearest search results relative to a specific log entry number.
+ */
+export type AroundIndexes = [number | undefined | null, number | undefined | null];
+
+/**
+ * Describes a match for a search condition.
+ */
+export type FilterMatch = { 
+/**
+ * The index (number) of the matching log entry.
+ */
+index: number, 
+/**
+ * The identifiers of the filters (search conditions) that matched
+ * the specified log entry.
+ */
+filters: Array<number>, };
+
+/**
+ * A list of matches for a search condition.
+ */
+export type FilterMatchList = Array<FilterMatch>;
+
+/**
+ * Information about a log entry.
+ */
+export type GrabbedElement = { 
+/**
+ * The unique identifier of the source.
+ */
+source_id: number, 
+/**
+ * The textual content of the log entry.
+ */
+content: string, 
+/**
+ * The position of the log entry in the overall stream.
+ */
+pos: number, 
+/**
+ * The nature of the log entry, represented as a bitmask. Possible values include:
+ * - `SEARCH`: Nature = Nature(1)
+ * - `BOOKMARK`: Nature = Nature(1 << 1)
+ * - `EXPANDED`: Nature = Nature(1 << 5)
+ * - `BREADCRUMB`: Nature = Nature(1 << 6)
+ * - `BREADCRUMB_SEPARATOR`: Nature = Nature(1 << 7)
+ */
+nature: number, };
+
+/**
+ * A list of log entries.
+ */
+export type GrabbedElementList = Array<GrabbedElement>;
+
+export type MapKeyValue = { [key in string]?: string };
+
+/**
+ * Representation of a range. We cannot use std ranges because there is no way
+ * to derive Serialize and Deserialize for them.
+ */
+export type Range = { start: number, end: number, };
+
+/**
+ * A list of ranges to read.
+ */
+export type Ranges = Array<Range>;
+
+/**
+ * A request to a stream that supports feedback, such as a terminal command
+ * that accepts input through `stdin`.
+ */
+export type SdeRequest = { "WriteText": string } | { "WriteBytes": Array<number> };
+
+/**
+ * The response from a source to a sent `SdeRequest`. Note that sending data
+ * with `SdeRequest` does not guarantee a response, as the behavior depends
+ * on the source.
+ */
+export type SdeResponse = { 
+/**
+ * The number of bytes received.
+ */
+bytes: number, };
+
+/**
+ * Describes a data source.
+ */
+export type SourceDefinition = { 
+/**
+ * The unique identifier of the source.
+ */
+id: number, 
+/**
+ * The user-friendly name of the source for display purposes.
+ */
+alias: string, };
+
+/**
+ * A list of data sources.
+ */
+export type Sources = Array<SourceDefinition>;
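
Since `GrabbedElement.nature` is documented as a bitmask, consumers can recover the flags with plain bit tests. A sketch assuming the flag values quoted in the comment above; the constant and function names are illustrative.

```ts
import type { GrabbedElement } from "./miscellaneous";

// Flag values mirror the doc comment on `nature`.
const SEARCH = 1;
const BOOKMARK = 1 << 1;
const EXPANDED = 1 << 5;
const BREADCRUMB = 1 << 6;
const BREADCRUMB_SEPARATOR = 1 << 7;

export function natureLabels(element: GrabbedElement): string[] {
    const labels: string[] = [];
    if (element.nature & SEARCH) labels.push("search");
    if (element.nature & BOOKMARK) labels.push("bookmark");
    if (element.nature & EXPANDED) labels.push("expanded");
    if (element.nature & BREADCRUMB) labels.push("breadcrumb");
    if (element.nature & BREADCRUMB_SEPARATOR) labels.push("breadcrumb-separator");
    return labels;
}
```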
diff --git a/application/apps/indexer/stypes/bindings/observe.ts b/application/apps/indexer/stypes/bindings/observe.ts
new file mode 100644
index 0000000000..f3289a8ebb
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/observe.ts
@@ -0,0 +1,182 @@
+/**
+ * ATTENTION:
+ * REFERENCE TO `DltFilterConfig` HAS BEEN ADDED MANUALLY
+ * BECAUSE THIS TYPE IS NOT GENERATED BY `ts_rs`.
+ */
+import { DltFilterConfig } from './dlt';
+
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Settings for the DLT parser.
+ */
+export type DltParserSettings = {
+    /**
+     * Configuration for filtering DLT messages.
+     */
+    filter_config: DltFilterConfig;
+    /**
+     * Paths to FIBEX files for additional interpretation of `payload` content.
+     */
+    fibex_file_paths: Array<string> | null;
+    /**
+     * Indicates whether the source contains a `StorageHeader`. Set to `true` if applicable.
+     */
+    with_storage_header: boolean;
+    /**
+     * Timezone for timestamp adjustment. If specified, timestamps are converted to this timezone.
+     */
+    tz: string | null;
+};
+
+/**
+ * Supported file formats for observation.
+ */
+export type FileFormat = 'PcapNG' | 'PcapLegacy' | 'Text' | 'Binary';
+
+/**
+ * Multicast configuration information.
+ * - `multiaddr`: A valid multicast address.
+ * - `interface`: The address of the local interface used to join the multicast group.
+ *   If set to `INADDR_ANY`, the system selects an appropriate interface.
+ */
+export type MulticastInfo = { multiaddr: string; interface: string | null };
+
+/**
+ * Options for observing data within a session.
+ */
+export type ObserveOptions = {
+    /**
+     * The description of the data source.
+     */
+    origin: ObserveOrigin;
+    /**
+     * The parser configuration to be applied.
+     */
+    parser: ParserType;
+};
+
+/**
+ * Describes the source of data for observation.
+ */
+export type ObserveOrigin =
+    | { File: [string, FileFormat, string] }
+    | { Concat: Array<[string, FileFormat, string]> }
+    | { Stream: [string, Transport] };
+
+/**
+ * Specifies the parser to be used for processing session data.
+ */
+export type ParserType =
+    | { Dlt: DltParserSettings }
+    | { SomeIp: SomeIpParserSettings }
+    | { Text: null };
+
+/**
+ * Configuration for executing terminal commands.
+ */
+export type ProcessTransportConfig = {
+    /**
+     * The working directory for the command.
+     */
+    cwd: string;
+    /**
+     * The command to execute.
+     */
+    command: string;
+    /**
+     * Environment variables. If empty, the default environment variables are used.
+     */
+    envs: Map<string, string>;
+};
+
+/**
+ * Configuration for serial port connections.
+ */
+export type SerialTransportConfig = {
+    /**
+     * The path to the serial port.
+     */
+    path: string;
+    /**
+     * The baud rate for the connection.
+     */
+    baud_rate: number;
+    /**
+     * The number of data bits per frame.
+     */
+    data_bits: number;
+    /**
+     * The flow control setting.
+     */
+    flow_control: number;
+    /**
+     * The parity setting.
+     */
+    parity: number;
+    /**
+     * The number of stop bits.
+     */
+    stop_bits: number;
+    /**
+     * The delay in sending data, in milliseconds.
+     */
+    send_data_delay: number;
+    /**
+     * Whether the connection is exclusive.
+     */
+    exclusive: boolean;
+};
+
+/**
+ * Settings for the SomeIp parser.
+ */
+export type SomeIpParserSettings = {
+    /**
+     * Paths to FIBEX files for additional interpretation of `payload` content.
+     */
+    fibex_file_paths: Array<string> | null;
+};
+
+/**
+ * Configuration for TCP connections.
+ */
+export type TCPTransportConfig = {
+    /**
+     * The address to bind the TCP connection to.
+     */
+    bind_addr: string;
+};
+
+/**
+ * Describes the transport source for a session.
+ */
+export type Transport =
+    | { Process: ProcessTransportConfig }
+    | { TCP: TCPTransportConfig }
+    | { UDP: UDPTransportConfig }
+    | { Serial: SerialTransportConfig };
+
+/**
+ * Configuration for UDP connections.
+ */
+export type UDPTransportConfig = {
+    /**
+     * The address to bind the UDP connection to.
+     */
+    bind_addr: string;
+    /**
+     * A list of multicast configurations.
+     */
+    multicast: Array<MulticastInfo>;
+};
+
+/**
+ * Configuration for UDP connections.
+ */
+export type UdpConnectionInfo = {
+    /**
+     * A list of multicast addresses to listen on.
+     */
+    multicast_addr: Array<MulticastInfo>;
+};
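
Putting these pieces together, an `ObserveOptions` literal for a single DLT file might look as follows. This is a sketch only: the reading of the `File` tuple as (source uuid, format, path) and all concrete values are assumptions, not something these bindings guarantee.

```ts
import type { DltParserSettings, ObserveOptions } from "./observe";

// Placeholder parser settings: empty filter counters, no FIBEX files, no timezone adjustment.
const dlt: DltParserSettings = {
    filter_config: { app_id_count: 0, context_id_count: 0 },
    fibex_file_paths: null,
    with_storage_header: true,
    tz: null,
};

export const observeDltFile: ObserveOptions = {
    origin: { File: ["00000000-0000-0000-0000-000000000000", "Binary", "/path/to/trace.dlt"] },
    parser: { Dlt: dlt },
};
```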
diff --git a/application/apps/indexer/stypes/bindings/operations.ts b/application/apps/indexer/stypes/bindings/operations.ts
new file mode 100644
index 0000000000..5989527ca1
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/operations.ts
@@ -0,0 +1,48 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Used to deliver the results of extracting values. This is used in the scope
+ * of the chart feature.
+ */
+export type ExtractedMatchValue = { 
+/**
+ * The index of the log entry (row number).
+ */
+index: number, 
+/**
+ * List of matches:
+ * `usize` - index of filter
+ * `Vec<String>` - list of extracted values
+ */
+values: Array<[number, Array<string>]>, };
+
+export type NearestPosition = { index: number, position: number, };
+
+/**
+ * `(row_number, min_value_in_range, max_value_in_range, value)`
+ * `value` can be the last value in the range or some kind of average.
+ */
+export type Point = { row: number, min: number, max: number, y_value: number, };
+
+export type ResultBool = boolean;
+
+/**
+ * The list of `ExtractedMatchValue`
+ */
+export type ResultExtractedMatchValues = Array<ExtractedMatchValue>;
+
+export type ResultNearestPosition = NearestPosition | null;
+
+/**
+ * Scaled chart data
+ */
+export type ResultScaledDistribution = Array<Array<[number, number]>>;
+
+export type ResultSearchValues = Map<number, Point[]>;
+
+/**
+ * Used only for debugging the session lifecycle.
+ */
+export type ResultSleep = { sleep_well: boolean, };
+
+export type ResultU64 = number;
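
As a small usage sketch for the chart types above (the helper name is illustrative), a consumer could derive a global value range from one filter's `Point` list before scaling it for display.

```ts
import type { Point } from "./operations";

// Returns [min, max] across all points, or null for an empty series.
export function valueRange(points: Point[]): [number, number] | null {
    if (points.length === 0) {
        return null;
    }
    let min = points[0].min;
    let max = points[0].max;
    for (const p of points) {
        min = Math.min(min, p.min);
        max = Math.max(max, p.max);
    }
    return [min, max];
}
```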
diff --git a/application/apps/indexer/stypes/bindings/progress.ts b/application/apps/indexer/stypes/bindings/progress.ts
new file mode 100644
index 0000000000..e02a8793ee
--- /dev/null
+++ b/application/apps/indexer/stypes/bindings/progress.ts
@@ -0,0 +1,43 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { Severity } from "./error";
+
+/**
+ * Represents a notification about an event (including potential errors)
+ * related to processing a specific log entry, if such data is available.
+ */
+export type Notification = { 
+/**
+ * The severity level of the event.
+ */
+severity: Severity, 
+/**
+ * The content or message describing the event.
+ */
+content: string, 
+/**
+ * The log entry number that triggered the event, if applicable.
+ */
+line: number | null, };
+
+/**
+ * Describes the progress of an operation.
+ */
+export type Progress = { "Ticks": Ticks } | { "Notification": Notification } | "Stopped";
+
+/**
+ * Provides detailed information about the progress of an operation.
+ */
+export type Ticks = { 
+/**
+ * The current progress count, typically representing `n` out of `100%`.
+ */
+count: number, 
+/**
+ * The name of the current progress stage, for user display purposes.
+ */
+state: string | null, 
+/**
+ * The total progress counter. Usually `100`, but for file operations,
+ * it might represent the file size, where `count` indicates the number of bytes read.
+ */
+total: number | null | undefined, };
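
A minimal sketch of turning `Progress` and `Ticks` into something displayable; the handling of a missing `total` and the label format are assumptions, not part of the bindings.

```ts
import type { Progress, Ticks } from "./progress";

// Percentage of completion, or null when the total is unknown.
export function percent(ticks: Ticks): number | null {
    if (ticks.total == null || ticks.total === 0) {
        return null;
    }
    return Math.min(100, Math.round((ticks.count / ticks.total) * 100));
}

export function progressLabel(progress: Progress): string {
    if (progress === "Stopped") {
        return "stopped";
    }
    if ("Ticks" in progress) {
        const p = percent(progress.Ticks);
        return p === null ? `${progress.Ticks.count}` : `${p}%`;
    }
    return `${progress.Notification.severity}: ${progress.Notification.content}`;
}
```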
diff --git a/application/apps/indexer/stypes/src/attachment/converting.rs b/application/apps/indexer/stypes/src/attachment/converting.rs
new file mode 100644
index 0000000000..c6f02bc337
--- /dev/null
+++ b/application/apps/indexer/stypes/src/attachment/converting.rs
@@ -0,0 +1,12 @@
+use crate::*;
+
+/// Converts a `Vec<AttachmentInfo>` into an `AttachmentList`.
+///
+/// This implementation allows you to create an `AttachmentList` directly from a vector of
+/// `AttachmentInfo` objects. It simplifies the conversion and ensures compatibility
+/// between these types.
+impl From<Vec<AttachmentInfo>> for AttachmentList {
+    fn from(value: Vec<AttachmentInfo>) -> Self {
+        Self(value)
+    }
+}
diff --git a/application/apps/indexer/stypes/src/attachment/mod.rs b/application/apps/indexer/stypes/src/attachment/mod.rs
new file mode 100644
index 0000000000..eea6c8d4b2
--- /dev/null
+++ b/application/apps/indexer/stypes/src/attachment/mod.rs
@@ -0,0 +1,48 @@
+#[cfg(feature = "rustcore")]
+mod converting;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+
+/// Describes the content of attached data found in the `payload` of a `dlt` message.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "attachment.ts")
+)]
+pub struct AttachmentInfo {
+    /// A unique identifier for the attachment.
+    pub uuid: Uuid,
+    /// The full path to the file. Note that `chipmunk` serializes the file name to ensure proper
+    /// saving to disk, so the actual file name may differ from the value in the `name` field.
+    pub filepath: PathBuf,
+    /// The name of the attachment, usually corresponding to the file name.
+    pub name: String,
+    /// The file extension, if available.
+    pub ext: Option<String>,
+    /// The size of the file in bytes.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub size: usize,
+    /// The `mime` type of the file, if it could be determined.
+    pub mime: Option<String>,
+    /// The log entry numbers containing the attachment data. Note that the attachment
+    /// data may be contained in a single log entry or split into parts distributed
+    /// across sequential log entries.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number[]"))]
+    pub messages: Vec<usize>,
+}
+
+/// A list of attachments.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "attachment.ts")
+)]
+pub struct AttachmentList(pub Vec<AttachmentInfo>);
diff --git a/application/apps/indexer/stypes/src/attachment/nodejs.rs b/application/apps/indexer/stypes/src/attachment/nodejs.rs
new file mode 100644
index 0000000000..798b648753
--- /dev/null
+++ b/application/apps/indexer/stypes/src/attachment/nodejs.rs
@@ -0,0 +1,4 @@
+use crate::*;
+
+try_into_js!(AttachmentInfo);
+try_into_js!(AttachmentList);
diff --git a/application/apps/indexer/stypes/src/attachment/proptest.rs b/application/apps/indexer/stypes/src/attachment/proptest.rs
new file mode 100644
index 0000000000..b22b071d65
--- /dev/null
+++ b/application/apps/indexer/stypes/src/attachment/proptest.rs
@@ -0,0 +1,65 @@
+use crate::*;
+use std::path::PathBuf;
+use uuid::Uuid;
+
+/// Implements the `Arbitrary` trait for `AttachmentInfo` to generate random instances
+/// for property-based testing using the `proptest` framework.
+///
+/// # Details
+/// - This implementation generates random values for all fields of `AttachmentInfo`,
+///   including:
+///   - A randomly generated `Uuid`.
+///   - A random `PathBuf` for the file path.
+///   - A random `String` for the file name.
+///   - An optional random file extension (`Option<String>`).
+///   - A random file size (`u32`, converted to `usize`).
+///   - An optional random MIME type (`Option<String>`).
+///   - A vector of random log entry indices (`Vec<u32>`, converted to `Vec<usize>`).
+impl Arbitrary for AttachmentInfo {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            Just(Uuid::new_v4()),
+            any::<PathBuf>(),
+            any::<String>(),
+            any::<Option<String>>(),
+            any::<u32>(),
+            any::<Option<String>>(),
+            prop::collection::vec(any::<u32>(), 0..10),
+        )
+            .prop_map(
+                |(uuid, filepath, name, ext, size, mime, messages)| AttachmentInfo {
+                    uuid,
+                    filepath,
+                    name,
+                    ext,
+                    size: size as usize,
+                    mime,
+                    messages: messages.into_iter().map(|p| p as usize).collect(),
+                },
+            )
+            .boxed()
+    }
+}
+
+/// Implements the `Arbitrary` trait for `AttachmentList` to generate random instances
+/// for property-based testing using the `proptest` framework.
+///
+/// # Details
+/// - This implementation generates a vector of random `AttachmentInfo` objects with
+///   up to 10 elements, which is then wrapped into an `AttachmentList`.
+impl Arbitrary for AttachmentList {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::collection::vec(any::<AttachmentInfo>(), 0..10)
+            .prop_map(AttachmentList)
+            .boxed()
+    }
+}
+
+test_msg!(AttachmentInfo, TESTS_USECASE_COUNT);
+test_msg!(AttachmentList, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/callback/extending.rs b/application/apps/indexer/stypes/src/callback/extending.rs
new file mode 100644
index 0000000000..31e132c2d2
--- /dev/null
+++ b/application/apps/indexer/stypes/src/callback/extending.rs
@@ -0,0 +1,26 @@
+use crate::*;
+
+impl CallbackEvent {
+    /// Creates a `CallbackEvent::SearchUpdated` with no search results.
+    ///
+    /// # Details
+    /// This is a convenience method for generating a `SearchUpdated` event
+    /// when no matches are found during a search. It sets `found` to `0`
+    /// and initializes an empty statistics map (`stat`).
+    pub fn no_search_results() -> Self {
+        CallbackEvent::SearchUpdated {
+            found: 0,
+            stat: HashMap::new(),
+        }
+    }
+
+    /// Creates a `CallbackEvent::SearchUpdated` with the given search results.
+    ///
+    /// # Parameters
+    /// - `found`: The number of matches found during the search.
+    /// - `stat`: A map containing search conditions as keys and the global
+    ///   match counts as values.
+    pub fn search_results(found: u64, stat: HashMap<String, u64>) -> Self {
+        CallbackEvent::SearchUpdated { found, stat }
+    }
+}
diff --git a/application/apps/indexer/stypes/src/callback/formating.rs b/application/apps/indexer/stypes/src/callback/formating.rs
new file mode 100644
index 0000000000..5ff6813bb4
--- /dev/null
+++ b/application/apps/indexer/stypes/src/callback/formating.rs
@@ -0,0 +1,49 @@
+use crate::*;
+
+impl std::fmt::Display for CallbackEvent {
+    /// Implements the `Display` trait for `CallbackEvent`.
+    ///
+    /// This provides a human-readable string representation for each variant of the
+    /// `CallbackEvent` enum, making it easier to log or debug events.
+    ///
+    /// # Format
+    /// - `StreamUpdated(len)` - Displays the length of the updated stream.
+    /// - `FileRead` - Indicates that a file has been read.
+    /// - `SearchUpdated(found)` - Shows the number of search results found.
+    /// - `IndexedMapUpdated(len)` - Displays the number of indexed map entries.
+    /// - `SearchMapUpdated` - Indicates that the search map has been updated.
+    /// - `SearchValuesUpdated` - Indicates that search values have been updated.
+    /// - `AttachmentsUpdated: {len}` - Displays the size of the updated attachment.
+    /// - `Progress` - Indicates progress for an operation.
+    /// - `SessionError: {err:?}` - Displays details of a session error.
+    /// - `OperationError: {uuid}: {error:?}` - Displays the UUID of the operation and the error details.
+    /// - `OperationStarted: {uuid}` - Displays the UUID of a started operation.
+    /// - `OperationProcessing: {uuid}` - Displays the UUID of an operation in progress.
+    /// - `OperationDone: {info.uuid}` - Displays the UUID of a completed operation.
+    /// - `SessionDestroyed` - Indicates that the session has been destroyed.
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        match self {
+            Self::StreamUpdated(len) => write!(f, "StreamUpdated({len})"),
+            Self::FileRead => write!(f, "FileRead"),
+            Self::SearchUpdated { found, stat: _ } => write!(f, "SearchUpdated({found})"),
+            Self::IndexedMapUpdated { len } => write!(f, "IndexedMapUpdated({len})"),
+            Self::SearchMapUpdated(_) => write!(f, "SearchMapUpdated"),
+            Self::SearchValuesUpdated(_) => write!(f, "SearchValuesUpdated"),
+            Self::AttachmentsUpdated { len, attachment: _ } => {
+                write!(f, "AttachmentsUpdated: {}", len)
+            }
+            Self::Progress {
+                uuid: _,
+                progress: _,
+            } => write!(f, "Progress"),
+            Self::SessionError(err) => write!(f, "SessionError: {err:?}"),
+            Self::OperationError { uuid, error } => {
+                write!(f, "OperationError: {uuid}: {error:?}")
+            }
+            Self::OperationStarted(uuid) => write!(f, "OperationStarted: {uuid}"),
+            Self::OperationProcessing(uuid) => write!(f, "OperationProcessing: {uuid}"),
+            Self::OperationDone(info) => write!(f, "OperationDone: {}", info.uuid),
+            Self::SessionDestroyed => write!(f, "SessionDestroyed"),
+        }
+    }
+}
diff --git a/application/apps/indexer/stypes/src/callback/mod.rs b/application/apps/indexer/stypes/src/callback/mod.rs
new file mode 100644
index 0000000000..849408ab09
--- /dev/null
+++ b/application/apps/indexer/stypes/src/callback/mod.rs
@@ -0,0 +1,133 @@
+#[cfg(feature = "rustcore")]
+mod extending;
+#[cfg(feature = "rustcore")]
+mod formating;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+
+/// Contains the results of an operation.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "callback.ts")
+)]
+pub struct OperationDone {
+    /// The unique identifier of the operation.
+    pub uuid: Uuid,
+    /// The results of the operation, if available.
+    pub result: Option<Vec<u8>>,
+}
+
+/// Represents events sent to the client.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "callback.ts")
+)]
+pub enum CallbackEvent {
+    /// Triggered when the content of the current session is updated.
+    /// - `u64`: The current number of log entries in the stream.
+    /// This can be triggered with `0` when the session is created.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    StreamUpdated(u64),
+
+    /// Triggered when a file is opened within the session.
+    /// Although `chipmunk` continues to monitor the file for changes,
+    /// this event is triggered upon the completion of file reading.
+    /// This event is not triggered for streams within a session.
+    FileRead,
+
+    /// Triggered when search results are updated.
+    SearchUpdated {
+        /// The number of logs with matches. Can be `0` if the search is reset on the client side.
+        #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+        found: u64,
+        /// A map of search conditions and their global match counts within the session.
+        /// - `String`: The search condition.
+        /// - `u64`: The count of matches.
+        #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "Map<string, number>"))]
+        stat: HashMap<String, u64>,
+    },
+
+    /// Always triggered immediately after `SearchUpdated`. Contains data about
+    /// the number of log entries from search results that are available for reading.
+    IndexedMapUpdated {
+        /// The number of log entries from search results available for reading.
+        #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+        len: u64,
+    },
+
+    /// Always triggered immediately after `SearchUpdated`. Contains data about
+    /// the search conditions that matched, along with the indices of log entries where matches were found.
+    /// - `Option<FilterMatchList>`: The list of matches with log entry indices.
+    SearchMapUpdated(Option<FilterMatchList>),
+
+    /// Triggered when the "value map" is updated. The "value map" is used to build charts
+    /// from search results. Always triggered immediately after `SearchUpdated`.
+    /// - `Option<HashMap<u8, (f64, f64)>>`: The value map.
+    #[cfg_attr(
+        all(test, feature = "test_and_gen"),
+        ts(type = "Map<number, [number, number]>")
+    )]
+    SearchValuesUpdated(Option<HashMap<u8, (f64, f64)>>),
+
+    /// Triggered whenever a new attachment is detected in the logs.
+    AttachmentsUpdated {
+        /// The size of the attachment in bytes.
+        #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+        len: u64,
+        /// The description of the attachment.
+        attachment: AttachmentInfo,
+    },
+
+    /// Triggered when progress is made during an operation.
+    Progress {
+        /// The unique identifier of the operation.
+        uuid: Uuid,
+        /// Information about the progress.
+        progress: Progress,
+    },
+
+    /// Triggered in the event of an undefined session error.
+    SessionError(NativeError),
+
+    /// Triggered when an operation ends with an error.
+    /// This event may follow `OperationStarted` since that event only indicates
+    /// that the operation began successfully. It may also follow `OperationProcessing`.
+    ///
+    /// However, this event cannot precede or follow `OperationDone`, which is triggered
+    /// upon successful operation completion.
+    OperationError {
+        /// The unique identifier of the operation that caused the error.
+        uuid: Uuid,
+        /// The error details.
+        error: NativeError,
+    },
+
+    /// Triggered when an operation starts successfully. This event is only
+    /// triggered once for each specific operation.
+    /// - `Uuid`: The unique identifier of the operation.
+    OperationStarted(Uuid),
+
+    /// Triggered while an operation is in progress. This event can only follow
+    /// `OperationStarted` and may be triggered multiple times for a single operation.
+    /// - `Uuid`: The unique identifier of the operation.
+    OperationProcessing(Uuid),
+
+    /// Triggered upon the successful completion of an operation.
+    /// - `OperationDone`: The results of the completed operation.
+    OperationDone(OperationDone),
+
+    /// Triggered when the current session is fully closed, and all necessary cleanup
+    /// procedures are completed. This event guarantees that all possible read/write
+    /// operations are stopped, and all previously created loops are terminated.
+    SessionDestroyed,
+}
diff --git a/application/apps/indexer/stypes/src/callback/nodejs.rs b/application/apps/indexer/stypes/src/callback/nodejs.rs
new file mode 100644
index 0000000000..dc1a1fadc0
--- /dev/null
+++ b/application/apps/indexer/stypes/src/callback/nodejs.rs
@@ -0,0 +1,4 @@
+use crate::*;
+
+try_into_js!(OperationDone);
+try_into_js!(CallbackEvent);
diff --git a/application/apps/indexer/stypes/src/callback/proptest.rs b/application/apps/indexer/stypes/src/callback/proptest.rs
new file mode 100644
index 0000000000..6d8d70b8c5
--- /dev/null
+++ b/application/apps/indexer/stypes/src/callback/proptest.rs
@@ -0,0 +1,85 @@
+use crate::*;
+use uuid::Uuid;
+
+impl Arbitrary for OperationDone {
+    /// Implements the `Arbitrary` trait for `OperationDone` to generate random instances
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Randomly generates a `Uuid` for the `uuid` field.
+    /// - Optionally generates a random `Vec<u8>` for the `result` field.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (Just(Uuid::new_v4()), any::<Option<Vec<u8>>>())
+            .prop_map(|(uuid, result)| OperationDone { uuid, result })
+            .boxed()
+    }
+}
+
+impl Arbitrary for CallbackEvent {
+    /// Implements the `Arbitrary` trait for `CallbackEvent` to generate random instances
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// This implementation supports the generation of all variants of `CallbackEvent`,
+    /// including:
+    /// - `StreamUpdated` with a random `u64` value.
+    /// - `FileRead` as a predefined constant.
+    /// - `SearchUpdated` with random values for `found` and a map of search conditions.
+    /// - `IndexedMapUpdated` with a random `u64` length.
+    /// - `SearchMapUpdated` with an optional `FilterMatchList`.
+    /// - `SearchValuesUpdated` with a map of random values, converting `f32` to `f64`.
+    /// - `AttachmentsUpdated` with random attachment information.
+    /// - `Progress` with a random `Uuid` and `Progress` instance.
+    /// - `SessionError` with a random `NativeError`.
+    /// - `OperationError` with random `Uuid` and `NativeError`.
+    /// - `OperationStarted` with a random `Uuid`.
+    /// - `OperationProcessing` with a random `Uuid`.
+    /// - `OperationDone` with a random `OperationDone` instance.
+    /// - `SessionDestroyed` as a predefined constant.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<u32>().prop_map(|n| CallbackEvent::StreamUpdated(n as u64)),
+            Just(CallbackEvent::FileRead),
+            (any::<u32>(), any::<HashMap<String, u32>>(),).prop_map(|(found, stat)| {
+                CallbackEvent::SearchUpdated {
+                    found: found as u64,
+                    stat: stat.into_iter().map(|(k, v)| (k, v as u64)).collect(),
+                }
+            }),
+            any::<u32>().prop_map(|len| CallbackEvent::IndexedMapUpdated { len: len as u64 }),
+            any::<Option<FilterMatchList>>().prop_map(CallbackEvent::SearchMapUpdated),
+            any::<Option<HashMap<u8, (f32, f32)>>>().prop_map(|ev| {
+                CallbackEvent::SearchValuesUpdated(ev.map(|ev| {
+                    ev.into_iter()
+                        .map(|(k, (l, r))| (k, (l as f64, r as f64)))
+                        .collect()
+                }))
+            }),
+            (any::<u32>(), any::<AttachmentInfo>(),).prop_map(|(len, attachment)| {
+                CallbackEvent::AttachmentsUpdated {
+                    len: len as u64,
+                    attachment,
+                }
+            }),
+            (Just(Uuid::new_v4()), any::<Progress>(),)
+                .prop_map(|(uuid, progress)| CallbackEvent::Progress { uuid, progress }),
+            any::<NativeError>().prop_map(CallbackEvent::SessionError),
+            (Just(Uuid::new_v4()), any::<NativeError>(),)
+                .prop_map(|(uuid, error)| CallbackEvent::OperationError { uuid, error }),
+            Just(Uuid::new_v4()).prop_map(CallbackEvent::OperationStarted),
+            Just(Uuid::new_v4()).prop_map(CallbackEvent::OperationProcessing),
+            any::<OperationDone>().prop_map(CallbackEvent::OperationDone),
+            Just(CallbackEvent::SessionDestroyed),
+        ]
+        .boxed()
+    }
+}
+
+test_msg!(OperationDone, TESTS_USECASE_COUNT);
+test_msg!(CallbackEvent, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/command/dltstat/converting.rs b/application/apps/indexer/stypes/src/command/dltstat/converting.rs
new file mode 100644
index 0000000000..0e141e560e
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/dltstat/converting.rs
@@ -0,0 +1,40 @@
+use crate::*;
+use dlt_core::statistics;
+
+impl From<statistics::LevelDistribution> for DltLevelDistribution {
+    fn from(v: statistics::LevelDistribution) -> Self {
+        DltLevelDistribution {
+            non_log: v.non_log,
+            log_fatal: v.log_fatal,
+            log_error: v.log_error,
+            log_warning: v.log_warning,
+            log_info: v.log_info,
+            log_debug: v.log_debug,
+            log_verbose: v.log_verbose,
+            log_invalid: v.log_invalid,
+        }
+    }
+}
+
+trait InnerInto<T> {
+    fn inner_into(self) -> T;
+}
+
+impl InnerInto<Vec<(String, DltLevelDistribution)>>
+    for Vec<(String, statistics::LevelDistribution)>
+{
+    fn inner_into(self) -> Vec<(String, DltLevelDistribution)> {
+        self.into_iter().map(|(k, l)| (k, l.into())).collect()
+    }
+}
+
+impl From<statistics::StatisticInfo> for DltStatisticInfo {
+    fn from(v: statistics::StatisticInfo) -> Self {
+        DltStatisticInfo {
+            app_ids: v.app_ids.inner_into(),
+            context_ids: v.context_ids.inner_into(),
+            ecu_ids: v.ecu_ids.inner_into(),
+            contained_non_verbose: v.contained_non_verbose,
+        }
+    }
+}
diff --git a/application/apps/indexer/stypes/src/command/dltstat/mod.rs b/application/apps/indexer/stypes/src/command/dltstat/mod.rs
new file mode 100644
index 0000000000..2caf117dc8
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/dltstat/mod.rs
@@ -0,0 +1,40 @@
+#[cfg(feature = "rustcore")]
+mod converting;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+
+/// Distribution of DLT log entries across log levels for a single id.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub struct DltLevelDistribution {
+    pub non_log: usize,
+    pub log_fatal: usize,
+    pub log_error: usize,
+    pub log_warning: usize,
+    pub log_info: usize,
+    pub log_debug: usize,
+    pub log_verbose: usize,
+    pub log_invalid: usize,
+}
+
+/// Statistics collected from a DLT source: per-id log level distributions and
+/// whether non-verbose messages were encountered.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub struct DltStatisticInfo {
+    pub app_ids: Vec<(String, DltLevelDistribution)>,
+    pub context_ids: Vec<(String, DltLevelDistribution)>,
+    pub ecu_ids: Vec<(String, DltLevelDistribution)>,
+    pub contained_non_verbose: bool,
+}
diff --git a/application/apps/indexer/stypes/src/command/dltstat/nodejs.rs b/application/apps/indexer/stypes/src/command/dltstat/nodejs.rs
new file mode 100644
index 0000000000..e3ee5c08ed
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/dltstat/nodejs.rs
@@ -0,0 +1,4 @@
+use crate::*;
+
+try_into_js!(DltLevelDistribution);
+try_into_js!(DltStatisticInfo);
diff --git a/application/apps/indexer/stypes/src/command/dltstat/proptest.rs b/application/apps/indexer/stypes/src/command/dltstat/proptest.rs
new file mode 100644
index 0000000000..00d2f3f47a
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/dltstat/proptest.rs
@@ -0,0 +1,67 @@
+use crate::*;
+
+impl Arbitrary for DltLevelDistribution {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<u32>(),
+            any::<u32>(),
+            any::<u32>(),
+            any::<u32>(),
+            any::<u32>(),
+            any::<u32>(),
+            any::<u32>(),
+            any::<u32>(),
+        )
+            .prop_map(
+                |(
+                    non_log,
+                    log_fatal,
+                    log_error,
+                    log_warning,
+                    log_info,
+                    log_debug,
+                    log_verbose,
+                    log_invalid,
+                )| DltLevelDistribution {
+                    non_log: non_log as usize,
+                    log_fatal: log_fatal as usize,
+                    log_error: log_error as usize,
+                    log_warning: log_warning as usize,
+                    log_info: log_info as usize,
+                    log_debug: log_debug as usize,
+                    log_verbose: log_verbose as usize,
+                    log_invalid: log_invalid as usize,
+                },
+            )
+            .boxed()
+    }
+}
+
+impl Arbitrary for DltStatisticInfo {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            prop::collection::vec(any::<(String, DltLevelDistribution)>(), 0..10),
+            prop::collection::vec(any::<(String, DltLevelDistribution)>(), 0..10),
+            prop::collection::vec(any::<(String, DltLevelDistribution)>(), 0..10),
+            any::<bool>(),
+        )
+            .prop_map(
+                |(app_ids, context_ids, ecu_ids, contained_non_verbose)| DltStatisticInfo {
+                    contained_non_verbose,
+                    context_ids,
+                    app_ids,
+                    ecu_ids,
+                },
+            )
+            .boxed()
+    }
+}
+
+test_msg!(DltLevelDistribution, TESTS_USECASE_COUNT);
+test_msg!(DltStatisticInfo, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/command/extending.rs b/application/apps/indexer/stypes/src/command/extending.rs
new file mode 100644
index 0000000000..d0b4682259
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/extending.rs
@@ -0,0 +1,19 @@
+use crate::*;
+
+impl<T: Serialize + DeserializeOwned> CommandOutcome<T> {
+    /// Converts a `CommandOutcome<T>` into a `UuidCommandOutcome<T>`, associating it with a given `Uuid`.
+    ///
+    /// # Parameters
+    /// - `self`: The `CommandOutcome<T>` instance to be converted.
+    /// - `uuid`: The `Uuid` to associate with the resulting `UuidCommandOutcome`.
+    ///
+    /// # Returns
+    /// - `UuidCommandOutcome::Cancelled` if the `CommandOutcome` is `Cancelled`.
+    /// - `UuidCommandOutcome::Finished` if the `CommandOutcome` is `Finished`, pairing the given `Uuid` with the result.
+    pub fn as_command_result(self, uuid: Uuid) -> UuidCommandOutcome<T> {
+        match self {
+            CommandOutcome::Cancelled => UuidCommandOutcome::Cancelled(uuid),
+            CommandOutcome::Finished(c) => UuidCommandOutcome::Finished((uuid, c)),
+        }
+    }
+}
diff --git a/application/apps/indexer/stypes/src/command/folders/extending.rs b/application/apps/indexer/stypes/src/command/folders/extending.rs
new file mode 100644
index 0000000000..89a977b4ad
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/folders/extending.rs
@@ -0,0 +1,100 @@
+use crate::*;
+use std::fs::Metadata;
+use walkdir::DirEntry;
+
+impl FolderEntityDetails {
+    /// Creates a `FolderEntityDetails` instance from a directory entry.
+    ///
+    /// # Parameters
+    /// - `entity`: The `DirEntry` representing a file or folder.
+    ///
+    /// # Returns
+    /// - `Some(FolderEntityDetails)` if the parent directory can be determined.
+    /// - `None` otherwise.
+    pub fn from(entity: &DirEntry) -> Option<FolderEntityDetails> {
+        entity.path().parent().map(|parent| FolderEntityDetails {
+            full: entity.path().to_string_lossy().to_string(),
+            filename: entity.file_name().to_string_lossy().to_string(),
+            path: parent.to_string_lossy().to_string(),
+            basename: entity.file_name().to_string_lossy().to_string(),
+            ext: entity
+                .path()
+                .extension()
+                .unwrap_or_default()
+                .to_string_lossy()
+                .to_string(),
+        })
+    }
+}
+
+impl FolderEntity {
+    /// Creates a `FolderEntity` instance from a directory entry and its metadata.
+    ///
+    /// # Parameters
+    /// - `entity`: The `DirEntry` representing a file or folder.
+    /// - `md`: The `Metadata` of the directory entry.
+    ///
+    /// # Returns
+    /// - `Some(FolderEntity)` if the entry is a directory, file, or symbolic link.
+    /// - `None` otherwise.
+    pub fn from(entity: &DirEntry, md: &Metadata) -> Option<FolderEntity> {
+        if md.is_dir() {
+            FolderEntity::dir(entity)
+        } else if md.is_symlink() {
+            FolderEntity::symlink(entity)
+        } else {
+            FolderEntity::file(entity)
+        }
+    }
+
+    /// Creates a `FolderEntity` instance for a directory.
+    ///
+    /// # Parameters
+    /// - `entity`: The `DirEntry` representing the directory.
+    ///
+    /// # Returns
+    /// - `Some(FolderEntity)` if the directory has a valid file name.
+    /// - `None` otherwise.
+    fn dir(entity: &DirEntry) -> Option<FolderEntity> {
+        entity.path().file_name().map(|filename| FolderEntity {
+            name: filename.to_string_lossy().to_string(),
+            fullname: entity.path().to_string_lossy().to_string(),
+            kind: FolderEntityType::Directory,
+            details: None,
+        })
+    }
+
+    /// Creates a `FolderEntity` instance for a file.
+    ///
+    /// # Parameters
+    /// - `entity`: The `DirEntry` representing the file.
+    ///
+    /// # Returns
+    /// - `Some(FolderEntity)` if the file has a valid file name.
+    /// - `None` otherwise.
+    fn file(entity: &DirEntry) -> Option<FolderEntity> {
+        entity.path().file_name().map(|filename| FolderEntity {
+            name: filename.to_string_lossy().to_string(),
+            fullname: entity.path().to_string_lossy().to_string(),
+            kind: FolderEntityType::File,
+            details: FolderEntityDetails::from(entity),
+        })
+    }
+
+    /// Creates a `FolderEntity` instance for a symbolic link.
+    ///
+    /// # Parameters
+    /// - `entity`: The `DirEntry` representing the symbolic link.
+    ///
+    /// # Returns
+    /// - `Some(FolderEntity)` if the symbolic link has a valid file name.
+    /// - `None` otherwise.
+    fn symlink(entity: &DirEntry) -> Option<FolderEntity> {
+        entity.path().file_name().map(|filename| FolderEntity {
+            name: filename.to_string_lossy().to_string(),
+            fullname: entity.path().to_string_lossy().to_string(),
+            kind: FolderEntityType::SymbolicLink,
+            details: FolderEntityDetails::from(entity),
+        })
+    }
+}
diff --git a/application/apps/indexer/stypes/src/command/folders/mod.rs b/application/apps/indexer/stypes/src/command/folders/mod.rs
new file mode 100644
index 0000000000..175f3d3c2c
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/folders/mod.rs
@@ -0,0 +1,89 @@
+#[cfg(feature = "rustcore")]
+mod extending;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+
+/// Represents the type of a folder entity in the file system.
+#[allow(clippy::upper_case_acronyms)]
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub enum FolderEntityType {
+    /// A block device (e.g., a disk or partition).
+    BlockDevice,
+    /// A character device (e.g., a terminal or serial port).
+    CharacterDevice,
+    /// A directory.
+    Directory,
+    /// A named pipe (FIFO).
+    FIFO,
+    /// A regular file.
+    File,
+    /// A socket.
+    Socket,
+    /// A symbolic link.
+    SymbolicLink,
+}
+
+/// Contains detailed information about a folder entity.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub struct FolderEntityDetails {
+    /// The name of the file or folder.
+    filename: String,
+    /// The full path to the file or folder.
+    full: String,
+    /// The directory path containing the file or folder.
+    path: String,
+    /// The base name of the file or folder.
+    basename: String,
+    /// The file extension, if applicable.
+    ext: String,
+}
+
+/// Represents the result of scanning a folder.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub struct FoldersScanningResult {
+    /// A list of folder entities found during the scan.
+    pub list: Vec<FolderEntity>,
+    /// Indicates whether the maximum length of results was reached.
+    pub max_len_reached: bool,
+}
+
+/// Represents a folder entity in the file system.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub struct FolderEntity {
+    /// The name of the entity (file or folder).
+    name: String,
+    /// The full path of the entity.
+    fullname: String,
+    /// The type of the entity (e.g., file, directory, symbolic link).
+    kind: FolderEntityType,
+    /// Optional detailed information about the entity.
+    details: Option<FolderEntityDetails>,
+}
diff --git a/application/apps/indexer/stypes/src/command/folders/nodejs.rs b/application/apps/indexer/stypes/src/command/folders/nodejs.rs
new file mode 100644
index 0000000000..d001336a20
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/folders/nodejs.rs
@@ -0,0 +1,6 @@
+use crate::*;
+
+try_into_js!(FolderEntityType);
+try_into_js!(FolderEntityDetails);
+try_into_js!(FoldersScanningResult);
+try_into_js!(FolderEntity);
diff --git a/application/apps/indexer/stypes/src/command/folders/proptest.rs b/application/apps/indexer/stypes/src/command/folders/proptest.rs
new file mode 100644
index 0000000000..1b29abf005
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/folders/proptest.rs
@@ -0,0 +1,124 @@
+use crate::*;
+
+impl Arbitrary for FolderEntityType {
+    /// Implements the `Arbitrary` trait for `FolderEntityType` to generate random variants
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// This implementation generates random variants of `FolderEntityType`, including:
+    /// - `BlockDevice`
+    /// - `CharacterDevice`
+    /// - `Directory`
+    /// - `FIFO`
+    /// - `File`
+    /// - `Socket`
+    /// - `SymbolicLink`
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            Just(FolderEntityType::BlockDevice),
+            Just(FolderEntityType::CharacterDevice),
+            Just(FolderEntityType::Directory),
+            Just(FolderEntityType::FIFO),
+            Just(FolderEntityType::File),
+            Just(FolderEntityType::Socket),
+            Just(FolderEntityType::SymbolicLink),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for FolderEntityDetails {
+    /// Implements the `Arbitrary` trait for `FolderEntityDetails` to generate random instances
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// Generates random values for all fields:
+    /// - `filename`: A random `String`.
+    /// - `full`: A random `String`.
+    /// - `path`: A random `String`.
+    /// - `basename`: A random `String`.
+    /// - `ext`: A random `String`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<String>(),
+            any::<String>(),
+            any::<String>(),
+            any::<String>(),
+            any::<String>(),
+        )
+            .prop_map(
+                |(filename, full, path, basename, ext)| FolderEntityDetails {
+                    filename,
+                    full,
+                    path,
+                    basename,
+                    ext,
+                },
+            )
+            .boxed()
+    }
+}
+
+impl Arbitrary for FolderEntity {
+    /// Implements the `Arbitrary` trait for `FolderEntity` to generate random instances
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// Generates random values for all fields:
+    /// - `name`: A random `String`.
+    /// - `fullname`: A random `String`.
+    /// - `kind`: A random `FolderEntityType`.
+    /// - `details`: An optional random `FolderEntityDetails`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<String>(),
+            any::<String>(),
+            any::<FolderEntityType>(),
+            any::<Option<FolderEntityDetails>>(),
+        )
+            .prop_map(|(name, fullname, kind, details)| FolderEntity {
+                name,
+                fullname,
+                kind,
+                details,
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for FoldersScanningResult {
+    /// Implements the `Arbitrary` trait for `FoldersScanningResult` to generate random instances
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates a random list of up to 10 `FolderEntity` instances.
+    /// - Generates a random `bool` to indicate whether the maximum length was reached.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            prop::collection::vec(any::<FolderEntity>(), 0..10),
+            any::<bool>(),
+        )
+            .prop_map(|(list, max_len_reached)| FoldersScanningResult {
+                list,
+                max_len_reached,
+            })
+            .boxed()
+    }
+}
+
+test_msg!(FolderEntityType, TESTS_USECASE_COUNT);
+test_msg!(FolderEntityDetails, TESTS_USECASE_COUNT);
+test_msg!(FoldersScanningResult, TESTS_USECASE_COUNT);
+test_msg!(FolderEntity, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/command/mod.rs b/application/apps/indexer/stypes/src/command/mod.rs
new file mode 100644
index 0000000000..8ebea6102d
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/mod.rs
@@ -0,0 +1,42 @@
+#[cfg(feature = "rustcore")]
+mod extending;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+#[cfg(test)]
+mod ts;
+
+mod dltstat;
+mod folders;
+mod profiles;
+mod serial;
+
+pub use dltstat::*;
+pub use folders::*;
+pub use profiles::*;
+pub use serial::*;
+
+use crate::*;
+
+/// Represents the result of a command execution.
+/// At the core level, this type is used for all commands invoked within an `UnboundSession`.
+/// It is only used to indicate the successful completion or interruption of a command.
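+///
+/// A minimal illustrative sketch of consuming an outcome (hypothetical `outcome`
+/// value; not part of the API contract):
+///
+/// ```ignore
+/// match outcome {
+///     CommandOutcome::Finished(value) => println!("finished: {value:?}"),
+///     CommandOutcome::Cancelled => println!("cancelled"),
+/// }
+/// ```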
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[serde(bound(deserialize = "T: DeserializeOwned"))]
+#[extend::encode_decode]
+pub enum CommandOutcome<T: Serialize + DeserializeOwned> {
+    /// Indicates that the command was successfully completed.
+    Finished(T),
+    /// Indicates that the command execution was interrupted.
+    Cancelled,
+}
+
+/// Similar to `CommandOutcome`, but additionally contains the identifier of the executed command.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+pub enum UuidCommandOutcome<T: Serialize> {
+    /// Indicates that the command was successfully completed.
+    Finished((Uuid, T)),
+    /// Indicates that the command execution was interrupted.
+    Cancelled(Uuid),
+}
diff --git a/application/apps/indexer/stypes/src/command/nodejs.rs b/application/apps/indexer/stypes/src/command/nodejs.rs
new file mode 100644
index 0000000000..e223444e87
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/nodejs.rs
@@ -0,0 +1,12 @@
+use crate::*;
+
+try_into_js!(CommandOutcome<FoldersScanningResult>);
+try_into_js!(CommandOutcome<SerialPortsList>);
+try_into_js!(CommandOutcome<ProfileList>);
+try_into_js!(CommandOutcome<MapKeyValue>);
+try_into_js!(CommandOutcome<DltStatisticInfo>);
+try_into_js!(CommandOutcome<()>);
+try_into_js!(CommandOutcome<i64>);
+try_into_js!(CommandOutcome<Option<String>>);
+try_into_js!(CommandOutcome<String>);
+try_into_js!(CommandOutcome<bool>);
diff --git a/application/apps/indexer/stypes/src/command/profiles/converting.rs b/application/apps/indexer/stypes/src/command/profiles/converting.rs
new file mode 100644
index 0000000000..43f35105a3
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/profiles/converting.rs
@@ -0,0 +1,13 @@
+use crate::*;
+
+/// Converts an `envvars::Profile` into a `Profile`.
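+///
+/// A minimal illustrative sketch (assuming `detected` is an `envvars::Profile`
+/// obtained from the `envvars` crate):
+///
+/// ```ignore
+/// let profile: Profile = detected.into();
+/// ```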
+impl From<envvars::Profile> for Profile {
+    fn from(pro: envvars::Profile) -> Self {
+        Profile {
+            name: pro.name,
+            path: pro.path,
+            envvars: pro.envvars,
+            symlink: pro.symlink,
+        }
+    }
+}
diff --git a/application/apps/indexer/stypes/src/command/profiles/mod.rs b/application/apps/indexer/stypes/src/command/profiles/mod.rs
new file mode 100644
index 0000000000..b469e24509
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/profiles/mod.rs
@@ -0,0 +1,44 @@
+#[cfg(feature = "rustcore")]
+mod converting;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+
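+/// Describes a shell profile detected on the user's system.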
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub struct Profile {
+    /// Suggested name of the shell. On Unix-based systems this is the name of the
+    /// executable file, e.g. "bash" or "fish". On Windows it is a name like "GitBash"
+    /// or "PowerShell".
+    pub name: String,
+    /// Path to the shell's executable file.
+    pub path: PathBuf,
+    /// List of environment variables. Because the extraction operation can take some
+    /// time, `envvars` is `None` by default. To load the data, use the `load` method,
+    /// which attempts to detect the environment variables.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "Map<string, string>"))]
+    pub envvars: Option<HashMap<String, String>>,
+    /// `true` if the path to the shell's executable file is a symlink to another location.
+    pub symlink: bool,
+}
+
+/// Represents a list of shell profiles.
+///
+/// This structure contains a vector of `Profile` instances, one for each shell profile
+/// detected on the system.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub struct ProfileList(pub Vec<Profile>);
diff --git a/application/apps/indexer/stypes/src/command/profiles/nodejs.rs b/application/apps/indexer/stypes/src/command/profiles/nodejs.rs
new file mode 100644
index 0000000000..441d8e73e2
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/profiles/nodejs.rs
@@ -0,0 +1,4 @@
+use crate::*;
+
+try_into_js!(Profile);
+try_into_js!(ProfileList);
diff --git a/application/apps/indexer/stypes/src/command/profiles/proptest.rs b/application/apps/indexer/stypes/src/command/profiles/proptest.rs
new file mode 100644
index 0000000000..3a50b0ca44
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/profiles/proptest.rs
@@ -0,0 +1,36 @@
+use crate::*;
+
+impl Arbitrary for Profile {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<String>(),
+            any::<PathBuf>(),
+            any::<Option<HashMap<String, String>>>(),
+            any::<bool>(),
+        )
+            .prop_map(|(name, path, envvars, symlink)| Profile {
+                name,
+                path,
+                symlink,
+                envvars,
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for ProfileList {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::collection::vec(Profile::arbitrary(), 0..10)
+            .prop_map(ProfileList)
+            .boxed()
+    }
+}
+
+test_msg!(Profile, TESTS_USECASE_COUNT);
+test_msg!(ProfileList, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/command/proptest.rs b/application/apps/indexer/stypes/src/command/proptest.rs
new file mode 100644
index 0000000000..ac58a5b2b2
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/proptest.rs
@@ -0,0 +1,182 @@
+use crate::*;
+
+impl Arbitrary for CommandOutcome<String> {
+    /// Implements the `Arbitrary` trait for `CommandOutcome<String>` to generate random instances
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates either:
+    ///   - `CommandOutcome::Finished` with a random `String`.
+    ///   - `CommandOutcome::Cancelled`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<String>().prop_map(CommandOutcome::Finished),
+            Just(CommandOutcome::Cancelled),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for CommandOutcome<FoldersScanningResult> {
+    /// Implements the `Arbitrary` trait for `CommandOutcome<FoldersScanningResult>` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates either:
+    ///   - `CommandOutcome::Finished` with a random `FoldersScanningResult`.
+    ///   - `CommandOutcome::Cancelled`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<FoldersScanningResult>().prop_map(CommandOutcome::Finished),
+            Just(CommandOutcome::Cancelled),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for CommandOutcome<SerialPortsList> {
+    /// Implements the `Arbitrary` trait for `CommandOutcome<SerialPortsList>` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates either:
+    ///   - `CommandOutcome::Finished` with a random `SerialPortsList`.
+    ///   - `CommandOutcome::Cancelled`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<SerialPortsList>().prop_map(CommandOutcome::Finished),
+            Just(CommandOutcome::Cancelled),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for CommandOutcome<ProfileList> {
+    /// Implements the `Arbitrary` trait for `CommandOutcome<ProfileList>` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates either:
+    ///   - `CommandOutcome::Finished` with a random `ProfileList`.
+    ///   - `CommandOutcome::Cancelled`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<ProfileList>().prop_map(CommandOutcome::Finished),
+            Just(CommandOutcome::Cancelled),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for CommandOutcome<DltStatisticInfo> {
+    /// Implements the `Arbitrary` trait for `CommandOutcome<DltStatisticInfo>` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates either:
+    ///   - `CommandOutcome::Finished` with a random `DltStatisticInfo`.
+    ///   - `CommandOutcome::Cancelled`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<DltStatisticInfo>().prop_map(CommandOutcome::Finished),
+            Just(CommandOutcome::Cancelled),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for CommandOutcome<()> {
+    /// Implements the `Arbitrary` trait for `CommandOutcome<()>` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates either:
+    ///   - `CommandOutcome::Finished` with `()`.
+    ///   - `CommandOutcome::Cancelled`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            Just(()).prop_map(CommandOutcome::Finished),
+            Just(CommandOutcome::Cancelled),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for CommandOutcome<i64> {
+    /// Implements the `Arbitrary` trait for `CommandOutcome<i64>` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates either:
+    ///   - `CommandOutcome::Finished` with a random `i64` value converted from `i32`.
+    ///   - `CommandOutcome::Cancelled`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<i32>().prop_map(|v| CommandOutcome::Finished(v as i64)),
+            Just(CommandOutcome::Cancelled),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for CommandOutcome<Option<String>> {
+    /// Implements the `Arbitrary` trait for `CommandOutcome<Option<String>>` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates either:
+    ///   - `CommandOutcome::Finished` with a random `Option<String>`.
+    ///   - `CommandOutcome::Cancelled`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<Option<String>>().prop_map(CommandOutcome::Finished),
+            Just(CommandOutcome::Cancelled),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for CommandOutcome<bool> {
+    /// Implements the `Arbitrary` trait for `CommandOutcome<bool>` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates either:
+    ///   - `CommandOutcome::Finished` with a random `bool`.
+    ///   - `CommandOutcome::Cancelled`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<bool>().prop_map(CommandOutcome::Finished),
+            Just(CommandOutcome::Cancelled),
+        ]
+        .boxed()
+    }
+}
+
+test_msg!(CommandOutcome<()>, TESTS_USECASE_COUNT);
+test_msg!(CommandOutcome<bool>, TESTS_USECASE_COUNT);
+test_msg!(CommandOutcome<Option<String>>, TESTS_USECASE_COUNT);
+test_msg!(CommandOutcome<i64>, TESTS_USECASE_COUNT);
+test_msg!(CommandOutcome<SerialPortsList>, TESTS_USECASE_COUNT);
+test_msg!(CommandOutcome<String>, TESTS_USECASE_COUNT);
+test_msg!(CommandOutcome<FoldersScanningResult>, TESTS_USECASE_COUNT);
+test_msg!(CommandOutcome<DltStatisticInfo>, TESTS_USECASE_COUNT);
+test_msg!(CommandOutcome<ProfileList>, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/command/serial/mod.rs b/application/apps/indexer/stypes/src/command/serial/mod.rs
new file mode 100644
index 0000000000..69f3755b58
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/serial/mod.rs
@@ -0,0 +1,17 @@
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+
+/// Represents a list of serial ports.
+///
+/// This structure contains a vector of strings, where each string represents the name
+/// or identifier of a serial port available on the system.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub struct SerialPortsList(pub Vec<String>);
diff --git a/application/apps/indexer/stypes/src/command/serial/proptest.rs b/application/apps/indexer/stypes/src/command/serial/proptest.rs
new file mode 100644
index 0000000000..8183b7a917
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/serial/proptest.rs
@@ -0,0 +1,21 @@
+use crate::*;
+
+impl Arbitrary for SerialPortsList {
+    /// Implements the `Arbitrary` trait for `SerialPortsList` to generate random instances
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates a vector of up to 10 random `String` values, where each string represents
+    ///   the name or identifier of a serial port.
+    /// - Wraps the generated vector into a `SerialPortsList`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::collection::vec(any::<String>(), 0..10)
+            .prop_map(SerialPortsList)
+            .boxed()
+    }
+}
+
+test_msg!(SerialPortsList, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/command/ts.rs b/application/apps/indexer/stypes/src/command/ts.rs
new file mode 100644
index 0000000000..cb57492931
--- /dev/null
+++ b/application/apps/indexer/stypes/src/command/ts.rs
@@ -0,0 +1,146 @@
+use crate::*;
+
+/// Represents the result of a command execution.
+/// At the core level, this type is used for all commands invoked within an `UnboundSession`.
+/// It is only used to indicate the successful completion or interruption of a command.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub enum CommandOutcomeDltStatisticInfoResult {
+    /// Indicates that the command was successfully completed.
+    Finished(DltStatisticInfo),
+    /// Indicates that the command execution was interrupted.
+    Cancelled,
+}
+
+/// Represents the result of a command execution.
+/// At the core level, this type is used for all commands invoked within an `UnboundSession`.
+/// It is only used to indicate the successful completion or interruption of a command.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub enum CommandOutcomeProfilesResult {
+    /// Indicates that the command was successfully completed.
+    Finished(ProfileList),
+    /// Indicates that the command execution was interrupted.
+    Cancelled,
+}
+
+/// Represents the result of a command execution.
+/// At the core level, this type is used for all commands invoked within an `UnboundSession`.
+/// It is only used to indicate the successful completion or interruption of a command.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub enum CommandOutcomeFoldersScanningResult {
+    /// Indicates that the command was successfully completed.
+    Finished(FoldersScanningResult),
+    /// Indicates that the command execution was interrupted.
+    Cancelled,
+}
+
+/// Represents the result of a command execution.
+/// At the core level, this type is used for all commands invoked within an `UnboundSession`.
+/// It is only used to indicate the successful completion or interruption of a command.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub enum CommandOutcomeSerialPortsList {
+    /// Indicates that the command was successfully completed.
+    Finished(SerialPortsList),
+    /// Indicates that the command execution was interrupted.
+    Cancelled,
+}
+
+/// Represents the result of a command execution.
+/// At the core level, this type is used for all commands invoked within an `UnboundSession`.
+/// It is only used to indicate the successful completion or interruption of a command.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub enum CommandOutcomeVoid {
+    /// Indicates that the command was successfully completed.
+    Finished,
+    /// Indicates that the command execution was interrupted.
+    Cancelled,
+}
+
+/// Represents the result of a command execution.
+/// At the core level, this type is used for all commands invoked within an `UnboundSession`.
+/// It is only used to indicate the successful completion or interruption of a command.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub enum CommandOutcomei64 {
+    /// Indicates that the command was successfully completed.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    Finished(i64),
+    /// Indicates that the command execution was interrupted.
+    Cancelled,
+}
+
+/// Represents the result of a command execution.
+/// At the core level, this type is used for all commands invoked within an `UnboundSession`.
+/// It is only used to indicate the successful completion or interruption of a command.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub enum CommandOutcomeOptionalString {
+    /// Indicates that the command was successfully completed.
+    Finished(Option<String>),
+    /// Indicates that the command execution was interrupted.
+    Cancelled,
+}
+
+/// Represents the result of a command execution.
+/// At the core level, this type is used for all commands invoked within an `UnboundSession`.
+/// It is only used to indicate the successful completion or interruption of a command.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub enum CommandOutcomeString {
+    /// Indicates that the command was successfully completed.
+    Finished(String),
+    /// Indicates that the command execution was interrupted.
+    Cancelled,
+}
+
+/// Represents the result of a command execution.
+/// At the core level, this type is used for all commands invoked within an `UnboundSession`.
+/// It is only used to indicate the successful completion or interruption of a command.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "command.ts")
+)]
+pub enum CommandOutcomeBool {
+    /// Indicates that the command was successfully completed.
+    Finished(bool),
+    /// Indicates that the command execution was interrupted.
+    Cancelled,
+}
diff --git a/application/apps/indexer/stypes/src/error/converting.rs b/application/apps/indexer/stypes/src/error/converting.rs
new file mode 100644
index 0000000000..de1c2cd0b3
--- /dev/null
+++ b/application/apps/indexer/stypes/src/error/converting.rs
@@ -0,0 +1,49 @@
+use crate::*;
+
+impl From<std::io::Error> for NativeError {
+    /// Converts a `std::io::Error` into a `NativeError`.
+    ///
+    /// # Mapping Details
+    /// - `severity`: Always set to `Severity::ERROR`.
+    /// - `kind`: Mapped to `NativeErrorKind::Io`.
+    /// - `message`: Set to the string representation of the `std::io::Error`.
+    fn from(err: std::io::Error) -> Self {
+        NativeError {
+            severity: Severity::ERROR,
+            kind: NativeErrorKind::Io,
+            message: Some(err.to_string()),
+        }
+    }
+}
+
+impl From<tokio::sync::mpsc::error::SendError<CallbackEvent>> for NativeError {
+    /// Converts a `tokio::sync::mpsc::error::SendError<CallbackEvent>` into a `NativeError`.
+    ///
+    /// # Mapping Details
+    /// - `severity`: Always set to `Severity::ERROR`.
+    /// - `kind`: Mapped to `NativeErrorKind::ComputationFailed`.
+    /// - `message`: A formatted message indicating that the callback channel is broken.
+    fn from(err: tokio::sync::mpsc::error::SendError<CallbackEvent>) -> Self {
+        NativeError {
+            severity: Severity::ERROR,
+            kind: NativeErrorKind::ComputationFailed,
+            message: Some(format!("Callback channel is broken: {err}")),
+        }
+    }
+}
+
+impl From<ComputationError> for NativeError {
+    /// Converts a `ComputationError` into a `NativeError`.
+    ///
+    /// # Mapping Details
+    /// - `severity`: Always set to `Severity::ERROR`.
+    /// - `kind`: Mapped to `NativeErrorKind::Io`.
+    /// - `message`: Set to the string representation of the `ComputationError`.
+    fn from(err: ComputationError) -> Self {
+        NativeError {
+            severity: Severity::ERROR,
+            kind: NativeErrorKind::Io,
+            message: Some(err.to_string()),
+        }
+    }
+}
diff --git a/application/apps/indexer/stypes/src/error/extending.rs b/application/apps/indexer/stypes/src/error/extending.rs
new file mode 100644
index 0000000000..6cbc536f97
--- /dev/null
+++ b/application/apps/indexer/stypes/src/error/extending.rs
@@ -0,0 +1,35 @@
+use crate::*;
+
+impl NativeError {
+    /// Creates a `NativeError` representing a channel-related error.
+    ///
+    /// # Parameters
+    /// - `msg`: A message describing the error.
+    ///
+    /// # Returns
+    /// A `NativeError` instance with:
+    /// - `severity`: Set to `Severity::ERROR`.
+    /// - `kind`: Set to `NativeErrorKind::ChannelError`.
+    /// - `message`: Set to the provided message.
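+    ///
+    /// A minimal illustrative sketch:
+    ///
+    /// ```ignore
+    /// let err = NativeError::channel("operations loop has exited");
+    /// assert!(matches!(err.kind, NativeErrorKind::ChannelError));
+    /// ```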
+    pub fn channel(msg: &str) -> Self {
+        NativeError {
+            severity: Severity::ERROR,
+            kind: NativeErrorKind::ChannelError,
+            message: Some(String::from(msg)),
+        }
+    }
+}
+
+impl Severity {
+    /// Returns a string representation of the `Severity` value.
+    ///
+    /// # Returns
+    /// - `"WARNING"` for `Severity::WARNING`.
+    /// - `"ERROR"` for `Severity::ERROR`.
+    pub fn as_str(&self) -> &str {
+        match self {
+            Severity::WARNING => "WARNING",
+            Severity::ERROR => "ERROR",
+        }
+    }
+}
diff --git a/application/apps/indexer/stypes/src/error/formating.rs b/application/apps/indexer/stypes/src/error/formating.rs
new file mode 100644
index 0000000000..0faf9c74d9
--- /dev/null
+++ b/application/apps/indexer/stypes/src/error/formating.rs
@@ -0,0 +1,14 @@
+use crate::*;
+
+impl std::fmt::Display for Severity {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        write!(
+            f,
+            "{}",
+            match self {
+                Self::WARNING => "WARNING",
+                Self::ERROR => "ERROR",
+            }
+        )
+    }
+}
diff --git a/application/apps/indexer/stypes/src/error/mod.rs b/application/apps/indexer/stypes/src/error/mod.rs
new file mode 100644
index 0000000000..afcf81323e
--- /dev/null
+++ b/application/apps/indexer/stypes/src/error/mod.rs
@@ -0,0 +1,140 @@
+#[cfg(feature = "rustcore")]
+mod converting;
+#[cfg(feature = "rustcore")]
+mod extending;
+#[cfg(feature = "rustcore")]
+mod formating;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+use thiserror::Error;
+
+/// Indicates the severity level of an error.
+#[allow(clippy::upper_case_acronyms)]
+#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "error.ts")
+)]
+pub enum Severity {
+    /// Warning level, indicates a recoverable issue.
+    WARNING,
+    /// Error level, indicates a critical issue.
+    ERROR,
+}
+
+/// Defines the source or type of an error.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "error.ts")
+)]
+pub enum NativeErrorKind {
+    /// The file was not found.
+    FileNotFound,
+    /// The file type is not supported.
+    UnsupportedFileType,
+    /// A computation process failed.
+    ComputationFailed,
+    /// Configuration-related errors.
+    Configuration,
+    /// The operation was interrupted.
+    Interrupted,
+    /// Errors related to search operations.
+    OperationSearch,
+    /// The feature or functionality is not yet implemented.
+    NotYetImplemented,
+    /// Errors related to communication channels between loops within a session.
+    /// Typically indicates that a loop ended prematurely, preventing message delivery.
+    ChannelError,
+    /// Input/output-related errors.
+    Io,
+    /// Errors related to reading session data, including search result data.
+    Grabber,
+}
+
+/// Describes the details of an error.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "error.ts")
+)]
+pub struct NativeError {
+    /// The severity level of the error.
+    pub severity: Severity,
+    /// The type or source of the error.
+    pub kind: NativeErrorKind,
+    /// A detailed message describing the error.
+    pub message: Option<String>,
+}
+
+/// Describes the type and details of an error.
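+///
+/// A minimal illustrative sketch of the `Display` output produced via `thiserror`
+/// (hypothetical message text):
+///
+/// ```ignore
+/// let err = ComputationError::SearchError(String::from("regex failed"));
+/// assert_eq!(err.to_string(), "Search related error: regex failed");
+/// ```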
+#[derive(Error, Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "error.ts")
+)]
+pub enum ComputationError {
+    /// The destination path must be defined to stream from `MessageProducer`.
+    #[error("Destination path should be defined to stream from MessageProducer")]
+    DestinationPath,
+    /// Failed to create a session.
+    #[error("Fail to create session")]
+    SessionCreatingFail,
+    /// A communication error occurred. Includes a description.
+    #[error("Native communication error ({0})")]
+    Communication(String),
+    /// An unsupported operation was attempted. Includes the operation name.
+    #[error("Operation not supported ({0})")]
+    OperationNotSupported(String),
+    /// An input/output error occurred. Includes a description.
+    #[error("IO error ({0})")]
+    IoOperation(String),
+    /// Invalid data was encountered.
+    #[error("Invalid data error")]
+    InvalidData,
+    /// Invalid arguments were provided. Includes a description.
+    #[error("Invalid arguments")]
+    InvalidArgs(String),
+    /// An error occurred during processing. Includes a description.
+    #[error("Error during processing: ({0})")]
+    Process(String),
+    /// An API was used incorrectly. Includes a description.
+    #[error("Wrong usage of API: ({0})")]
+    Protocol(String),
+    /// A search-related error occurred. Includes a description.
+    #[error("Search related error: {0}")]
+    SearchError(String),
+    /// The `start` method can only be called once.
+    #[error("start method can be called just once")]
+    MultipleInitCall,
+    /// The session is unavailable, either destroyed or not initialized.
+    #[error("Session is destroyed or not inited yet")]
+    SessionUnavailable,
+    /// A native error occurred. Includes the error details.
+    #[error("{0:?}")]
+    NativeError(NativeError),
+    /// Unable to grab content. Includes a description.
+    #[error("Grabbing content not possible: {0}")]
+    Grabbing(String),
+    /// An error occurred while sending data to the source. Includes a description.
+    #[error("Sending data to source error: {0}")]
+    Sde(String),
+    /// An error occurred while decoding a message. Includes a description.
+    #[error("Decoding message error: {0}")]
+    Decoding(String),
+    /// An error occurred while encoding a message. Includes a description.
+    #[error("Encoding message error: {0}")]
+    Encoding(String),
+}
diff --git a/application/apps/indexer/stypes/src/error/nodejs.rs b/application/apps/indexer/stypes/src/error/nodejs.rs
new file mode 100644
index 0000000000..75dd572ab8
--- /dev/null
+++ b/application/apps/indexer/stypes/src/error/nodejs.rs
@@ -0,0 +1,6 @@
+use crate::*;
+
+try_into_js!(Severity);
+try_into_js!(NativeErrorKind);
+try_into_js!(NativeError);
+try_into_js!(ComputationError);
diff --git a/application/apps/indexer/stypes/src/error/proptest.rs b/application/apps/indexer/stypes/src/error/proptest.rs
new file mode 100644
index 0000000000..2351527a63
--- /dev/null
+++ b/application/apps/indexer/stypes/src/error/proptest.rs
@@ -0,0 +1,126 @@
+use crate::*;
+// Arbitrary implementations for Severity, NativeErrorKind, NativeError, and ComputationError.
+
+impl Arbitrary for Severity {
+    /// Implements the `Arbitrary` trait for `Severity` to generate random values
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates random variants of `Severity`:
+    ///   - `Severity::WARNING`
+    ///   - `Severity::ERROR`
+    type Parameters = ();
+
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![Just(Severity::WARNING), Just(Severity::ERROR)].boxed()
+    }
+}
+
+impl Arbitrary for NativeErrorKind {
+    /// Implements the `Arbitrary` trait for `NativeErrorKind` to generate random values
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates random variants of `NativeErrorKind`, including:
+    ///   - `FileNotFound`
+    ///   - `UnsupportedFileType`
+    ///   - `ComputationFailed`
+    ///   - `Configuration`
+    ///   - `Interrupted`
+    ///   - `OperationSearch`
+    ///   - `NotYetImplemented`
+    ///   - `ChannelError`
+    ///   - `Io`
+    ///   - `Grabber`
+    type Parameters = ();
+
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            Just(NativeErrorKind::FileNotFound),
+            Just(NativeErrorKind::UnsupportedFileType),
+            Just(NativeErrorKind::ComputationFailed),
+            Just(NativeErrorKind::Configuration),
+            Just(NativeErrorKind::Interrupted),
+            Just(NativeErrorKind::OperationSearch),
+            Just(NativeErrorKind::NotYetImplemented),
+            Just(NativeErrorKind::ChannelError),
+            Just(NativeErrorKind::Io),
+            Just(NativeErrorKind::Grabber)
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for NativeError {
+    /// Implements the `Arbitrary` trait for `NativeError` to generate random values
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates:
+    ///   - A random `Severity` value.
+    ///   - A random `NativeErrorKind` value.
+    ///   - An optional random `String` for the message.
+    type Parameters = ();
+
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            Severity::arbitrary().boxed(),
+            NativeErrorKind::arbitrary().boxed(),
+            prop::option::of(any::<String>()),
+        )
+            .prop_map(|(severity, kind, message)| NativeError {
+                severity,
+                kind,
+                message,
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for ComputationError {
+    /// Implements the `Arbitrary` trait for `ComputationError` to generate random values
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates random variants of `ComputationError`, including:
+    ///   - Fixed errors such as `DestinationPath`, `SessionCreatingFail`, etc.
+    ///   - Errors with random `String` values for fields like `Communication`, `IoOperation`, etc.
+    ///   - Nested errors, such as `NativeError`.
+    type Parameters = ();
+
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            Just(ComputationError::DestinationPath),
+            Just(ComputationError::SessionCreatingFail),
+            any::<String>().prop_map(ComputationError::Communication),
+            any::<String>().prop_map(ComputationError::OperationNotSupported),
+            any::<String>().prop_map(ComputationError::IoOperation),
+            Just(ComputationError::InvalidData),
+            any::<String>().prop_map(ComputationError::InvalidArgs),
+            any::<String>().prop_map(ComputationError::Process),
+            any::<String>().prop_map(ComputationError::Protocol),
+            any::<String>().prop_map(ComputationError::SearchError),
+            Just(ComputationError::MultipleInitCall),
+            Just(ComputationError::SessionUnavailable),
+            NativeError::arbitrary().prop_map(ComputationError::NativeError),
+            any::<String>().prop_map(ComputationError::Grabbing),
+            any::<String>().prop_map(ComputationError::Sde),
+            any::<String>().prop_map(ComputationError::Decoding),
+            any::<String>().prop_map(ComputationError::Encoding),
+        ]
+        .boxed()
+    }
+}
+
+test_msg!(Severity, TESTS_USECASE_COUNT);
+test_msg!(NativeErrorKind, TESTS_USECASE_COUNT);
+test_msg!(NativeError, TESTS_USECASE_COUNT);
+test_msg!(ComputationError, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/lf_transition/extending.rs b/application/apps/indexer/stypes/src/lf_transition/extending.rs
new file mode 100644
index 0000000000..b3616f9a10
--- /dev/null
+++ b/application/apps/indexer/stypes/src/lf_transition/extending.rs
@@ -0,0 +1,53 @@
+use crate::*;
+
+impl LifecycleTransition {
+    /// Retrieves the `Uuid` associated with the lifecycle transition.
+    ///
+    /// # Returns
+    /// - The `Uuid` of the operation, regardless of its state.
+    pub fn uuid(&self) -> Uuid {
+        match self {
+            Self::Started { uuid, alias: _ } => *uuid,
+            Self::Ticks { uuid, ticks: _ } => *uuid,
+            Self::Stopped(uuid) => *uuid,
+        }
+    }
+
+    /// Creates a new `LifecycleTransition::Started` instance.
+    ///
+    /// # Parameters
+    /// - `uuid`: The unique identifier of the operation.
+    /// - `alias`: A user-friendly name for the operation.
+    ///
+    /// # Returns
+    /// - A new `LifecycleTransition::Started` instance.
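+    ///
+    /// A minimal illustrative sketch (hypothetical alias):
+    ///
+    /// ```ignore
+    /// let uuid = Uuid::new_v4();
+    /// let transition = LifecycleTransition::started(&uuid, "tail session");
+    /// assert_eq!(transition.uuid(), uuid);
+    /// ```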
+    pub fn started(uuid: &Uuid, alias: &str) -> Self {
+        LifecycleTransition::Started {
+            uuid: *uuid,
+            alias: alias.to_owned(),
+        }
+    }
+
+    /// Creates a new `LifecycleTransition::Stopped` instance.
+    ///
+    /// # Parameters
+    /// - `uuid`: The unique identifier of the operation.
+    ///
+    /// # Returns
+    /// - A new `LifecycleTransition::Stopped` instance.
+    pub fn stopped(uuid: &Uuid) -> Self {
+        LifecycleTransition::Stopped(*uuid)
+    }
+
+    /// Creates a new `LifecycleTransition::Ticks` instance.
+    ///
+    /// # Parameters
+    /// - `uuid`: The unique identifier of the operation.
+    /// - `ticks`: Progress information associated with the operation.
+    ///
+    /// # Returns
+    /// - A new `LifecycleTransition::Ticks` instance.
+    pub fn ticks(uuid: &Uuid, ticks: Ticks) -> Self {
+        LifecycleTransition::Ticks { uuid: *uuid, ticks }
+    }
+}
diff --git a/application/apps/indexer/stypes/src/lf_transition/mod.rs b/application/apps/indexer/stypes/src/lf_transition/mod.rs
new file mode 100644
index 0000000000..4b76047ce3
--- /dev/null
+++ b/application/apps/indexer/stypes/src/lf_transition/mod.rs
@@ -0,0 +1,36 @@
+#[cfg(feature = "rustcore")]
+mod extending;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+
+/// Describes the progress of an operation.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "lf_transition.ts")
+)]
+pub enum LifecycleTransition {
+    /// The operation has started.
+    Started {
+        /// The unique identifier of the operation.
+        uuid: Uuid,
+        /// A user-friendly name of the operation for display purposes.
+        alias: String,
+    },
+    /// The progress of the operation.
+    Ticks {
+        /// The unique identifier of the operation.
+        uuid: Uuid,
+        /// The progress data associated with the operation.
+        ticks: Ticks,
+    },
+    /// The operation has completed or was interrupted.
+    /// - `Uuid`: The unique identifier of the operation.
+    Stopped(Uuid),
+}
diff --git a/application/apps/indexer/stypes/src/lf_transition/nodejs.rs b/application/apps/indexer/stypes/src/lf_transition/nodejs.rs
new file mode 100644
index 0000000000..f837b321e8
--- /dev/null
+++ b/application/apps/indexer/stypes/src/lf_transition/nodejs.rs
@@ -0,0 +1,3 @@
+use crate::*;
+
+try_into_js!(LifecycleTransition);
diff --git a/application/apps/indexer/stypes/src/lf_transition/proptest.rs b/application/apps/indexer/stypes/src/lf_transition/proptest.rs
new file mode 100644
index 0000000000..e58797c3c0
--- /dev/null
+++ b/application/apps/indexer/stypes/src/lf_transition/proptest.rs
@@ -0,0 +1,28 @@
+use crate::*;
+use uuid::Uuid;
+
+impl Arbitrary for LifecycleTransition {
+    /// Implements the `Arbitrary` trait for `LifecycleTransition` to generate random values
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Randomly generates one of the `LifecycleTransition` variants:
+    ///   - `Started`: Generates a random `Uuid` and a random alias (`String`).
+    ///   - `Ticks`: Generates a random `Uuid` and a random `Ticks` value.
+    ///   - `Stopped`: Generates a random `Uuid`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            (Just(Uuid::new_v4()), any::<String>())
+                .prop_map(|(uuid, alias)| LifecycleTransition::Started { uuid, alias }),
+            (Just(Uuid::new_v4()), any::<Ticks>())
+                .prop_map(|(uuid, ticks)| LifecycleTransition::Ticks { uuid, ticks }),
+            Just(Uuid::new_v4()).prop_map(LifecycleTransition::Stopped),
+        ]
+        .boxed()
+    }
+}
+
+test_msg!(LifecycleTransition, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/lib.rs b/application/apps/indexer/stypes/src/lib.rs
new file mode 100644
index 0000000000..1c60267f51
--- /dev/null
+++ b/application/apps/indexer/stypes/src/lib.rs
@@ -0,0 +1,135 @@
+//! The `stypes` crate provides data types used at the `rustcore` level and passed to clients.
+//! While the `stypes` crate does not impose restrictions on the client type, some features are
+//! specifically defined to support certain types of clients. This does not mean that `stypes` cannot
+//! be used with other client types.
+//!
+//! ## Features
+//! - `nodejs`: Includes the implementation of the `TryIntoJs` trait, required for transferring data
+//!   into the Node.js context when using the `node_bindgen` crate.
+//! - `rustcore`: Includes additional utilities and extensions for using `stypes` within the
+//!   `indexer` crate group.
+//!
+//! ## Proptest Integration
+//! The crate includes tests based on `proptest`. These tests not only validate the crate itself
+//! but also generate binary files containing variations of each message in a predefined directory.
+//! These files can later be used to test encoding/decoding on the client side.
+//!
+//! The `test_msg` macro is used to generate tests for specific data types. For example:
+//!
+//! ```ignore
+//! test_msg!(ObserveOptions, 100);
+//! ```
+//!
+//! The above code generates 100 variations of `ObserveOptions`. During tests, the generated data
+//! is saved to files in the specified path. To set the output path, use the `CHIPMUNK_PROTOCOL_TEST_OUTPUT`
+//! environment variable:
+//!
+//! ```ignore
+//! export CHIPMUNK_PROTOCOL_TEST_OUTPUT="/tmp/test_data"
+//! cargo test --release -- --nocapture
+//! ```
+//!
+//! If `CHIPMUNK_PROTOCOL_TEST_OUTPUT` is not set, the default path `$TMP/stypes_test` will be used.
+//! It is recommended to run tests with the `--release` flag to speed up random variation generation,
+//! as the process is significantly slower in debug mode.
+//!
+//! Each data type will have its own directory, and each variation will be stored in files with
+//! sequential names (`1.bin`, `2.bin`, etc.).
+//!
+//! ## WARNING
+//! When tests are run, the folder specified in `CHIPMUNK_PROTOCOL_TEST_OUTPUT` is completely deleted.
+//! Be extremely cautious when setting the value of this environment variable.
+//!
+//! ## Limitations
+//! The current version of `stypes` uses `bincode` for encoding and decoding types. `bincode` requires
+//! both serialization and deserialization implementations. However, using custom `serde` attributes
+//! may lead to protocol instability, especially during decoding. For instance, the attribute
+//! `#[serde(tag = "type", content = "value")]` makes it impossible to decode messages serialized
+//! with these settings. Unfortunately, `bincode` does not raise compile-time or serialization-time
+//! errors, but only fails during decoding. Therefore, it is strongly recommended to test
+//! encoding/decoding when using additional attributes on types.
+//!
+//! ## Implementation of `encode` and `decode` Methods
+//! The `encode` and `decode` methods are added to each declared data type using the `encode_decode`
+//! macro from the `extend` crate. This macro implements encoding and decoding for the data type
+//! using `bincode`.
+//!
+//! For example, the following code:
+//!
+//! ```ignore
+//! #[derive(Debug, Serialize, Deserialize, Clone)]
+//! #[extend::encode_decode]
+//! pub struct Notification {
+//!     pub severity: Severity,
+//!     pub content: String,
+//!     pub line: Option<usize>,
+//! }
+//! ```
+//!
+//! is transformed into:
+//!
+//! ```ignore
+//! #[derive(Debug, Serialize, Deserialize, Clone)]
+//! pub struct Notification {
+//!     pub severity: Severity,
+//!     pub content: String,
+//!     pub line: Option<usize>,
+//! }
+//!
+//! impl Notification {
+//!     pub fn encode(&self) -> Result<Vec<u8>, String> {
+//!         bincode::serialize(self).map_err(|e| e.to_string())
+//!     }
+//!
+//!     pub fn decode(buf: &[u8]) -> Result<Self, String> {
+//!         bincode::deserialize(buf).map_err(|e| e.to_string())
+//!     }
+//! }
+//! ```
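+//!
+//! A minimal illustrative round trip using the generated methods (a sketch with the
+//! hypothetical `Notification` type from above; error handling omitted):
+//!
+//! ```ignore
+//! let msg = Notification {
+//!     severity: Severity::ERROR,
+//!     content: String::from("something went wrong"),
+//!     line: None,
+//! };
+//! let bytes = msg.encode().expect("encoding failed");
+//! let restored = Notification::decode(&bytes).expect("decoding failed");
+//! ```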
+#[cfg(test)]
+mod tests;
+
+#[cfg(feature = "nodejs")]
+mod nodejs;
+
+mod attachment;
+mod callback;
+mod command;
+mod error;
+mod lf_transition;
+mod miscellaneous;
+mod observe;
+mod operations;
+mod progress;
+
+pub use attachment::*;
+pub use callback::*;
+pub use command::*;
+pub use error::*;
+pub use lf_transition::*;
+pub use miscellaneous::*;
+pub use observe::*;
+pub use operations::*;
+pub use progress::*;
+
+pub(crate) use serde::{de::DeserializeOwned, Deserialize, Serialize};
+pub(crate) use std::{collections::HashMap, path::PathBuf};
+#[cfg(test)]
+pub(crate) use ts_rs::TS;
+pub(crate) use uuid::Uuid;
+
+#[cfg(feature = "nodejs")]
+pub(crate) use node_bindgen::{
+    core::{safebuffer::SafeArrayBuffer, val::JsEnv, NjError, TryIntoJs},
+    sys::napi_value,
+};
+
+#[cfg(test)]
+pub(crate) use proptest::prelude::*;
+#[cfg(test)]
+pub(crate) use tests::*;
+
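+/// Serializes any `Serialize` value with `bincode`, i.e. into the same representation
+/// used by the `encode` methods generated via `extend::encode_decode`.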
+#[cfg(feature = "rustcore")]
+pub fn serialize<T: Serialize>(v: &T) -> Result<Vec<u8>, bincode::Error> {
+    bincode::serialize(v)
+}
diff --git a/application/apps/indexer/stypes/src/miscellaneous/converting.rs b/application/apps/indexer/stypes/src/miscellaneous/converting.rs
new file mode 100644
index 0000000000..bc93a34ff6
--- /dev/null
+++ b/application/apps/indexer/stypes/src/miscellaneous/converting.rs
@@ -0,0 +1,87 @@
+use crate::*;
+use std::ops::RangeInclusive;
+
+impl From<HashMap<String, String>> for MapKeyValue {
+    fn from(map: HashMap<String, String>) -> Self {
+        MapKeyValue(map)
+    }
+}
+
+impl From<Vec<GrabbedElement>> for GrabbedElementList {
+    /// Converts a `Vec<GrabbedElement>` into a `GrabbedElementList`.
+    ///
+    /// # Parameters
+    /// - `els`: A vector of `GrabbedElement` instances.
+    ///
+    /// # Returns
+    /// - A `GrabbedElementList` containing the elements from the input vector.
+    fn from(els: Vec<GrabbedElement>) -> Self {
+        Self(els)
+    }
+}
+
+impl From<Vec<SourceDefinition>> for Sources {
+    /// Converts a `Vec<SourceDefinition>` into a `Sources`.
+    ///
+    /// # Parameters
+    /// - `els`: A vector of `SourceDefinition` instances.
+    ///
+    /// # Returns
+    /// - A `Sources` containing the elements from the input vector.
+    fn from(els: Vec<SourceDefinition>) -> Self {
+        Self(els)
+    }
+}
+
+impl From<(Option<u64>, Option<u64>)> for AroundIndexes {
+    /// Converts a tuple `(Option<u64>, Option<u64>)` into an `AroundIndexes`.
+    ///
+    /// # Parameters
+    /// - `value`: A tuple containing two optional `u64` values.
+    ///
+    /// # Returns
+    /// - An `AroundIndexes` instance containing the input tuple.
+    fn from(value: (Option<u64>, Option<u64>)) -> Self {
+        Self(value)
+    }
+}
+
+impl From<RangeInclusive<u64>> for Range {
+    /// Converts a `RangeInclusive<u64>` into a `Range`.
+    ///
+    /// # Parameters
+/// - `range`: A `RangeInclusive<u64>` instance.
+    ///
+    /// # Returns
+    /// - A `Range` instance.
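+///
+/// A minimal illustrative sketch:
+///
+/// ```ignore
+/// let range: Range = (10u64..=20u64).into();
+/// assert_eq!((range.start, range.end), (10, 20));
+/// ```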
+    fn from(range: RangeInclusive<u64>) -> Self {
+        let (start, end) = range.into_inner();
+        Self { start, end }
+    }
+}
+
+impl From<Vec<RangeInclusive<u64>>> for Ranges {
+    /// Converts a `Vec<RangeInclusive<u64>>` into a `Ranges`.
+    ///
+    /// # Parameters
+    /// - `value`: A vector of `RangeInclusive<u64>` instances.
+    ///
+    /// # Returns
+    /// - A `Ranges` instance containing the input ranges.
+    fn from(value: Vec<RangeInclusive<u64>>) -> Self {
+        Self(value.into_iter().map(|r| r.into()).collect())
+    }
+}
+
+impl From<&Vec<FilterMatch>> for FilterMatchList {
+    /// Converts a `&Vec<FilterMatch>` into a `FilterMatchList`.
+    ///
+    /// # Parameters
+    /// - `value`: A reference to a vector of `FilterMatch` instances.
+    ///
+    /// # Returns
+    /// - A `FilterMatchList` containing a copy of the elements in the input vector.
+    fn from(value: &Vec<FilterMatch>) -> Self {
+        FilterMatchList(value.to_vec())
+    }
+}
diff --git a/application/apps/indexer/stypes/src/miscellaneous/extending.rs b/application/apps/indexer/stypes/src/miscellaneous/extending.rs
new file mode 100644
index 0000000000..f81392fff2
--- /dev/null
+++ b/application/apps/indexer/stypes/src/miscellaneous/extending.rs
@@ -0,0 +1,25 @@
+use crate::*;
+
+impl GrabbedElement {
+    /// Sets the `nature` field of the `GrabbedElement`.
+    ///
+    /// # Parameters
+    /// - `nature`: A `u8` value representing the new nature of the element.
+    pub fn set_nature(&mut self, nature: u8) {
+        self.nature = nature;
+    }
+}
+
+impl FilterMatch {
+    /// Creates a new `FilterMatch` instance.
+    ///
+    /// # Parameters
+    /// - `index`: The index of the log entry that matches the filter.
+    /// - `filters`: A vector of `u8` values representing the filter IDs that matched.
+    ///
+    /// # Returns
+    /// - A new `FilterMatch` instance with the specified index and filters.
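+    ///
+    /// A minimal illustrative sketch:
+    ///
+    /// ```ignore
+    /// let m = FilterMatch::new(42, vec![0, 1]);
+    /// assert_eq!(m.index, 42);
+    /// ```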
+    pub fn new(index: u64, filters: Vec<u8>) -> Self {
+        Self { index, filters }
+    }
+}
diff --git a/application/apps/indexer/stypes/src/miscellaneous/mod.rs b/application/apps/indexer/stypes/src/miscellaneous/mod.rs
new file mode 100644
index 0000000000..df677ac90a
--- /dev/null
+++ b/application/apps/indexer/stypes/src/miscellaneous/mod.rs
@@ -0,0 +1,179 @@
+#[cfg(feature = "rustcore")]
+mod converting;
+#[cfg(feature = "rustcore")]
+mod extending;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+
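+/// A key-value map of strings delivered as a single message (used, for example, as
+/// the payload of `CommandOutcome<MapKeyValue>`).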
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub struct MapKeyValue(pub HashMap<String, String>);
+
+/// Representation of a range. We cannot use the std range types because there is no
+/// way to derive `Serialize` and `Deserialize` for them.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub struct Range {
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub start: u64,
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub end: u64,
+}
+
+/// A list of ranges to read.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub struct Ranges(pub Vec<Range>);
+
+/// Describes a data source.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub struct SourceDefinition {
+    /// The unique identifier of the source.
+    pub id: u16,
+    /// The user-friendly name of the source for display purposes.
+    pub alias: String,
+}
+
+/// A list of data sources.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub struct Sources(pub Vec<SourceDefinition>);
+
+/// A request to a stream that supports feedback, such as a terminal command
+/// that accepts input through `stdin`.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub enum SdeRequest {
+    /// Sends a text string.
+    WriteText(String),
+    /// Sends raw bytes.
+    WriteBytes(Vec<u8>),
+}
+
+/// The response from a source to a sent `SdeRequest`. Note that sending data
+/// with `SdeRequest` does not guarantee a response, as the behavior depends
+/// on the source.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub struct SdeResponse {
+    /// The number of bytes received.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub bytes: usize,
+}
+
+/// Information about a log entry.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub struct GrabbedElement {
+    /// The unique identifier of the source.
+    pub source_id: u16,
+    /// The textual content of the log entry.
+    pub content: String,
+    /// The position of the log entry in the overall stream.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub pos: usize,
+    /// The nature of the log entry, represented as a bitmask. Possible values include:
+    /// - `SEARCH`: Nature = Nature(1)
+    /// - `BOOKMARK`: Nature = Nature(1 << 1)
+    /// - `EXPANDED`: Nature = Nature(1 << 5)
+    /// - `BREADCRUMB`: Nature = Nature(1 << 6)
+    /// - `BREADCRUMB_SEPARATOR`: Nature = Nature(1 << 7)
+    pub nature: u8,
+}
+
+/// A list of log entries.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub struct GrabbedElementList(pub Vec<GrabbedElement>);
+
+/// Data about indices (log entry numbers). Used to provide information about
+/// the nearest search results relative to a specific log entry number.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    ts(type = "[number | undefined | null, number | undefined | null]")
+)]
+pub struct AroundIndexes(pub (Option<u64>, Option<u64>));
+
+/// Describes a match for a search condition.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub struct FilterMatch {
+    /// The index (number) of the matching log entry.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub index: u64,
+    /// The identifiers of the filters (search conditions) that matched
+    /// the specified log entry.
+    pub filters: Vec<u8>,
+}
+
+/// A list of matches for a search condition.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "miscellaneous.ts")
+)]
+pub struct FilterMatchList(pub Vec<FilterMatch>);
diff --git a/application/apps/indexer/stypes/src/miscellaneous/nodejs.rs b/application/apps/indexer/stypes/src/miscellaneous/nodejs.rs
new file mode 100644
index 0000000000..e11995a2ef
--- /dev/null
+++ b/application/apps/indexer/stypes/src/miscellaneous/nodejs.rs
@@ -0,0 +1,12 @@
+use crate::*;
+
+try_into_js!(GrabbedElement);
+try_into_js!(GrabbedElementList);
+try_into_js!(Ranges);
+try_into_js!(SourceDefinition);
+try_into_js!(Sources);
+try_into_js!(SdeRequest);
+try_into_js!(SdeResponse);
+try_into_js!(AroundIndexes);
+try_into_js!(FilterMatch);
+try_into_js!(FilterMatchList);
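
Every type listed above also carries `#[extend::encode_decode]`, so a bincode round trip is available on each of them (the generated `encode`/`decode` signatures are shown in `tools/extend/src/lib.rs` later in this diff). A minimal round-trip sketch, assuming the types are re-exported at the `stypes` crate root:

```rust
use stypes::SdeRequest;

fn roundtrip() -> Result<(), String> {
    let req = SdeRequest::WriteText(String::from("help\n"));
    // `encode`/`decode` are generated by the `#[extend::encode_decode]` attribute.
    let bytes = req.encode()?;
    let back = SdeRequest::decode(&bytes)?;
    assert!(matches!(back, SdeRequest::WriteText(_)));
    Ok(())
}
```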
diff --git a/application/apps/indexer/stypes/src/miscellaneous/proptest.rs b/application/apps/indexer/stypes/src/miscellaneous/proptest.rs
new file mode 100644
index 0000000000..e9b901fc1a
--- /dev/null
+++ b/application/apps/indexer/stypes/src/miscellaneous/proptest.rs
@@ -0,0 +1,202 @@
+use crate::*;
+
+impl Arbitrary for Range {
+    /// Implements the `Arbitrary` trait for `Range` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates random `start` and `end` bounds from `u32` values widened to `u64`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (any::<u32>(), any::<u32>())
+            .prop_map(|(start, end)| Range {
+                start: start as u64,
+                end: end as u64,
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for Ranges {
+    /// Implements the `Arbitrary` trait for `Ranges` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates a vector of up to 10 random `Range` instances.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::collection::vec(Range::arbitrary(), 0..10)
+            .prop_map(Ranges)
+            .boxed()
+    }
+}
+
+impl Arbitrary for SourceDefinition {
+    /// Implements the `Arbitrary` trait for `SourceDefinition` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates random `id` (`u16`) and `alias` (`String`) values.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (any::<u16>(), any::<String>())
+            .prop_map(|(id, alias)| SourceDefinition { id, alias })
+            .boxed()
+    }
+}
+
+impl Arbitrary for Sources {
+    /// Implements the `Arbitrary` trait for `Sources` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates a vector of up to 10 random `SourceDefinition` instances.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::collection::vec(any::<SourceDefinition>(), 0..10)
+            .prop_map(Sources)
+            .boxed()
+    }
+}
+
+impl Arbitrary for SdeRequest {
+    /// Implements the `Arbitrary` trait for `SdeRequest` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates either:
+    ///   - `WriteText` with a random `String`.
+    ///   - `WriteBytes` with a random vector of `u8` values (up to 100 bytes).
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<String>().prop_map(SdeRequest::WriteText),
+            prop::collection::vec(any::<u8>(), 0..100).prop_map(SdeRequest::WriteBytes),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for SdeResponse {
+    /// Implements the `Arbitrary` trait for `SdeResponse` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates a random `u32` for the `bytes` field.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        any::<u32>()
+            .prop_map(|n| n as usize)
+            .prop_map(|bytes| SdeResponse { bytes })
+            .boxed()
+    }
+}
+
+impl Arbitrary for GrabbedElement {
+    /// Implements the `Arbitrary` trait for `GrabbedElement` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates:
+    ///   - A random `source_id` (`u16`).
+    ///   - A random `content` (`String`).
+    ///   - A random `pos` (`usize`).
+    ///   - A random `nature` (`u8`).
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (any::<u16>(), any::<String>(), any::<u32>(), any::<u8>())
+            .prop_map(|(source_id, content, pos, nature)| GrabbedElement {
+                source_id,
+                content,
+                pos: pos as usize,
+                nature,
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for GrabbedElementList {
+    /// Implements the `Arbitrary` trait for `GrabbedElementList` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates a vector of up to 10 random `GrabbedElement` instances.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::collection::vec(any::<GrabbedElement>(), 0..10)
+            .prop_map(GrabbedElementList)
+            .boxed()
+    }
+}
+
+impl Arbitrary for AroundIndexes {
+    /// Implements the `Arbitrary` trait for `AroundIndexes` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates a tuple of two optional `u32` values, mapped to `u64`.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (any::<Option<u32>>(), any::<Option<u32>>())
+            .prop_map(|(start, end)| {
+                AroundIndexes((start.map(|n| n as u64), end.map(|n| n as u64)))
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for FilterMatch {
+    /// Implements the `Arbitrary` trait for `FilterMatch` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates:
+    ///   - A random `index` (`u64`).
+    ///   - A random vector of `u8` filter IDs (up to 10 filters).
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (any::<u32>(), prop::collection::vec(any::<u8>(), 0..10))
+            .prop_map(|(index, filters)| FilterMatch {
+                index: index as u64,
+                filters,
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for FilterMatchList {
+    /// Implements the `Arbitrary` trait for `FilterMatchList` to generate random instances.
+    ///
+    /// # Details
+    /// - Generates a vector of up to 10 random `FilterMatch` instances.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::collection::vec(any::<FilterMatch>(), 0..10)
+            .prop_map(FilterMatchList)
+            .boxed()
+    }
+}
+
+test_msg!(SourceDefinition, TESTS_USECASE_COUNT);
+test_msg!(Sources, TESTS_USECASE_COUNT);
+test_msg!(SdeRequest, TESTS_USECASE_COUNT);
+test_msg!(SdeResponse, TESTS_USECASE_COUNT);
+test_msg!(GrabbedElement, TESTS_USECASE_COUNT);
+test_msg!(GrabbedElementList, TESTS_USECASE_COUNT);
+test_msg!(AroundIndexes, TESTS_USECASE_COUNT);
+test_msg!(FilterMatch, TESTS_USECASE_COUNT);
+test_msg!(FilterMatchList, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/nodejs.rs b/application/apps/indexer/stypes/src/nodejs.rs
new file mode 100644
index 0000000000..40161b76f9
--- /dev/null
+++ b/application/apps/indexer/stypes/src/nodejs.rs
@@ -0,0 +1,25 @@
+/// This macro is used with the `nodejs` feature. It allows adding an implementation of the `TryIntoJs` trait,
+/// enabling seamless data conversion for use in a Node.js context when using the `node_bindgen` crate.
+///
+/// It's important to note that data can still be passed without implementing this trait; however, its use
+/// significantly simplifies the code and improves readability by allowing explicit type annotations in function outputs.
+///
+/// Example code from a trait using `node_bindgen`:
+/// ```ignore
+/// // OutputType implements TryIntoJs
+/// pub fn public_api_call() -> Result<OutputType, ErrType> { ... }
+///
+/// // OutputType doesn't implement TryIntoJs
+/// pub fn public_api_call() -> Result<SafeArrayBuffer, SafeArrayBuffer> { ... }
+/// ```
+#[macro_export]
+macro_rules! try_into_js {
+    ($($t:tt)+) => {
+        impl TryIntoJs for $($t)+ {
+            fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {
+                SafeArrayBuffer::new(self.encode().map_err(NjError::Other)?)
+                    .try_to_js(js_env)
+            }
+        }
+    };
+}
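
For reference, invoking the macro for a concrete type, e.g. `try_into_js!(SdeResponse);`, expands to roughly the following (illustrative expansion derived from the macro body above):

```rust
impl TryIntoJs for SdeResponse {
    fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {
        // Serialize with the generated `encode()` and hand the bytes to Node.js
        // as a `SafeArrayBuffer`.
        SafeArrayBuffer::new(self.encode().map_err(NjError::Other)?).try_to_js(js_env)
    }
}
```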
diff --git a/application/apps/indexer/stypes/src/observe/extending.rs b/application/apps/indexer/stypes/src/observe/extending.rs
new file mode 100644
index 0000000000..5e9415b68e
--- /dev/null
+++ b/application/apps/indexer/stypes/src/observe/extending.rs
@@ -0,0 +1,111 @@
+use std::net::IpAddr;
+
+use crate::*;
+use thiserror::Error;
+
+impl ObserveOptions {
+    /// Creates a new `ObserveOptions` instance for a file.
+    ///
+    /// # Parameters
+    /// - `filename`: The path to the file to be observed.
+    /// - `file_origin`: The format of the file (e.g., `FileFormat`).
+    /// - `parser`: The parser to be used for processing the file (e.g., `ParserType`).
+    ///
+    /// # Returns
+    /// - A new `ObserveOptions` instance configured for the specified file.
+    pub fn file(filename: PathBuf, file_origin: FileFormat, parser: ParserType) -> Self {
+        ObserveOptions {
+            origin: ObserveOrigin::File(Uuid::new_v4().to_string(), file_origin, filename),
+            parser,
+        }
+    }
+}
+
+impl Default for DltParserSettings {
+    /// Provides a default implementation for `DltParserSettings`.
+    ///
+    /// # Defaults
+    /// - `filter_config`: `None`
+    /// - `fibex_file_paths`: `None`
+    /// - `with_storage_header`: `true`
+    /// - `tz`: `None`
+    /// - `fibex_metadata`: `None`
+    fn default() -> Self {
+        Self {
+            filter_config: None,
+            fibex_file_paths: None,
+            with_storage_header: true,
+            tz: None,
+            fibex_metadata: None,
+        }
+    }
+}
+
+impl DltParserSettings {
+    /// Creates a new `DltParserSettings` instance with storage headers included.
+    ///
+    /// # Parameters
+    /// - `filter_config`: Optional filter configuration for parsing.
+    /// - `fibex_file_paths`: Optional list of paths to Fibex files.
+    ///
+    /// # Returns
+    /// - A new `DltParserSettings` instance.
+    pub fn new_including_storage_headers(
+        filter_config: Option<dlt_core::filtering::DltFilterConfig>,
+        fibex_file_paths: Option<Vec<String>>,
+    ) -> Self {
+        Self {
+            filter_config,
+            fibex_file_paths,
+            with_storage_header: true,
+            tz: None,
+            fibex_metadata: None,
+        }
+    }
+
+    /// Loads Fibex metadata for the parser settings.
+    ///
+    /// # Details
+    /// If `fibex_file_paths` is specified and `fibex_metadata` is not already loaded,
+    /// this function gathers Fibex data using the specified paths.
+    pub fn load_fibex_metadata(&mut self) {
+        if self.fibex_metadata.is_some() {
+            return;
+        }
+        self.fibex_metadata = if let Some(paths) = self.fibex_file_paths.as_ref() {
+            dlt_core::fibex::gather_fibex_data(dlt_core::fibex::FibexConfig {
+                fibex_file_paths: paths.clone(),
+            })
+        } else {
+            None
+        };
+    }
+}
+
+#[derive(Error, Debug)]
+/// Represents errors related to networking operations.
+pub enum NetError {
+    /// Indicates a problem with the configuration.
+    #[error("Problem with configuration found: {0}")]
+    Configuration(String),
+
+    /// Represents an I/O error.
+    #[error("IO error: {0:?}")]
+    Io(#[from] std::io::Error),
+}
+
+impl MulticastInfo {
+    /// Parses the multicast address into an `IpAddr`.
+    ///
+    /// # Returns
+    /// - `Ok(IpAddr)` if the address is successfully parsed.
+    /// - `Err(NetError::Configuration)` if the address cannot be parsed.
+    pub fn multicast_addr(&self) -> Result<IpAddr, NetError> {
+        self.multiaddr.to_string().parse().map_err(|e| {
+            NetError::Configuration(format!(
+                "Could not parse multicast address \"{}\": {e}",
+                self.multiaddr
+            ))
+        })
+    }
+}
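
A minimal usage sketch (not part of this diff) combining `ObserveOptions::file` with the `Default` implementation of `DltParserSettings`; the file path is a hypothetical example:

```rust
use std::path::PathBuf;

fn observe_dlt_file() -> ObserveOptions {
    // `FileFormat` and `ParserType` are defined in `observe/mod.rs` below.
    ObserveOptions::file(
        PathBuf::from("/tmp/trace.dlt"),
        FileFormat::Binary,
        ParserType::Dlt(DltParserSettings::default()),
    )
}
```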
diff --git a/application/apps/indexer/stypes/src/observe/mod.rs b/application/apps/indexer/stypes/src/observe/mod.rs
new file mode 100644
index 0000000000..01f989cb08
--- /dev/null
+++ b/application/apps/indexer/stypes/src/observe/mod.rs
@@ -0,0 +1,234 @@
+#[cfg(feature = "rustcore")]
+mod extending;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+#[cfg(feature = "rustcore")]
+pub use extending::*;
+
+use crate::*;
+use dlt_core::filtering::DltFilterConfig;
+
+/// Multicast configuration information.
+/// - `multiaddr`: A valid multicast address.
+/// - `interface`: The address of the local interface used to join the multicast group.
+///   If set to `INADDR_ANY`, the system selects an appropriate interface.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub struct MulticastInfo {
+    pub multiaddr: String,
+    pub interface: Option<String>,
+}
+
+/// Configuration for UDP connections.
+#[derive(Clone, Serialize, Deserialize, Debug)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub struct UdpConnectionInfo {
+    /// A list of multicast addresses to listen on.
+    pub multicast_addr: Vec<MulticastInfo>,
+}
+
+/// Specifies the parser to be used for processing session data.
+#[allow(clippy::large_enum_variant)]
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub enum ParserType {
+    /// DLT parser for files (including PCAP files) or streams (TCP/UDP).
+    Dlt(DltParserSettings),
+    /// SomeIp parser for streams (TCP/UDP) or PCAP/PCAPNG files.
+    SomeIp(SomeIpParserSettings),
+    /// A pseudo-parser for reading plain text data without processing.
+    Text(()),
+}
+
+/// Settings for the DLT parser.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub struct DltParserSettings {
+    /// Configuration for filtering DLT messages.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "DltFilterConfig"))]
+    pub filter_config: Option<DltFilterConfig>,
+    /// Paths to FIBEX files for additional interpretation of `payload` content.
+    pub fibex_file_paths: Option<Vec<String>>,
+    /// Indicates whether the source contains a `StorageHeader`. Set to `true` if applicable.
+    pub with_storage_header: bool,
+    /// Timezone for timestamp adjustment. If specified, timestamps are converted to this timezone.
+    pub tz: Option<String>,
+    /// Internal field that stores FIBEX schema metadata. Not exposed to the client.
+    #[serde(skip)]
+    pub fibex_metadata: Option<dlt_core::fibex::FibexMetadata>,
+}
+
+/// Settings for the SomeIp parser.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub struct SomeIpParserSettings {
+    /// Paths to FIBEX files for additional interpretation of `payload` content.
+    pub fibex_file_paths: Option<Vec<String>>,
+}
+
+/// Describes the transport source for a session.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub enum Transport {
+    /// Terminal command execution.
+    Process(ProcessTransportConfig),
+    /// TCP connection.
+    TCP(TCPTransportConfig),
+    /// UDP connection.
+    UDP(UDPTransportConfig),
+    /// Serial port connection.
+    Serial(SerialTransportConfig),
+}
+
+/// Configuration for executing terminal commands.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub struct ProcessTransportConfig {
+    /// The working directory for the command.
+    pub cwd: PathBuf,
+    /// The command to execute.
+    pub command: String,
+    /// Environment variables. If empty, the default environment variables are used.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "Map<string, string>"))]
+    pub envs: HashMap<String, String>,
+}
+
+/// Configuration for serial port connections.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub struct SerialTransportConfig {
+    /// The path to the serial port.
+    pub path: String,
+    /// The baud rate for the connection.
+    pub baud_rate: u32,
+    /// The number of data bits per frame.
+    pub data_bits: u8,
+    /// The flow control setting.
+    pub flow_control: u8,
+    /// The parity setting.
+    pub parity: u8,
+    /// The number of stop bits.
+    pub stop_bits: u8,
+    /// The delay in sending data, in milliseconds.
+    pub send_data_delay: u8,
+    /// Whether the connection is exclusive.
+    pub exclusive: bool,
+}
+
+/// Configuration for TCP connections.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub struct TCPTransportConfig {
+    /// The address to bind the TCP connection to.
+    pub bind_addr: String,
+}
+
+/// Configuration for UDP connections.
+#[derive(Clone, Debug, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub struct UDPTransportConfig {
+    /// The address to bind the UDP connection to.
+    pub bind_addr: String,
+    /// A list of multicast configurations.
+    pub multicast: Vec<MulticastInfo>,
+}
+
+/// Supported file formats for observation.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub enum FileFormat {
+    PcapNG,
+    PcapLegacy,
+    Text,
+    Binary,
+}
+
+/// Describes the source of data for observation.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub enum ObserveOrigin {
+    /// The source is a single file.
+    File(String, FileFormat, PathBuf),
+    /// The source is multiple files concatenated into a session.
+    Concat(Vec<(String, FileFormat, PathBuf)>),
+    /// The source is a stream.
+    Stream(String, Transport),
+}
+
+/// Options for observing data within a session.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "observe.ts")
+)]
+pub struct ObserveOptions {
+    /// The description of the data source.
+    pub origin: ObserveOrigin,
+    /// The parser configuration to be applied.
+    pub parser: ParserType,
+}
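
A small usage sketch (not part of this diff) for `MulticastInfo` together with the `multicast_addr()` helper from `observe/extending.rs`; the address is an arbitrary example:

```rust
use std::net::IpAddr;

fn parse_group() -> Result<IpAddr, NetError> {
    let info = MulticastInfo {
        multiaddr: String::from("239.255.42.99"),
        // `None` lets the system pick the local interface.
        interface: None,
    };
    info.multicast_addr()
}
```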
diff --git a/application/apps/indexer/stypes/src/observe/nodejs.rs b/application/apps/indexer/stypes/src/observe/nodejs.rs
new file mode 100644
index 0000000000..b79f2bd9d4
--- /dev/null
+++ b/application/apps/indexer/stypes/src/observe/nodejs.rs
@@ -0,0 +1,14 @@
+use crate::*;
+
+try_into_js!(MulticastInfo);
+try_into_js!(ParserType);
+try_into_js!(DltParserSettings);
+try_into_js!(SomeIpParserSettings);
+try_into_js!(Transport);
+try_into_js!(ProcessTransportConfig);
+try_into_js!(SerialTransportConfig);
+try_into_js!(TCPTransportConfig);
+try_into_js!(UDPTransportConfig);
+try_into_js!(FileFormat);
+try_into_js!(ObserveOrigin);
+try_into_js!(ObserveOptions);
diff --git a/application/apps/indexer/stypes/src/observe/proptest.rs b/application/apps/indexer/stypes/src/observe/proptest.rs
new file mode 100644
index 0000000000..f67d0b2d30
--- /dev/null
+++ b/application/apps/indexer/stypes/src/observe/proptest.rs
@@ -0,0 +1,271 @@
+use crate::*;
+use dlt_core::filtering::DltFilterConfig;
+
+impl Arbitrary for MulticastInfo {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (any::<String>(), any::<Option<String>>())
+            .prop_map(|(multiaddr, interface)| MulticastInfo {
+                multiaddr,
+                interface,
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for UdpConnectionInfo {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::collection::vec(any::<MulticastInfo>(), 0..10)
+            .prop_map(|multicast_addr| UdpConnectionInfo { multicast_addr })
+            .boxed()
+    }
+}
+
+impl Arbitrary for ParserType {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<DltParserSettings>().prop_map(ParserType::Dlt),
+            any::<SomeIpParserSettings>().prop_map(ParserType::SomeIp),
+            Just(ParserType::Text(())),
+        ]
+        .boxed()
+    }
+}
+
+#[derive(Debug)]
+struct DltFilterConfigWrapper(pub DltFilterConfig);
+
+impl Arbitrary for DltFilterConfigWrapper {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<Option<u8>>(),
+            any::<Option<Vec<String>>>(),
+            any::<Option<Vec<String>>>(),
+            any::<Option<Vec<String>>>(),
+            any::<i32>(),
+            any::<i32>(),
+        )
+            .prop_map(
+                |(min_log_level, app_ids, ecu_ids, context_ids, app_id_count, context_id_count)| {
+                    DltFilterConfigWrapper(DltFilterConfig {
+                        min_log_level,
+                        app_ids,
+                        ecu_ids,
+                        context_ids,
+                        app_id_count: app_id_count as i64,
+                        context_id_count: context_id_count as i64,
+                    })
+                },
+            )
+            .boxed()
+    }
+}
+
+impl Arbitrary for DltParserSettings {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<Option<DltFilterConfigWrapper>>().prop_map(|wrp| wrp.map(|wrp| wrp.0)),
+            any::<Option<Vec<String>>>(),
+            any::<bool>(),
+            any::<Option<String>>(),
+            Just(None), // fibex_metadata is skipped
+        )
+            .prop_map(
+                |(filter_config, fibex_file_paths, with_storage_header, tz, fibex_metadata)| {
+                    DltParserSettings {
+                        filter_config,
+                        fibex_file_paths,
+                        with_storage_header,
+                        tz,
+                        fibex_metadata,
+                    }
+                },
+            )
+            .boxed()
+    }
+}
+
+impl Arbitrary for SomeIpParserSettings {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        any::<Option<Vec<String>>>()
+            .prop_map(|fibex_file_paths| SomeIpParserSettings { fibex_file_paths })
+            .boxed()
+    }
+}
+
+impl Arbitrary for Transport {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<ProcessTransportConfig>().prop_map(Transport::Process),
+            any::<TCPTransportConfig>().prop_map(Transport::TCP),
+            any::<UDPTransportConfig>().prop_map(Transport::UDP),
+            any::<SerialTransportConfig>().prop_map(Transport::Serial),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for ProcessTransportConfig {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<PathBuf>(),
+            any::<String>(),
+            any::<HashMap<String, String>>(),
+        )
+            .prop_map(|(cwd, command, envs)| ProcessTransportConfig { cwd, command, envs })
+            .boxed()
+    }
+}
+
+impl Arbitrary for SerialTransportConfig {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<String>(),
+            any::<u32>(),
+            any::<u8>(),
+            any::<u8>(),
+            any::<u8>(),
+            any::<u8>(),
+            any::<u8>(),
+            any::<bool>(),
+        )
+            .prop_map(
+                |(
+                    path,
+                    baud_rate,
+                    data_bits,
+                    flow_control,
+                    parity,
+                    stop_bits,
+                    send_data_delay,
+                    exclusive,
+                )| {
+                    SerialTransportConfig {
+                        path,
+                        baud_rate,
+                        data_bits,
+                        flow_control,
+                        parity,
+                        stop_bits,
+                        send_data_delay,
+                        exclusive,
+                    }
+                },
+            )
+            .boxed()
+    }
+}
+
+impl Arbitrary for TCPTransportConfig {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        any::<String>()
+            .prop_map(|bind_addr| TCPTransportConfig { bind_addr })
+            .boxed()
+    }
+}
+
+impl Arbitrary for UDPTransportConfig {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<String>(),
+            prop::collection::vec(any::<MulticastInfo>(), 0..10),
+        )
+            .prop_map(|(bind_addr, multicast)| UDPTransportConfig {
+                bind_addr,
+                multicast,
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for FileFormat {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            Just(FileFormat::PcapNG),
+            Just(FileFormat::PcapLegacy),
+            Just(FileFormat::Text),
+            Just(FileFormat::Binary),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for ObserveOrigin {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            (any::<String>(), any::<FileFormat>(), any::<PathBuf>(),)
+                .prop_map(|(filename, format, path)| ObserveOrigin::File(filename, format, path)),
+            prop::collection::vec(
+                (any::<String>(), any::<FileFormat>(), any::<PathBuf>(),),
+                0..10,
+            )
+            .prop_map(ObserveOrigin::Concat),
+            (any::<String>(), any::<Transport>(),)
+                .prop_map(|(stream, transport)| ObserveOrigin::Stream(stream, transport)),
+        ]
+        .boxed()
+    }
+}
+
+impl Arbitrary for ObserveOptions {
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (any::<ObserveOrigin>(), any::<ParserType>())
+            .prop_map(|(origin, parser)| ObserveOptions { origin, parser })
+            .boxed()
+    }
+}
+
+test_msg!(ObserveOptions, TESTS_USECASE_COUNT);
+test_msg!(ObserveOrigin, TESTS_USECASE_COUNT);
+test_msg!(FileFormat, TESTS_USECASE_COUNT);
+test_msg!(UDPTransportConfig, TESTS_USECASE_COUNT);
+test_msg!(TCPTransportConfig, TESTS_USECASE_COUNT);
+test_msg!(SerialTransportConfig, TESTS_USECASE_COUNT);
+test_msg!(ProcessTransportConfig, TESTS_USECASE_COUNT);
+test_msg!(Transport, TESTS_USECASE_COUNT);
+test_msg!(SomeIpParserSettings, TESTS_USECASE_COUNT);
+test_msg!(DltParserSettings, TESTS_USECASE_COUNT);
+test_msg!(ParserType, TESTS_USECASE_COUNT);
+test_msg!(UdpConnectionInfo, TESTS_USECASE_COUNT);
+test_msg!(MulticastInfo, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/operations/mod.rs b/application/apps/indexer/stypes/src/operations/mod.rs
new file mode 100644
index 0000000000..03c8e26b89
--- /dev/null
+++ b/application/apps/indexer/stypes/src/operations/mod.rs
@@ -0,0 +1,126 @@
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "operations.ts")
+)]
+pub struct NearestPosition {
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub index: u64, // Position in search results
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub position: u64, // Position in original stream/file
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "operations.ts")
+)]
+pub struct ResultNearestPosition(pub Option<NearestPosition>);
+
+/// A single chart point: `(row_number, min_value_in_range, max_value_in_range, value)`.
+/// `value` can be the last value in the range or some kind of average.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "operations.ts")
+)]
+pub struct Point {
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub row: u64,
+    pub min: f64,
+    pub max: f64,
+    pub y_value: f64,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "operations.ts")
+)]
+#[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "Map<number, Point[]>"))]
+pub struct ResultSearchValues(pub HashMap<u8, Vec<Point>>);
+
+/// Scaled chart data
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "operations.ts")
+)]
+pub struct ResultScaledDistribution(pub Vec<Vec<(u8, u16)>>);
+
+/// Used to deliver the results of extracting values. This is used within
+/// the scope of the chart feature.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "operations.ts")
+)]
+pub struct ExtractedMatchValue {
+    /// The index of the log entry (row number)
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub index: u64,
+    /// List of matches:
+    /// - `usize`: the index of the filter
+    /// - `Vec<String>`: the list of extracted values
+    pub values: Vec<(usize, Vec<String>)>,
+}
+
+/// The list of `ExtractedMatchValue`
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "operations.ts")
+)]
+pub struct ResultExtractedMatchValues(pub Vec<ExtractedMatchValue>);
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "operations.ts")
+)]
+#[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+pub struct ResultU64(pub u64);
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "operations.ts")
+)]
+pub struct ResultBool(pub bool);
+
+/// Used only for debugging the session lifecycle
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "operations.ts")
+)]
+pub struct ResultSleep {
+    pub sleep_well: bool,
+}
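
A short sketch (not part of this diff) of how the chart-oriented results above might be consumed; the drawing callback is hypothetical:

```rust
fn feed_chart(values: &ResultSearchValues, mut draw: impl FnMut(u8, u64, f64)) {
    // `ResultSearchValues` maps a filter id to the points extracted for that filter.
    for (filter_id, points) in values.0.iter() {
        for p in points {
            // `row` is the log entry number, `y_value` the value plotted for it.
            draw(*filter_id, p.row, p.y_value);
        }
    }
}
```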
diff --git a/application/apps/indexer/stypes/src/operations/nodejs.rs b/application/apps/indexer/stypes/src/operations/nodejs.rs
new file mode 100644
index 0000000000..eb6e60c052
--- /dev/null
+++ b/application/apps/indexer/stypes/src/operations/nodejs.rs
@@ -0,0 +1,12 @@
+use crate::*;
+
+try_into_js!(NearestPosition);
+try_into_js!(ResultNearestPosition);
+try_into_js!(Point);
+try_into_js!(ResultSearchValues);
+try_into_js!(ResultScaledDistribution);
+try_into_js!(ExtractedMatchValue);
+try_into_js!(ResultExtractedMatchValues);
+try_into_js!(ResultU64);
+try_into_js!(ResultBool);
+try_into_js!(ResultSleep);
diff --git a/application/apps/indexer/stypes/src/operations/proptest.rs b/application/apps/indexer/stypes/src/operations/proptest.rs
new file mode 100644
index 0000000000..a5c50a681c
--- /dev/null
+++ b/application/apps/indexer/stypes/src/operations/proptest.rs
@@ -0,0 +1,166 @@
+use crate::*;
+
+impl Arbitrary for NearestPosition {
+    /// Implements the `Arbitrary` trait for `NearestPosition` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<u32>().prop_map(|n| n as u64),
+            any::<u32>().prop_map(|n| n as u64),
+        )
+            .prop_map(|(index, position)| NearestPosition { index, position })
+            .boxed()
+    }
+}
+
+impl Arbitrary for ResultNearestPosition {
+    /// Implements the `Arbitrary` trait for `ResultNearestPosition` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::option::of(NearestPosition::arbitrary())
+            .prop_map(ResultNearestPosition)
+            .boxed()
+    }
+}
+
+impl Arbitrary for Point {
+    /// Implements the `Arbitrary` trait for `Point` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<u32>().prop_map(|n| n as u64),
+            any::<f32>().prop_map(|n| n as f64),
+            any::<f32>().prop_map(|n| n as f64),
+            any::<f32>().prop_map(|n| n as f64),
+        )
+            .prop_map(|(row, min, max, y_value)| Point {
+                row,
+                min,
+                max,
+                y_value,
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for ResultSearchValues {
+    /// Implements the `Arbitrary` trait for `ResultSearchValues` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        any::<HashMap<u8, Vec<Point>>>()
+            .prop_map(ResultSearchValues)
+            .boxed()
+    }
+}
+
+impl Arbitrary for ResultScaledDistribution {
+    /// Implements the `Arbitrary` trait for `ResultScaledDistribution` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::collection::vec(
+            prop::collection::vec((any::<u8>(), any::<u16>()), 0..10),
+            0..10,
+        )
+        .prop_map(ResultScaledDistribution)
+        .boxed()
+    }
+}
+
+impl Arbitrary for ExtractedMatchValue {
+    /// Implements the `Arbitrary` trait for `ExtractedMatchValue` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (
+            any::<u32>().prop_map(|n| n as u64),
+            prop::collection::vec(
+                (
+                    any::<u32>().prop_map(|n| n as usize),
+                    prop::collection::vec(any::<String>(), 0..10),
+                ),
+                0..10,
+            ),
+        )
+            .prop_map(|(index, values)| ExtractedMatchValue { index, values })
+            .boxed()
+    }
+}
+
+impl Arbitrary for ResultExtractedMatchValues {
+    /// Implements the `Arbitrary` trait for `ResultExtractedMatchValues` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop::collection::vec(ExtractedMatchValue::arbitrary(), 0..10)
+            .prop_map(ResultExtractedMatchValues)
+            .boxed()
+    }
+}
+
+impl Arbitrary for ResultU64 {
+    /// Implements the `Arbitrary` trait for `ResultU64` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        any::<u32>()
+            .prop_map(|n| n as u64)
+            .prop_map(ResultU64)
+            .boxed()
+    }
+}
+
+impl Arbitrary for ResultBool {
+    /// Implements the `Arbitrary` trait for `ResultBool` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        any::<bool>().prop_map(ResultBool).boxed()
+    }
+}
+
+impl Arbitrary for ResultSleep {
+    /// Implements the `Arbitrary` trait for `ResultSleep` to generate random values for
+    /// property-based testing using the `proptest` framework.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        any::<bool>()
+            .prop_map(|sleep_well| ResultSleep { sleep_well })
+            .boxed()
+    }
+}
+
+test_msg!(NearestPosition, TESTS_USECASE_COUNT);
+test_msg!(ResultNearestPosition, TESTS_USECASE_COUNT);
+test_msg!(Point, TESTS_USECASE_COUNT);
+test_msg!(ResultSearchValues, TESTS_USECASE_COUNT);
+test_msg!(ResultScaledDistribution, TESTS_USECASE_COUNT);
+test_msg!(ExtractedMatchValue, TESTS_USECASE_COUNT);
+test_msg!(ResultExtractedMatchValues, TESTS_USECASE_COUNT);
+test_msg!(ResultU64, TESTS_USECASE_COUNT);
+test_msg!(ResultBool, TESTS_USECASE_COUNT);
+test_msg!(ResultSleep, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/progress/extending.rs b/application/apps/indexer/stypes/src/progress/extending.rs
new file mode 100644
index 0000000000..c40b14cce4
--- /dev/null
+++ b/application/apps/indexer/stypes/src/progress/extending.rs
@@ -0,0 +1,15 @@
+use crate::*;
+
+impl Ticks {
+    /// Checks if the operation associated with the `Ticks` instance is complete.
+    ///
+    /// # Returns
+    /// - `true` if the `count` equals `total` and `total` is not `None`.
+    /// - `false` otherwise.
+    ///
+    /// # Details
+    /// - If `total` is `None`, the operation is considered incomplete.
+    pub fn done(&self) -> bool {
+        self.total.is_some_and(|total| self.count == total)
+    }
+}
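
A brief sketch (not part of this diff) of how `done()` behaves for the two relevant cases:

```rust
fn ticks_done_examples() {
    let finished = Ticks { count: 100, state: None, total: Some(100) };
    assert!(finished.done());

    // With `total` unknown the operation is treated as still running.
    let running = Ticks { count: 100, state: None, total: None };
    assert!(!running.done());
}
```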
diff --git a/application/apps/indexer/stypes/src/progress/mod.rs b/application/apps/indexer/stypes/src/progress/mod.rs
new file mode 100644
index 0000000000..6cbda01d3c
--- /dev/null
+++ b/application/apps/indexer/stypes/src/progress/mod.rs
@@ -0,0 +1,66 @@
+#[cfg(feature = "rustcore")]
+mod extending;
+#[cfg(feature = "nodejs")]
+mod nodejs;
+#[cfg(test)]
+mod proptest;
+
+use crate::*;
+
+/// Represents a notification about an event (including potential errors)
+/// related to processing a specific log entry, if such data is available.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "progress.ts")
+)]
+pub struct Notification {
+    /// The severity level of the event.
+    pub severity: Severity,
+    /// The content or message describing the event.
+    pub content: String,
+    /// The log entry number that triggered the event, if applicable.
+    pub line: Option<usize>,
+}
+
+/// Describes the progress of an operation.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "progress.ts")
+)]
+pub enum Progress {
+    /// Represents the current progress status.
+    Ticks(Ticks),
+    /// A notification related to the progress of the operation.
+    Notification(Notification),
+    /// Indicates that the operation has been stopped.
+    Stopped,
+}
+
+/// Provides detailed information about the progress of an operation.
+#[derive(Debug, Serialize, Deserialize, Clone, Default)]
+#[extend::encode_decode]
+#[cfg_attr(
+    all(test, feature = "test_and_gen"),
+    derive(TS),
+    ts(export, export_to = "progress.ts")
+)]
+pub struct Ticks {
+    /// The current progress count, typically representing `n` out of `100%`.
+    #[cfg_attr(all(test, feature = "test_and_gen"), ts(type = "number"))]
+    pub count: u64,
+    /// The name of the current progress stage, for user display purposes.
+    pub state: Option<String>,
+    /// The total progress counter. Usually `100`, but for file operations,
+    /// it might represent the file size, where `count` indicates the number of bytes read.
+    #[cfg_attr(
+        all(test, feature = "test_and_gen"),
+        ts(type = "number | null | undefined")
+    )]
+    pub total: Option<u64>,
+}
diff --git a/application/apps/indexer/stypes/src/progress/nodejs.rs b/application/apps/indexer/stypes/src/progress/nodejs.rs
new file mode 100644
index 0000000000..29a053090a
--- /dev/null
+++ b/application/apps/indexer/stypes/src/progress/nodejs.rs
@@ -0,0 +1,5 @@
+use crate::*;
+
+try_into_js!(Notification);
+try_into_js!(Progress);
+try_into_js!(Ticks);
diff --git a/application/apps/indexer/stypes/src/progress/proptest.rs b/application/apps/indexer/stypes/src/progress/proptest.rs
new file mode 100644
index 0000000000..65600daa67
--- /dev/null
+++ b/application/apps/indexer/stypes/src/progress/proptest.rs
@@ -0,0 +1,71 @@
+use crate::*;
+
+impl Arbitrary for Notification {
+    /// Implements the `Arbitrary` trait for `Notification` to generate random instances
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates:
+    ///   - `severity`: A random `Severity` value.
+    ///   - `content`: A random `String`.
+    ///   - `line`: An optional random `usize` value.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (any::<Severity>(), any::<String>(), any::<Option<u32>>())
+            .prop_map(|(severity, content, line)| Notification {
+                severity,
+                content,
+                line: line.map(|n| n as usize),
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for Ticks {
+    /// Implements the `Arbitrary` trait for `Ticks` to generate random instances
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Generates:
+    ///   - `count`: A random `u64` value.
+    ///   - `state`: An optional random `String`.
+    ///   - `total`: An optional random `u64` value.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        (any::<u32>(), any::<Option<String>>(), any::<Option<u32>>())
+            .prop_map(|(count, state, total)| Ticks {
+                count: count as u64,
+                state,
+                total: total.map(|n| n as u64),
+            })
+            .boxed()
+    }
+}
+
+impl Arbitrary for Progress {
+    /// Implements the `Arbitrary` trait for `Progress` to generate random instances
+    /// for property-based testing using the `proptest` framework.
+    ///
+    /// # Details
+    /// - Randomly generates one of the following variants:
+    ///   - `Ticks` with a random `Ticks` value.
+    ///   - `Notification` with a random `Notification` value.
+    ///   - `Stopped` as a constant value.
+    type Parameters = ();
+    type Strategy = BoxedStrategy<Self>;
+
+    fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {
+        prop_oneof![
+            any::<Ticks>().prop_map(Progress::Ticks),
+            any::<Notification>().prop_map(Progress::Notification),
+            Just(Progress::Stopped),
+        ]
+        .boxed()
+    }
+}
+
+test_msg!(Progress, TESTS_USECASE_COUNT);
diff --git a/application/apps/indexer/stypes/src/tests.rs b/application/apps/indexer/stypes/src/tests.rs
new file mode 100644
index 0000000000..ed1a9f40dd
--- /dev/null
+++ b/application/apps/indexer/stypes/src/tests.rs
@@ -0,0 +1,227 @@
+use std::path::PathBuf;
+/// The number of test cases to generate for use in test scenarios.
+pub const TESTS_USECASE_COUNT: usize = 100;
+
+/// The name of the environment variable that specifies the path for storing test data.
+/// If this variable is not set, the default path will be used.
+pub const OUTPUT_PATH_ENVVAR: &str = "CHIPMUNK_PROTOCOL_TEST_OUTPUT";
+
+/// This function returns the path for writing test data (for testing in a different context).
+/// The function checks the value of the `CHIPMUNK_PROTOCOL_TEST_OUTPUT` environment variable.
+/// If the variable is defined and non-empty, its value is used as the path for writing test data; otherwise an error is returned.
+pub fn get_output_path() -> Result<PathBuf, String> {
+    std::env::var(OUTPUT_PATH_ENVVAR)
+        .map_err(|err| err.to_string())
+        .and_then(|s| {
+            if s.is_empty() {
+                Err(String::from("No valid path"))
+            } else {
+                Ok(s)
+            }
+        })
+        .map(PathBuf::from)
+}
+
+/// The `test_msg` macro creates a `proptest` for the specified data type. The macro also supports
+/// generic types. For example:
+/// ```ignore
+/// test_msg!(Progress, 10);
+/// test_msg!(CommandOutcome<()>, 10);
+/// test_msg!(CommandOutcome<bool>, 10);
+/// test_msg!(CommandOutcome<Option<String>>, 10);
+/// ```
+/// The second numeric argument specifies the number of variants to generate for each type.
+/// During testing, a separate directory is created for each type. For each variant of the type,
+/// a file with a sequential name (e.g., `1.bin`, `2.bin`, ...) will be created.
+///
+/// **WARNING**: When running tests, the folder specified in the `CHIPMUNK_PROTOCOL_TEST_OUTPUT`
+/// environment variable will be completely deleted. Be extremely cautious when setting the value
+/// of the `CHIPMUNK_PROTOCOL_TEST_OUTPUT` environment variable.
+#[macro_export]
+macro_rules! test_msg {
+    ($type:ident, $exp_count:expr) => {
+        paste::item! {
+
+            #[cfg(feature = "test_and_gen")]
+            proptest! {
+                #![proptest_config(ProptestConfig {
+                    max_shrink_iters: 50,
+                    ..ProptestConfig::with_cases(500)
+                })]
+
+                #[allow(non_snake_case)]
+                #[test]
+                #[ignore]
+                fn [< write_test_data_for_ $type >](cases in proptest::collection::vec($type::arbitrary(), $exp_count)) {
+                    let Ok(output_path) = get_output_path() else {
+                        return Ok(());
+                    };
+                    use std::fs::{File, create_dir_all};
+                    use std::io::{Write};
+                    use remove_dir_all::remove_dir_all;
+
+                    let dest = output_path.join(stringify!($type));
+                    if dest.exists() {
+                        remove_dir_all(&dest).expect("Folder for tests has been cleaned");
+                    }
+                    if !dest.exists()  {
+                        create_dir_all(&dest).expect("Folder for tests has been created");
+                    }
+                    for (n, case) in cases.into_iter().enumerate() {
+                        let bytes = case.encode();
+                        assert!(bytes.is_ok());
+                        let bytes = bytes.unwrap();
+                        let mut file = File::create(dest.join(format!("{n}.raw")))?;
+                        assert!(file.write_all(&bytes).is_ok());
+                        assert!(file.flush().is_ok());
+                        let msg = $type::decode(&bytes);
+                        if let Err(err) = &msg {
+                            eprintln!("Decoding error: {err:?}");
+                        }
+                        assert!(msg.is_ok());
+                    }
+
+                }
+
+            }
+        }
+    };
+
+    ($type:ident<()>, $exp_count:expr) => {
+        paste::item! {
+
+            #[cfg(feature = "test_and_gen")]
+            proptest! {
+                #![proptest_config(ProptestConfig {
+                    max_shrink_iters: 50,
+                    ..ProptestConfig::with_cases(500)
+                })]
+
+                #[allow(non_snake_case)]
+                #[test]
+                #[ignore]
+                fn [< write_test_data_for_ $type Void >](cases in proptest::collection::vec($type::<()>::arbitrary(), $exp_count)) {
+                    let Ok(output_path) = get_output_path() else {
+                        return Ok(());
+                    };
+                    use std::fs::{File, create_dir_all};
+                    use std::io::{Write};
+                    use remove_dir_all::remove_dir_all;
+
+                    let dest = output_path.join(format!("{}_Void",stringify!($type)));
+                    if dest.exists() {
+                        remove_dir_all(&dest).expect("Folder for tests has been cleaned");
+                    }
+                    if !dest.exists()  {
+                        create_dir_all(&dest).expect("Folder for tests has been created");
+                    }
+                    for (n, case) in cases.into_iter().enumerate() {
+                        let bytes = case.encode();
+                        assert!(bytes.is_ok());
+                        let bytes = bytes.unwrap();
+                        let mut file = File::create(dest.join(format!("{n}.raw")))?;
+                        assert!(file.write_all(&bytes).is_ok());
+                        assert!(file.flush().is_ok());
+                        let msg = $type::<()>::decode(&bytes);
+                        if let Err(err) = &msg {
+                            eprintln!("Decoding error: {err:?}");
+                        }
+                        assert!(msg.is_ok());
+                    }
+
+                }
+
+            }
+        }
+    };
+
+    ($type:ident<$generic:ident>, $exp_count:expr) => {
+        paste::item! {
+
+            #[cfg(feature = "test_and_gen")]
+            proptest! {
+                #![proptest_config(ProptestConfig {
+                    max_shrink_iters: 50,
+                    ..ProptestConfig::with_cases(500)
+                })]
+
+                #[allow(non_snake_case)]
+                #[test]
+                #[ignore]
+                fn [< write_test_data_for_ $type $generic >](cases in proptest::collection::vec($type::<$generic>::arbitrary(), $exp_count)) {
+                    let Ok(output_path) = get_output_path() else {
+                        return Ok(());
+                    };
+                    use std::fs::{File, create_dir_all};
+                    use std::io::{Write};
+                    use remove_dir_all::remove_dir_all;
+
+                    let dest = output_path.join(format!("{}_{}",stringify!($type), stringify!($generic)));
+                    if dest.exists() {
+                        remove_dir_all(&dest).expect("Folder for tests has been cleaned");
+                    }
+                    if !dest.exists()  {
+                        create_dir_all(&dest).expect("Folder for tests has been created");
+                    }
+                    for (n, case) in cases.into_iter().enumerate() {
+                        let bytes = case.encode();
+                        assert!(bytes.is_ok());
+                        let bytes = bytes.unwrap();
+                        let mut file = File::create(dest.join(format!("{n}.raw")))?;
+                        assert!(file.write_all(&bytes).is_ok());
+                        assert!(file.flush().is_ok());
+                        let msg = $type::<$generic>::decode(&bytes);
+                        if let Err(err) = &msg {
+                            eprintln!("Decoding error: {err:?}");
+                        }
+                    }
+                }
+            }
+        }
+    };
+
+    ($type:ident<$generic:ident<$nested:ident>>, $exp_count:expr) => {
+        paste::item! {
+
+            #[cfg(feature = "test_and_gen")]
+            proptest! {
+                #![proptest_config(ProptestConfig {
+                    max_shrink_iters: 50,
+                    ..ProptestConfig::with_cases(500)
+                })]
+
+                #[allow(non_snake_case)]
+                #[test]
+                #[ignore]
+                fn [< write_test_data_for_ $type $generic $nested>](cases in proptest::collection::vec($type::<$generic<$nested>>::arbitrary(), $exp_count)) {
+                    let Ok(output_path) = get_output_path() else {
+                        return Ok(());
+                    };
+                    use std::fs::{File, create_dir_all};
+                    use std::io::{Write};
+                    use remove_dir_all::remove_dir_all;
+
+                    let dest = output_path.join(format!("{}_{}_{}",stringify!($type), stringify!($generic), stringify!($nested)));
+                    if dest.exists() {
+                        remove_dir_all(&dest).expect("Folder for tests has been cleaned");
+                    }
+                    if !dest.exists()  {
+                        create_dir_all(&dest).expect("Folder for tests has been created");
+                    }
+                    for (n, case) in cases.into_iter().enumerate() {
+                        let bytes = case.encode();
+                        assert!(bytes.is_ok());
+                        let bytes = bytes.unwrap();
+                        let mut file = File::create(dest.join(format!("{n}.raw")))?;
+                        assert!(file.write_all(&bytes).is_ok());
+                        assert!(file.flush().is_ok());
+                        let msg = $type::<$generic<$nested>>::decode(&bytes);
+                        if let Err(err) = &msg {
+                            eprintln!("Decoding error: {err:?}");
+                        }
+                    }
+                }
+            }
+        }
+    };
+}
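
A small sketch (not part of this diff) of how the output location is resolved before the generated `write_test_data_for_*` tests write anything; the directory is a hypothetical example:

```rust
#[test]
fn resolves_output_dir_from_env() {
    // The generated tests skip silently when the variable is unset or empty.
    std::env::set_var(OUTPUT_PATH_ENVVAR, "/tmp/chipmunk_protocol_tests");
    let out = get_output_path().expect("variable is set and non-empty");
    assert_eq!(out, std::path::PathBuf::from("/tmp/chipmunk_protocol_tests"));
}
```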
diff --git a/application/apps/indexer/tools/extend/Cargo.lock b/application/apps/indexer/tools/extend/Cargo.lock
new file mode 100644
index 0000000000..6b9fcdcce5
--- /dev/null
+++ b/application/apps/indexer/tools/extend/Cargo.lock
@@ -0,0 +1,47 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "extend"
+version = "0.1.0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.78"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.52"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
diff --git a/application/apps/indexer/tools/extend/Cargo.toml b/application/apps/indexer/tools/extend/Cargo.toml
new file mode 100644
index 0000000000..a1c54c654b
--- /dev/null
+++ b/application/apps/indexer/tools/extend/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "extend"
+version = "0.1.0"
+edition = "2018"
+
+[lib]
+proc-macro = true
+
+[dependencies]
+proc-macro2 = "1.0"
+quote = "1.0"
+syn = { version="2.0", features=["full","fold"] }
diff --git a/application/apps/indexer/tools/extend/src/lib.rs b/application/apps/indexer/tools/extend/src/lib.rs
new file mode 100644
index 0000000000..4e87171ad7
--- /dev/null
+++ b/application/apps/indexer/tools/extend/src/lib.rs
@@ -0,0 +1,75 @@
+/// The `extend` crate is designed to simplify the generation of `encode` and `decode` methods
+/// for every public type in the `stypes` crate.
+///
+/// For example, the following code:
+///
+/// ```ignore
+/// #[derive(Debug, Serialize, Deserialize, Clone)]
+/// #[extend::encode_decode]
+/// pub struct Notification {
+///     pub severity: Severity,
+///     pub content: String,
+///     pub line: Option<usize>,
+/// }
+/// ```
+///
+/// Is transformed into:
+///
+/// ```ignore
+/// #[derive(Debug, Serialize, Deserialize, Clone)]
+/// pub struct Notification {
+///     pub severity: Severity,
+///     pub content: String,
+///     pub line: Option<usize>,
+/// }
+///
+/// impl Notification {
+///     pub fn encode(&self) -> Result<Vec<u8>, String> {
+///         bincode::serialize(self).map_err(|e| e.to_string())
+///     }
+///     
+///     pub fn decode(buf: &[u8]) -> Result<Self, String> {
+///         bincode::deserialize(buf).map_err(|e| e.to_string())
+///     }
+/// }
+/// ```
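+///
+/// Generic parameters and `where` clauses declared on the type are carried over to the
+/// generated `impl` block (see `split_for_impl` below). As a sketch with a hypothetical
+/// generic enum (not taken from `stypes`):
+///
+/// ```ignore
+/// #[derive(Debug, Serialize, Deserialize, Clone)]
+/// #[extend::encode_decode]
+/// pub enum Outcome<T>
+/// where
+///     T: Serialize + serde::de::DeserializeOwned,
+/// {
+///     Finished(T),
+///     Cancelled,
+/// }
+///
+/// // Expands into the enum above plus:
+/// impl<T> Outcome<T>
+/// where
+///     T: Serialize + serde::de::DeserializeOwned,
+/// {
+///     pub fn encode(&self) -> Result<Vec<u8>, String> {
+///         bincode::serialize(self).map_err(|e| e.to_string())
+///     }
+///
+///     pub fn decode(buf: &[u8]) -> Result<Self, String> {
+///         bincode::deserialize(buf).map_err(|e| e.to_string())
+///     }
+/// }
+/// ```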
+use proc_macro::TokenStream;
+use quote::quote;
+use syn::{parse_macro_input, Item};
+
+#[proc_macro_attribute]
+pub fn encode_decode(_: TokenStream, input: TokenStream) -> TokenStream {
+    let item = parse_macro_input!(input as syn::Item);
+
+    let item_clone = item.clone();
+
+    let (entity_name, generics) = match &item {
+        Item::Struct(s) => (&s.ident, &s.generics),
+        Item::Enum(e) => (&e.ident, &e.generics),
+        _ => {
+            return syn::Error::new_spanned(
+                &item,
+                "encode_decode can be applied only to structs and enums",
+            )
+            .to_compile_error()
+            .into();
+        }
+    };
+
+    let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+
+    TokenStream::from(quote! {
+
+        #item_clone
+
+        impl #impl_generics #entity_name #ty_generics #where_clause {
+            pub fn encode(&self) -> Result<Vec<u8>, String> {
+                bincode::serialize(self).map_err(|e| e.to_string())
+            }
+
+            pub fn decode(buf: &[u8]) -> Result<Self, String> {
+                bincode::deserialize(buf).map_err(|e| e.to_string())
+            }
+        }
+    })
+}
diff --git a/application/apps/protocol/.gitignore b/application/apps/protocol/.gitignore
new file mode 100644
index 0000000000..aae35b0c0c
--- /dev/null
+++ b/application/apps/protocol/.gitignore
@@ -0,0 +1,3 @@
+./target
+!./pkg
+!*.js
\ No newline at end of file
diff --git a/application/apps/protocol/Cargo.lock b/application/apps/protocol/Cargo.lock
new file mode 100644
index 0000000000..2850dfd0bf
--- /dev/null
+++ b/application/apps/protocol/Cargo.lock
@@ -0,0 +1,578 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 4
+
+[[package]]
+name = "bincode"
+version = "1.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
+
+[[package]]
+name = "byteorder"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+
+[[package]]
+name = "bytes"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da"
+
+[[package]]
+name = "cc"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47"
+dependencies = [
+ "shlex",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "console_error_panic_hook"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "convert_case"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
+
+[[package]]
+name = "derive_more"
+version = "0.99.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce"
+dependencies = [
+ "convert_case",
+ "proc-macro2",
+ "quote",
+ "rustc_version",
+ "syn",
+]
+
+[[package]]
+name = "dlt-core"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b304e32f1164b8c2ef1dc746b32d321f25f88a32672f0f5bcba2df0f70a3b70"
+dependencies = [
+ "byteorder",
+ "bytes",
+ "derive_more",
+ "lazy_static",
+ "log",
+ "memchr",
+ "nom",
+ "quick-xml",
+ "rustc-hash",
+ "serde",
+ "serde_json",
+ "thiserror 1.0.69",
+]
+
+[[package]]
+name = "extend"
+version = "0.1.0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
+
+[[package]]
+name = "js-sys"
+version = "0.3.72"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+
+[[package]]
+name = "log"
+version = "0.4.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
+
+[[package]]
+name = "memchr"
+version = "2.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
+
+[[package]]
+name = "minicov"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f27fe9f1cc3c22e1687f9446c2083c4c5fc7f0bcf1c7a86bdbded14985895b4b"
+dependencies = [
+ "cc",
+ "walkdir",
+]
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
+[[package]]
+name = "nom"
+version = "7.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
+
+[[package]]
+name = "paste"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.92"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "protocol"
+version = "0.1.0"
+dependencies = [
+ "paste",
+ "serde",
+ "serde-wasm-bindgen",
+ "stypes",
+ "thiserror 2.0.3",
+ "wasm-bindgen",
+ "wasm-bindgen-test",
+]
+
+[[package]]
+name = "quick-xml"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81b9228215d82c7b61490fec1de287136b5de6f5700f6e58ea9ad61a7964ca51"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rustc-hash"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+
+[[package]]
+name = "rustc_version"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
+dependencies = [
+ "semver",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "scoped-tls"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
+
+[[package]]
+name = "semver"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
+
+[[package]]
+name = "serde"
+version = "1.0.215"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde-wasm-bindgen"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8302e169f0eddcc139c70f139d19d6467353af16f9fce27e8c30158036a1e16b"
+dependencies = [
+ "js-sys",
+ "serde",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.215"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.133"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377"
+dependencies = [
+ "itoa",
+ "memchr",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
+[[package]]
+name = "stypes"
+version = "0.1.0"
+dependencies = [
+ "bincode",
+ "dlt-core",
+ "extend",
+ "serde",
+ "thiserror 2.0.3",
+ "uuid",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.89"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
+dependencies = [
+ "thiserror-impl 1.0.69",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa"
+dependencies = [
+ "thiserror-impl 2.0.3",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "2.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
+
+[[package]]
+name = "uuid"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "walkdir"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d"
+
+[[package]]
+name = "wasm-bindgen-test"
+version = "0.3.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d381749acb0943d357dcbd8f0b100640679883fcdeeef04def49daf8d33a5426"
+dependencies = [
+ "console_error_panic_hook",
+ "js-sys",
+ "minicov",
+ "scoped-tls",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "wasm-bindgen-test-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-test-macro"
+version = "0.3.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c97b2ef2c8d627381e51c071c2ab328eac606d3f69dd82bcbca20a9e389d95f0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "web-sys"
+version = "0.3.72"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "winapi-util"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
+dependencies = [
+ "windows-sys",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
diff --git a/application/apps/protocol/Cargo.toml b/application/apps/protocol/Cargo.toml
new file mode 100644
index 0000000000..2f3441aa69
--- /dev/null
+++ b/application/apps/protocol/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "protocol"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+crate-type = ["cdylib"]
+
+[dependencies]
+serde-wasm-bindgen = "0.6"
+wasm-bindgen = "0.2"
+serde = { version = "1.0", features = ["derive"] }
+thiserror = "2.0"
+wasm-bindgen-test = "0.3"
+stypes = { path = "../indexer/stypes"}
+paste = "1.0"
+
diff --git a/application/apps/protocol/src/err.rs b/application/apps/protocol/src/err.rs
new file mode 100644
index 0000000000..7eb8ed8aab
--- /dev/null
+++ b/application/apps/protocol/src/err.rs
@@ -0,0 +1,34 @@
+use thiserror::Error;
+use wasm_bindgen::JsValue;
+
+#[derive(Error, Debug)]
+pub enum E {
+    #[error("Missing field {0}")]
+    MissedField(String),
+    #[error("Invalid value of: {0}")]
+    InvalidValue(String),
+    #[error("Codec decode error: {0}")]
+    CodecDecodeError(String),
+    #[error("Codec encode error: {0}")]
+    CodecEncodeError(String),
+    #[error("Decode error: {0}")]
+    DecodeError(String),
+    #[error("Encode error: {0}")]
+    EncodeError(String),
+    #[error("Binding error: {0}")]
+    Binding(serde_wasm_bindgen::Error),
+    #[error("Not yet implemented feature")]
+    NotImplemented,
+}
+
+impl From<serde_wasm_bindgen::Error> for E {
+    fn from(err: serde_wasm_bindgen::Error) -> Self {
+        Self::Binding(err)
+    }
+}
+
+impl From<E> for JsValue {
+    fn from(val: E) -> Self {
+        JsValue::from_str(&val.to_string())
+    }
+}
diff --git a/application/apps/protocol/src/gen.rs b/application/apps/protocol/src/gen.rs
new file mode 100644
index 0000000000..40d8536720
--- /dev/null
+++ b/application/apps/protocol/src/gen.rs
@@ -0,0 +1,106 @@
+#[macro_export]
+macro_rules! gen_encode_decode_fns {
+    // All regular use cases: gen_encode_decode_fns!(ObserveOptions);
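+    // For the example above this generates wasm-exported `decodeObserveOptions`
+    // and `encodeObserveOptions` functions.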
+    ($type:ident) => {
+        paste::item! {
+            #[wasm_bindgen]
+            #[allow(non_snake_case)]
+            pub fn [<decode $type>](buf: &[u8]) -> Result<JsValue, E> {
+                let serializer = Serializer::new()
+                    .serialize_missing_as_null(true)
+                    .serialize_maps_as_objects(false)
+                    .serialize_large_number_types_as_bigints(false);
+                $type::decode(buf)
+                    .map_err(E::CodecDecodeError)?
+                    .serialize(&serializer)
+                    .map_err(|e| E::DecodeError(e.to_string()))
+            }
+
+            #[wasm_bindgen]
+            #[allow(non_snake_case)]
+            pub fn [<encode $type>](val: JsValue) -> Result<Vec<u8>, E> {
+                from_value::<$type>(val)?
+                    .encode()
+                    .map_err(E::CodecEncodeError)
+            }
+        }
+    };
+
+    // Subtype returns void: gen_encode_decode_fns!(CommandOutcome<()>);
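+    // For the example above this generates `decodeCommandOutcomeWithVoid`
+    // and `encodeCommandOutcomeWithVoid`.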
+    ($type:ident<()>) => {
+        paste::item! {
+            #[wasm_bindgen]
+            #[allow(non_snake_case)]
+            pub fn [<decode $type WithVoid>](buf: &[u8]) -> Result<JsValue, E> {
+                let serializer = Serializer::new()
+                    .serialize_missing_as_null(true)
+                    .serialize_maps_as_objects(false)
+                    .serialize_large_number_types_as_bigints(false);
+                $type::<()>::decode(buf)
+                    .map_err(E::CodecDecodeError)?
+                    .serialize(&serializer)
+                    .map_err(|e| E::DecodeError(e.to_string()))
+            }
+
+            #[wasm_bindgen]
+            #[allow(non_snake_case)]
+            pub fn [<encode $type WithVoid>](val: JsValue) -> Result<Vec<u8>, E> {
+                from_value::<$type::<()>>(val)?
+                    .encode()
+                    .map_err(E::CodecEncodeError)
+            }
+        }
+    };
+
+    // With subtypes: gen_encode_decode_fns!(CommandOutcome<String>);
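+    // For the example above this generates `decodeCommandOutcomeWithString`
+    // and `encodeCommandOutcomeWithString`.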
+    ($type:ident<$generic:ident>) => {
+        paste::item! {
+            #[wasm_bindgen]
+            #[allow(non_snake_case)]
+            pub fn [<decode $type With $generic>](buf: &[u8]) -> Result<JsValue, E> {
+                let serializer = Serializer::new()
+                    .serialize_missing_as_null(true)
+                    .serialize_maps_as_objects(false)
+                    .serialize_large_number_types_as_bigints(false);
+                $type::<$generic>::decode(buf)
+                    .map_err(E::CodecDecodeError)?
+                    .serialize(&serializer)
+                    .map_err(|e| E::DecodeError(e.to_string()))
+            }
+
+            #[wasm_bindgen]
+            #[allow(non_snake_case)]
+            pub fn [<encode $type With $generic>](val: JsValue) -> Result<Vec<u8>, E> {
+                from_value::<$type::<$generic>>(val)?
+                    .encode()
+                    .map_err(E::CodecEncodeError)
+            }
+        }
+    };
+
+    // With nested subtypes: gen_encode_decode_fns!(CommandOutcome<Option<String>>);
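+    // For the example above this generates `decodeCommandOutcomeWithOptionString`
+    // and `encodeCommandOutcomeWithOptionString`.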
+    ($type:ident<$generic:ident<$nested:ident>>) => {
+        paste::item! {
+            #[wasm_bindgen]
+            #[allow(non_snake_case)]
+            pub fn [<decode $type With $generic $nested>](buf: &[u8]) -> Result<JsValue, E> {
+                let serializer = Serializer::new()
+                    .serialize_missing_as_null(true)
+                    .serialize_maps_as_objects(false)
+                    .serialize_large_number_types_as_bigints(false);
+                $type::<$generic<$nested>>::decode(buf)
+                    .map_err(E::CodecDecodeError)?
+                    .serialize(&serializer)
+                    .map_err(|e| E::DecodeError(e.to_string()))
+            }
+
+            #[wasm_bindgen]
+            #[allow(non_snake_case)]
+            pub fn [<encode $type With $generic $nested>](val: JsValue) -> Result<Vec<u8>, E> {
+                from_value::<$type::<$generic<$nested>>>(val)?
+                    .encode()
+                    .map_err(E::CodecEncodeError)
+            }
+        }
+    };
+}
diff --git a/application/apps/protocol/src/lib.rs b/application/apps/protocol/src/lib.rs
new file mode 100644
index 0000000000..46831c05f6
--- /dev/null
+++ b/application/apps/protocol/src/lib.rs
@@ -0,0 +1,177 @@
+/// The `protocol` crate is a WebAssembly-wrapped version of the `stypes` crate, designed for encoding  
+/// and decoding message types used both on the Rust side and the Node.js side (including client-side code).
+///
+/// Code generation for `wasm_bindgen` is handled by the `gen_encode_decode_fns` macro, which sets up  
+/// the necessary encode/decode functions. For example:
+///
+/// ```ignore
+///     gen_encode_decode_fns!(ObserveOptions);
+/// ```
+///
+/// This will generate:
+///
+/// ```ignore
+/// #[wasm_bindgen]
+/// #[allow(non_snake_case)]
+/// pub fn decodeObserveOptions(buf: &[u8]) -> Result<JsValue, E> {
+///     let serializer = Serializer::new()
+///         .serialize_missing_as_null(true)
+///         .serialize_maps_as_objects(false)
+///         .serialize_large_number_types_as_bigints(false);
+///
+///     ObserveOptions::decode(buf)
+///         .map_err(E::CodecDecodeError)?
+///         .serialize(&serializer)
+///         .map_err(|e| E::DecodeError(e.to_string()))
+/// }
+///
+/// #[wasm_bindgen]
+/// #[allow(non_snake_case)]
+/// pub fn encodeObserveOptions(val: JsValue) -> Result<Vec<u8>, E> {
+///     from_value::<ObserveOptions>(val)?
+///         .encode()
+///         .map_err(E::CodecEncodeError)
+/// }
+/// ```
+///
+/// As a result, on the Node.js side you can directly decode and encode `ObserveOptions`:
+///
+/// ```ignore
+/// import * as protocol from "protocol";
+///
+/// // Decoding
+/// const bytes: Uint8Array = get_bytes();
+/// const msg = protocol.decodeObserveOptions(bytes);
+///
+/// // Encoding
+/// const obj: ObserveOptions = ...;
+/// const bytes = protocol.encodeObserveOptions(obj);
+/// ```
+///
+/// It's important to note that `wasm_bindgen` cannot derive meaningful type definitions for
+/// these payloads: the generated decode functions return `any`, and the encode functions accept `any`.
+/// Ensuring that the correct types are passed is therefore beyond the scope of this crate.
+/// While supplying an invalid byte sequence (one that doesn't match the expected data type)  
+/// will cause an error to be thrown, it is theoretically possible (though unlikely)  
+/// that an incorrect byte sequence could decode into a valid but unexpected type.  
+/// Therefore, when using this crate, ensure that the expected data type aligns with the chosen
+/// decode function.
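+///
+/// A minimal Rust-side round-trip sketch (assuming some value `opts: ObserveOptions` is at
+/// hand) illustrating why bytes must be decoded with the function of the type that produced them:
+///
+/// ```ignore
+/// // Encode with the type's own `encode()` (generated by `extend::encode_decode`)...
+/// let bytes = opts.encode().map_err(E::CodecEncodeError)?;
+/// // ...and decode the very same bytes with the decode function of the same type.
+/// let restored = ObserveOptions::decode(&bytes).map_err(E::CodecDecodeError)?;
+/// // Feeding these bytes to another type's decode function usually fails, but may in
+/// // rare cases yield a structurally valid yet unexpected value.
+/// ```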
+///
+/// ## Adding New Types
+/// To add new types, follow the steps below.
+///
+/// ### Updating `stypes`
+/// - Add your new type to the `stypes` crate.
+/// - **Important:** Ensure that `proptest` tests are implemented in `stypes` for the new type.
+///   This is a mandatory requirement when introducing any new type.
+/// - If the type is directly used in `rs-bindings`, add an implementation of the trait `TryIntoJs`. This can
+///   be easily done by using the macro `try_into_js`.
+/// - Add TypeScript definitions. This step can also be done mostly automatically by adding
+///   `#[cfg_attr(test, derive(TS), ts(export, export_to = "module_name.ts"))]` above the
+///   definition of your type. Make sure you are using the correct `module_name`. The TypeScript type
+///   definition will be placed into `application/apps/indexer/stypes/bindings/module_name.ts`.
+///   The TypeScript definitions are written out when the tests are executed (`cargo test`).
+///   Once a new definition has been generated, copy it manually into
+///   `application/platform/types/bindings`. A combined sketch of these annotations is
+///   shown after this list.
+///   **Important:** Do not remove `application/apps/indexer/stypes/bindings/index.ts`. This file
+///   isn't generated and is created manually. If you are introducing a new
+///   module along with your type, please add a reference to it in the `index.ts` file.
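+///
+/// A combined sketch of the annotations mentioned above, with a hypothetical type and
+/// module name (the exact derives on real `stypes` types may differ):
+///
+/// ```ignore
+/// #[derive(Debug, Serialize, Deserialize, Clone)]
+/// #[extend::encode_decode]
+/// #[cfg_attr(test, derive(TS), ts(export, export_to = "module_name.ts"))]
+/// pub struct MyRecentlyAddedType {
+///     pub name: String,
+///     pub size: Option<u64>,
+/// }
+/// // Remember to also add proptest coverage for the new type and, if it is used directly
+/// // in `rs-bindings`, a `TryIntoJs` implementation (e.g. via the `try_into_js` macro).
+/// ```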
+///
+/// ### Updating `protocol`
+/// Once the type is added to `stypes`, simply reference it in `protocol`:
+/// ```ignore
+/// gen_encode_decode_fns!(MyRecentlyAddedType);
+/// ```
+///
+/// ### Updating the test in `ts-bindings`
+/// Once the type has been added, update the map in `application/apps/rustcore/ts-bindings/spec/session.protocol.spec.ts`:
+/// add the name of your type and link it to the related decode function.
+///
+/// ```ignore
+/// const MAP: { [key: string]: (buf: Uint8Array) => any } = {
+///     AroundIndexes: protocol.decodeAroundIndexes,
+///     ...
+///     AttachmentList: protocol.decodeAttachmentList,
+/// }
+/// ```
+///
+/// ### Verification
+/// To verify your changes, run the `test.sh` script. This test uses `proptest` in `stypes` to
+/// randomly generate values for all types and serializes them as bytes into temporary files. Next,
+/// the test suite in `ts-bindings` decodes all of these messages.
+///
+/// - If the process fails, `test.sh` will report an error.  
+/// - If it succeeds, you can consider the new type successfully integrated.
+mod err;
+mod gen;
+
+pub(crate) use err::*;
+pub(crate) use serde::Serialize;
+pub(crate) use serde_wasm_bindgen::{from_value, Serializer};
+pub(crate) use stypes::*;
+pub(crate) use wasm_bindgen::prelude::*;
+
+gen_encode_decode_fns!(ObserveOptions);
+gen_encode_decode_fns!(MulticastInfo);
+gen_encode_decode_fns!(UdpConnectionInfo);
+gen_encode_decode_fns!(ParserType);
+gen_encode_decode_fns!(DltParserSettings);
+gen_encode_decode_fns!(SomeIpParserSettings);
+gen_encode_decode_fns!(Transport);
+gen_encode_decode_fns!(ProcessTransportConfig);
+gen_encode_decode_fns!(SerialTransportConfig);
+gen_encode_decode_fns!(TCPTransportConfig);
+gen_encode_decode_fns!(UDPTransportConfig);
+gen_encode_decode_fns!(FileFormat);
+gen_encode_decode_fns!(ObserveOrigin);
+gen_encode_decode_fns!(FoldersScanningResult);
+gen_encode_decode_fns!(DltStatisticInfo);
+gen_encode_decode_fns!(Profile);
+gen_encode_decode_fns!(ProfileList);
+gen_encode_decode_fns!(CommandOutcome<FoldersScanningResult>);
+gen_encode_decode_fns!(CommandOutcome<SerialPortsList>);
+gen_encode_decode_fns!(CommandOutcome<ProfileList>);
+gen_encode_decode_fns!(CommandOutcome<DltStatisticInfo>);
+gen_encode_decode_fns!(CommandOutcome<MapKeyValue>);
+gen_encode_decode_fns!(CommandOutcome<()>);
+gen_encode_decode_fns!(CommandOutcome<i64>);
+gen_encode_decode_fns!(CommandOutcome<Option<String>>);
+gen_encode_decode_fns!(CommandOutcome<String>);
+gen_encode_decode_fns!(CommandOutcome<bool>);
+gen_encode_decode_fns!(ComputationError);
+gen_encode_decode_fns!(CallbackEvent);
+gen_encode_decode_fns!(NativeError);
+gen_encode_decode_fns!(NativeErrorKind);
+gen_encode_decode_fns!(Severity);
+gen_encode_decode_fns!(OperationDone);
+gen_encode_decode_fns!(LifecycleTransition);
+gen_encode_decode_fns!(AttachmentInfo);
+gen_encode_decode_fns!(AttachmentList);
+gen_encode_decode_fns!(Notification);
+gen_encode_decode_fns!(Progress);
+gen_encode_decode_fns!(Ticks);
+gen_encode_decode_fns!(Ranges);
+gen_encode_decode_fns!(SourceDefinition);
+gen_encode_decode_fns!(Sources);
+gen_encode_decode_fns!(SdeRequest);
+gen_encode_decode_fns!(SdeResponse);
+gen_encode_decode_fns!(GrabbedElement);
+gen_encode_decode_fns!(GrabbedElementList);
+gen_encode_decode_fns!(AroundIndexes);
+gen_encode_decode_fns!(FilterMatch);
+gen_encode_decode_fns!(FilterMatchList);
+gen_encode_decode_fns!(FolderEntity);
+gen_encode_decode_fns!(FolderEntityDetails);
+gen_encode_decode_fns!(FolderEntityType);
+gen_encode_decode_fns!(SerialPortsList);
+gen_encode_decode_fns!(ExtractedMatchValue);
+gen_encode_decode_fns!(ResultExtractedMatchValues);
+gen_encode_decode_fns!(ResultU64);
+gen_encode_decode_fns!(ResultBool);
+gen_encode_decode_fns!(ResultSleep);
+gen_encode_decode_fns!(NearestPosition);
+gen_encode_decode_fns!(ResultNearestPosition);
+gen_encode_decode_fns!(Point);
+gen_encode_decode_fns!(ResultSearchValues);
+gen_encode_decode_fns!(ResultScaledDistribution);
+gen_encode_decode_fns!(DltLevelDistribution);
diff --git a/application/apps/protocol/test.sh b/application/apps/protocol/test.sh
new file mode 100644
index 0000000000..716dd42c44
--- /dev/null
+++ b/application/apps/protocol/test.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+reset
+echo "
+====== WARNING ===============================================================
+This script DELETES ALL contents of the folder specified in the
+environment variable CHIPMUNK_PROTOCOL_TEST_OUTPUT.
+Before proceeding, make sure that the CHIPMUNK_PROTOCOL_TEST_OUTPUT variable
+contains the correct path. If the CHIPMUNK_PROTOCOL_TEST_OUTPUT variable is
+not defined, test data will be written to /\$TMP/stypes_tests.
+====== WARNING ===============================================================
+"
+read -p "Do you want to continue? (y/N): " response
+
+response=${response,,} 
+if [[ "$response" != "y" ]]; then
+    echo "Operation aborted."
+    exit 1
+fi
+
+echo "Build wasm module"
+cargo clean
+wasm-pack build --target nodejs
+
+echo "Create test use-cases"
+cd ../indexer/stypes
+export CHIPMUNK_PROTOCOL_TEST_OUTPUT="${CHIPMUNK_PROTOCOL_TEST_OUTPUT:-/tmp/stypes_test/}"
+cargo test --release --features "test_and_gen" -- --nocapture --ignored
+
+echo "Run tests"
+export JASMIN_TEST_BLOCKS_LOGS=on
+cd ../../rustcore/ts-bindings
+rm -rf ./node_modules
+rm -rf ./spec/build
+rake bindings:test:protocol
+ 
\ No newline at end of file
diff --git a/application/apps/rustcore/rs-bindings/Cargo.lock b/application/apps/rustcore/rs-bindings/Cargo.lock
index f82a31e83c..96297c41eb 100644
--- a/application/apps/rustcore/rs-bindings/Cargo.lock
+++ b/application/apps/rustcore/rs-bindings/Cargo.lock
@@ -646,9 +646,9 @@ dependencies = [
 
 [[package]]
 name = "dlt-core"
-version = "0.17.0"
+version = "0.18.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa52d43b97a134644192c66296e5d3e7ed8b3d409b117c62203047bb42c6b9f1"
+checksum = "0b304e32f1164b8c2ef1dc746b32d321f25f88a32672f0f5bcba2df0f70a3b70"
 dependencies = [
  "buf_redux 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "byteorder",
@@ -759,6 +759,15 @@ dependencies = [
  "pin-project-lite",
 ]
 
+[[package]]
+name = "extend"
+version = "0.1.0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.90",
+]
+
 [[package]]
 name = "fastrand"
 version = "2.2.0"
@@ -1511,6 +1520,7 @@ dependencies = [
  "serde_json",
  "session",
  "sources",
+ "stypes",
  "thiserror 1.0.69",
  "tikv-jemallocator",
  "tokio",
@@ -1834,6 +1844,7 @@ dependencies = [
  "regex",
  "serde",
  "serde_json",
+ "stypes",
  "thiserror 2.0.3",
  "tokio-util",
  "uuid",
@@ -2171,6 +2182,7 @@ dependencies = [
  "serde_json",
  "serialport",
  "sources",
+ "stypes",
  "thiserror 2.0.3",
  "tokio",
  "tokio-stream",
@@ -2303,6 +2315,7 @@ dependencies = [
  "regex",
  "serde",
  "shellexpand",
+ "stypes",
  "thiserror 2.0.3",
  "tokio",
  "tokio-serial",
@@ -2323,6 +2336,23 @@ version = "0.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
 
+[[package]]
+name = "stypes"
+version = "0.1.0"
+dependencies = [
+ "bincode",
+ "dlt-core",
+ "envvars",
+ "extend",
+ "node-bindgen",
+ "regex",
+ "serde",
+ "thiserror 2.0.3",
+ "tokio",
+ "uuid",
+ "walkdir",
+]
+
 [[package]]
 name = "syn"
 version = "1.0.109"
diff --git a/application/apps/rustcore/rs-bindings/Cargo.toml b/application/apps/rustcore/rs-bindings/Cargo.toml
index de9df7f094..c391998dba 100644
--- a/application/apps/rustcore/rs-bindings/Cargo.toml
+++ b/application/apps/rustcore/rs-bindings/Cargo.toml
@@ -22,19 +22,20 @@ dirs = "5.0"
 indexer_base = { path = "../../indexer/indexer_base" }
 log = "0.4"
 log4rs = "1.3"
-merging = { path = "../../indexer/merging" }
 # node-bindgen = {git = "https://github.com/DmitryAstafyev/node-bindgen.git", branch="master", features = ["serde-json"] }
 node-bindgen = {git = "https://github.com/infinyon/node-bindgen.git", branch="master", features = ["serde-json"] }
-# node-bindgen = { version = "5.0", features = ["serde-json"] }
-processor = { path = "../../indexer/processor" }
 serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
-session = { path = "../../indexer/session" }
-sources = { path = "../../indexer/sources" }
 thiserror = "1.0"
 tokio = { version = "1.24", features = ["full"] }
 tokio-util = "0.7"
 uuid = { version = "1.3", features = ["serde", "v4"] }
+serde_json = "1.0"
+
+merging = { path = "../../indexer/merging" }
+processor = { path = "../../indexer/processor" }
+session = { path = "../../indexer/session" }
+sources = { path = "../../indexer/sources" }
+stypes = { path = "../../indexer/stypes", features=["nodejs"] }
 
 [target.'cfg(unix)'.dependencies]
 # Jemalloc combined with Node.js exceeds the default TLS memory limit on Linux.
diff --git a/application/apps/rustcore/rs-bindings/src/js/converting/source.rs b/application/apps/rustcore/rs-bindings/src/js/converting/source.rs
index 1737f3bf15..a0ed3c9ca9 100644
--- a/application/apps/rustcore/rs-bindings/src/js/converting/source.rs
+++ b/application/apps/rustcore/rs-bindings/src/js/converting/source.rs
@@ -6,10 +6,9 @@ use node_bindgen::{
     sys::napi_value,
 };
 use serde::Serialize;
-use session::state::SourceDefinition;
 
 #[derive(Serialize, Debug, Clone)]
-pub struct WrappedSourceDefinition(pub SourceDefinition);
+pub struct WrappedSourceDefinition(pub stypes::SourceDefinition);
 
 impl TryIntoJs for WrappedSourceDefinition {
     fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {
diff --git a/application/apps/rustcore/rs-bindings/src/js/jobs/mod.rs b/application/apps/rustcore/rs-bindings/src/js/jobs/mod.rs
index 852600af61..5db31b87a6 100644
--- a/application/apps/rustcore/rs-bindings/src/js/jobs/mod.rs
+++ b/application/apps/rustcore/rs-bindings/src/js/jobs/mod.rs
@@ -1,17 +1,8 @@
-use crate::js::{
-    converting::filter::WrappedSearchFilter, session::events::ComputationErrorWrapper,
-};
+use crate::js::converting::filter::WrappedSearchFilter;
 use log::{debug, error};
-use node_bindgen::{
-    core::{val::JsEnv, NjError, TryIntoJs},
-    derive::node_bindgen,
-    sys::napi_value,
-};
-use serde::Serialize;
-use session::{
-    events::ComputationError,
-    unbound::{api::UnboundSessionAPI, commands::CommandOutcome, UnboundSession},
-};
+use node_bindgen::derive::node_bindgen;
+
+use session::unbound::{api::UnboundSessionAPI, UnboundSession};
 use std::{convert::TryFrom, thread};
 use tokio::runtime::Runtime;
 use tokio_util::sync::CancellationToken;
@@ -21,33 +12,14 @@ struct UnboundJobs {
     finished: CancellationToken,
 }
 
-pub(crate) struct CommandOutcomeWrapper<T: Serialize>(pub CommandOutcome<T>);
-
-impl<T: Serialize> TryIntoJs for CommandOutcomeWrapper<T> {
-    /// serialize into json object
-    fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {
-        match serde_json::to_string(&self.0) {
-            Ok(s) => js_env.create_string_utf8(&s),
-            Err(e) => Err(NjError::Other(format!(
-                "Could not convert Callback event to json: {e}"
-            ))),
-        }
-    }
-}
-
-fn u64_from_i64(id: i64) -> Result<u64, ComputationErrorWrapper> {
-    u64::try_from(id).map_err(|_| {
-        ComputationErrorWrapper(ComputationError::InvalidArgs(String::from(
-            "ID of job is invalid",
-        )))
-    })
+fn u64_from_i64(id: i64) -> Result<u64, stypes::ComputationError> {
+    u64::try_from(id)
+        .map_err(|_| stypes::ComputationError::InvalidArgs(String::from("ID of job is invalid")))
 }
 
-fn usize_from_i64(id: i64) -> Result<usize, ComputationErrorWrapper> {
+fn usize_from_i64(id: i64) -> Result<usize, stypes::ComputationError> {
     usize::try_from(id).map_err(|_| {
-        ComputationErrorWrapper(ComputationError::InvalidArgs(String::from(
-            "Fail to conver i64 to usize",
-        )))
+        stypes::ComputationError::InvalidArgs(String::from("Failed to convert i64 to usize"))
     })
 }
 
@@ -63,9 +35,9 @@ impl UnboundJobs {
     }
 
     #[node_bindgen(mt)]
-    async fn init(&mut self) -> Result<(), ComputationErrorWrapper> {
+    async fn init(&mut self) -> Result<(), stypes::ComputationError> {
         let rt = Runtime::new().map_err(|e| {
-            ComputationError::Process(format!("Could not start tokio runtime: {e}"))
+            stypes::ComputationError::Process(format!("Could not start tokio runtime: {e}"))
         })?;
 
         let (mut session, api) = UnboundSession::new();
@@ -87,10 +59,10 @@ impl UnboundJobs {
     }
 
     #[node_bindgen]
-    async fn destroy(&self) -> Result<(), ComputationErrorWrapper> {
+    async fn destroy(&self) -> Result<(), stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .shutdown()
             .await?;
         self.finished.cancelled().await;
@@ -99,13 +71,12 @@ impl UnboundJobs {
 
     /// Cancel given operation/task
     #[node_bindgen]
-    async fn abort(&self, id: i64) -> Result<(), ComputationErrorWrapper> {
+    async fn abort(&self, id: i64) -> Result<(), stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .cancel_job(&u64_from_i64(id)?)
             .await
-            .map_err(ComputationErrorWrapper)
     }
 
     // Custom methods (jobs)
@@ -118,10 +89,11 @@ impl UnboundJobs {
         paths: Vec<String>,
         include_files: bool,
         include_folders: bool,
-    ) -> Result<CommandOutcomeWrapper<String>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<stypes::FoldersScanningResult>, stypes::ComputationError>
+    {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .list_folder_content(
                 u64_from_i64(id)?,
                 usize_from_i64(depth)?,
@@ -131,8 +103,6 @@ impl UnboundJobs {
                 include_folders,
             )
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
@@ -140,14 +110,12 @@ impl UnboundJobs {
         &self,
         id: i64,
         file_path: String,
-    ) -> Result<CommandOutcomeWrapper<bool>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<bool>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .is_file_binary(u64_from_i64(id)?, file_path)
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
@@ -156,14 +124,12 @@ impl UnboundJobs {
         id: i64,
         path: String,
         args: Vec<String>,
-    ) -> Result<CommandOutcomeWrapper<()>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<()>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .spawn_process(u64_from_i64(id)?, path, args)
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
@@ -171,14 +137,12 @@ impl UnboundJobs {
         &self,
         id: i64,
         path: String,
-    ) -> Result<CommandOutcomeWrapper<String>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<String>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .get_file_checksum(u64_from_i64(id)?, path)
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
@@ -186,14 +150,12 @@ impl UnboundJobs {
         &self,
         id: i64,
         files: Vec<String>,
-    ) -> Result<CommandOutcomeWrapper<String>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<stypes::DltStatisticInfo>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .get_dlt_stats(u64_from_i64(id)?, files)
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
@@ -201,56 +163,48 @@ impl UnboundJobs {
         &self,
         id: i64,
         files: Vec<String>,
-    ) -> Result<CommandOutcomeWrapper<String>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<String>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .get_someip_statistic(u64_from_i64(id)?, files)
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
     async fn get_shell_profiles(
         &self,
         id: i64,
-    ) -> Result<CommandOutcomeWrapper<String>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<stypes::ProfileList>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .get_shell_profiles(u64_from_i64(id)?)
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
     async fn get_context_envvars(
         &self,
         id: i64,
-    ) -> Result<CommandOutcomeWrapper<String>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<stypes::MapKeyValue>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .get_context_envvars(u64_from_i64(id)?)
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
     async fn get_serial_ports_list(
         &self,
         id: i64,
-    ) -> Result<CommandOutcomeWrapper<Vec<String>>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<stypes::SerialPortsList>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .get_serial_ports_list(u64_from_i64(id)?)
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
@@ -258,14 +212,12 @@ impl UnboundJobs {
         &self,
         id: i64,
         filter: WrappedSearchFilter,
-    ) -> Result<CommandOutcomeWrapper<Option<String>>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<Option<String>>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .get_regex_error(u64_from_i64(id)?, filter.as_filter())
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
@@ -274,14 +226,12 @@ impl UnboundJobs {
         id: i64,
         custom_arg_a: i64,
         custom_arg_b: i64,
-    ) -> Result<CommandOutcomeWrapper<i64>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<i64>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .cancel_test(u64_from_i64(id)?, custom_arg_a, custom_arg_b)
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 
     #[node_bindgen]
@@ -289,13 +239,11 @@ impl UnboundJobs {
         &self,
         id: i64,
         ms: i64,
-    ) -> Result<CommandOutcomeWrapper<()>, ComputationErrorWrapper> {
+    ) -> Result<stypes::CommandOutcome<()>, stypes::ComputationError> {
         self.api
             .as_ref()
-            .ok_or(ComputationError::SessionUnavailable)?
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
             .sleep(u64_from_i64(id)?, u64_from_i64(ms)?)
             .await
-            .map_err(ComputationErrorWrapper)
-            .map(CommandOutcomeWrapper)
     }
 }
diff --git a/application/apps/rustcore/rs-bindings/src/js/session/events.rs b/application/apps/rustcore/rs-bindings/src/js/session/events.rs
deleted file mode 100644
index d05eff3338..0000000000
--- a/application/apps/rustcore/rs-bindings/src/js/session/events.rs
+++ /dev/null
@@ -1,61 +0,0 @@
-use node_bindgen::{
-    core::{val::JsEnv, NjError, TryIntoJs},
-    sys::napi_value,
-};
-use session::events::{CallbackEvent, ComputationError, LifecycleTransition};
-
-#[derive(Debug)]
-pub(crate) struct CallbackEventWrapper(pub CallbackEvent);
-
-impl TryIntoJs for CallbackEventWrapper {
-    /// serialize into json object
-    fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {
-        match serde_json::to_string(&self.0) {
-            Ok(s) => js_env.create_string_utf8(&s),
-            Err(e) => Err(NjError::Other(format!(
-                "Could not convert Callback event to json: {e}"
-            ))),
-        }
-    }
-}
-
-impl From<CallbackEvent> for CallbackEventWrapper {
-    fn from(e: CallbackEvent) -> CallbackEventWrapper {
-        CallbackEventWrapper(e)
-    }
-}
-impl From<serde_json::Error> for ComputationErrorWrapper {
-    fn from(_: serde_json::Error) -> ComputationErrorWrapper {
-        ComputationErrorWrapper(ComputationError::InvalidData)
-    }
-}
-
-pub(crate) struct ComputationErrorWrapper(pub ComputationError);
-
-impl TryIntoJs for ComputationErrorWrapper {
-    fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {
-        let value = serde_json::to_value(self.0).map_err(|e| NjError::Other(format!("{e}")))?;
-        value.try_to_js(js_env)
-    }
-}
-
-impl From<ComputationError> for ComputationErrorWrapper {
-    fn from(err: ComputationError) -> ComputationErrorWrapper {
-        ComputationErrorWrapper(err)
-    }
-}
-
-#[derive(Debug)]
-pub(crate) struct LifecycleTransitionWrapper(pub LifecycleTransition);
-
-impl TryIntoJs for LifecycleTransitionWrapper {
-    /// serialize into json object
-    fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {
-        match serde_json::to_string(&self.0) {
-            Ok(s) => js_env.create_string_utf8(&s),
-            Err(e) => Err(NjError::Other(format!(
-                "Could not convert Callback event to json: {e}"
-            ))),
-        }
-    }
-}
diff --git a/application/apps/rustcore/rs-bindings/src/js/session/mod.rs b/application/apps/rustcore/rs-bindings/src/js/session/mod.rs
index 1d654d35a3..368763efb3 100644
--- a/application/apps/rustcore/rs-bindings/src/js/session/mod.rs
+++ b/application/apps/rustcore/rs-bindings/src/js/session/mod.rs
@@ -1,26 +1,12 @@
-pub mod events;
 pub mod progress_tracker;
 
-use crate::{
-    js::{
-        converting::{filter::WrappedSearchFilter, source::WrappedSourceDefinition},
-        session::events::ComputationErrorWrapper,
-    },
-    logging::targets,
-};
-use events::CallbackEventWrapper;
+use crate::{js::converting::filter::WrappedSearchFilter, logging::targets};
 use log::{debug, error, info, warn};
-use node_bindgen::derive::node_bindgen;
+use node_bindgen::{core::buffer::JSArrayBuffer, derive::node_bindgen};
 use processor::grabber::LineRange;
-use session::{
-    events::{CallbackEvent, ComputationError, NativeError, NativeErrorKind},
-    factory::ObserveOptions,
-    operations,
-    progress::Severity,
-    session::Session,
-};
-use sources::sde;
+use session::{operations, session::Session};
 use std::{convert::TryFrom, ops::RangeInclusive, path::PathBuf, thread};
+use stypes::GrabbedElementList;
 use tokio::{runtime::Runtime, sync::oneshot};
 use uuid::Uuid;
 
@@ -47,12 +33,12 @@ impl RustSession {
     }
 
     #[node_bindgen(mt)]
-    async fn init<F: Fn(CallbackEventWrapper) + Send + 'static>(
+    async fn init<F: Fn(stypes::CallbackEvent) + Send + 'static>(
         &mut self,
         callback: F,
-    ) -> Result<(), ComputationErrorWrapper> {
+    ) -> Result<(), stypes::ComputationError> {
         let rt = Runtime::new().map_err(|e| {
-            ComputationError::Process(format!("Could not start tokio runtime: {e}"))
+            stypes::ComputationError::Process(format!("Could not start tokio runtime: {e}"))
         })?;
         let (tx_session, rx_session) = oneshot::channel();
         let uuid = self.uuid;
@@ -66,10 +52,10 @@ impl RustSession {
                         }
                         debug!("task is started");
                         while let Some(event) = rx_callback_events.recv().await {
-                            callback(event.into())
+                            callback(event)
                         }
                         debug!("sending SessionDestroyed event");
-                        callback(CallbackEvent::SessionDestroyed.into());
+                        callback(stypes::CallbackEvent::SessionDestroyed);
                         debug!("task is finished");
                     }
                     Err(e) => {
@@ -82,22 +68,21 @@ impl RustSession {
             })
         });
         self.session = rx_session.await.map_err(|_| {
-            ComputationErrorWrapper(ComputationError::Communication(String::from(
+            stypes::ComputationError::Communication(String::from(
                 "Fail to get session instance to setup",
-            )))
+            ))
         })?;
         Ok(())
     }
 
     #[node_bindgen]
-    fn get_uuid(&self) -> Result<String, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            Ok(session.get_uuid().to_string())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    fn get_uuid(&self) -> Result<String, stypes::ComputationError> {
+        Ok(self
+            .session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .get_uuid()
+            .to_string())
     }
 
     #[node_bindgen]
@@ -105,88 +90,63 @@ impl RustSession {
         &self,
         operation_id: String,
         target_id: String,
-    ) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .abort(
-                    operations::uuid_from_str(&operation_id)?,
-                    operations::uuid_from_str(&target_id)?,
+    ) -> Result<(), stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .abort(
+                operations::uuid_from_str(&operation_id)?,
+                operations::uuid_from_str(&target_id)?,
+            )
+    }
+
+    #[node_bindgen]
+    async fn stop(&self, operation_id: String) -> Result<(), stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .stop(operations::uuid_from_str(&operation_id)?)
+            .await
+    }
+
+    #[node_bindgen]
+    async fn get_session_file(&self) -> Result<String, stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .get_state()
+            .get_session_file()
+            .await
+            .map(|p| p.to_string_lossy().to_string())
+            .map_err(|e: stypes::NativeError| {
+                <stypes::ComputationError as Into<stypes::ComputationError>>::into(
+                    stypes::ComputationError::NativeError(e),
                 )
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
-    }
-
-    #[node_bindgen]
-    async fn stop(&self, operation_id: String) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .stop(operations::uuid_from_str(&operation_id)?)
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
-    }
-
-    #[node_bindgen]
-    async fn get_session_file(&self) -> Result<String, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .get_state()
-                .get_session_file()
-                .await
-                .map(|p| p.to_string_lossy().to_string())
-                .map_err(|e: NativeError| {
-                    <ComputationError as Into<ComputationErrorWrapper>>::into(
-                        ComputationError::NativeError(e),
-                    )
-                })
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+            })
     }
 
     #[node_bindgen]
-    async fn get_stream_len(&self) -> Result<i64, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .get_stream_len()
-                .await
-                .map(|r| r as i64)
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn get_stream_len(&self) -> Result<i64, stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .get_stream_len()
+            .await
+            .map(|r| r as i64)
     }
 
     #[node_bindgen]
-    async fn get_search_len(&self) -> Result<i64, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .get_search_result_len()
-                .await
-                .map(|r| r as i64)
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn get_search_len(&self) -> Result<i64, stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .get_search_result_len()
+            .await
+            .map(|r| r as i64)
     }
 
     #[node_bindgen]
-    async fn details(&self, _index: i64) -> Result<String, ComputationErrorWrapper> {
+    async fn details(&self, _index: i64) -> Result<String, stypes::ComputationError> {
         todo!("nyi");
         // Log
     }
@@ -205,7 +165,7 @@ impl RustSession {
     ///
     /// # Returns
     ///
-    /// * `Result<(), ComputationErrorWrapper>`:
+    /// * `Result<(), stypes::ComputationError>`:
     ///     - `Ok(())` if the export is successful.
-    ///     - `Err(ComputationErrorWrapper)` if an error occurs during the export process.
+    ///     - `Err(stypes::ComputationError)` if an error occurs during the export process.
     ///
@@ -218,39 +178,33 @@ impl RustSession {
         spliter: String,
         delimiter: String,
         operation_id: String,
-    ) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .export(
-                    operations::uuid_from_str(&operation_id)?,
-                    PathBuf::from(out_path),
-                    ranges
-                        .iter()
-                        .map(|(s, e)| RangeInclusive::<u64>::new(*s as u64, *e as u64))
-                        .collect::<Vec<RangeInclusive<u64>>>(),
-                    columns
-                        .into_iter()
-                        .map(usize::try_from)
-                        .collect::<Result<Vec<usize>, _>>()
-                        .map_err(|_| {
-                            ComputationErrorWrapper(ComputationError::NativeError(NativeError {
-                                severity: Severity::ERROR,
-                                kind: NativeErrorKind::Io,
-                                message: Some(String::from(
-                                    "Fail to get valid columns list. Supported type: [u8]",
-                                )),
-                            }))
-                        })?,
-                    (!spliter.is_empty()).then_some(spliter),
-                    (!delimiter.is_empty()).then_some(delimiter),
-                )
-                .map_err(ComputationErrorWrapper)?;
-            Ok(())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<(), stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .export(
+                operations::uuid_from_str(&operation_id)?,
+                PathBuf::from(out_path),
+                ranges
+                    .iter()
+                    .map(|(s, e)| RangeInclusive::<u64>::new(*s as u64, *e as u64))
+                    .collect::<Vec<RangeInclusive<u64>>>(),
+                columns
+                    .into_iter()
+                    .map(usize::try_from)
+                    .collect::<Result<Vec<usize>, _>>()
+                    .map_err(|_| {
+                        stypes::ComputationError::NativeError(stypes::NativeError {
+                            severity: stypes::Severity::ERROR,
+                            kind: stypes::NativeErrorKind::Io,
+                            message: Some(String::from(
+                                "Fail to get valid columns list. Supported type: [u8]",
+                            )),
+                        })
+                    })?,
+                (!spliter.is_empty()).then_some(spliter),
+                (!delimiter.is_empty()).then_some(delimiter),
+            )
     }
 
     #[node_bindgen]
@@ -259,38 +213,27 @@ impl RustSession {
         out_path: String,
         ranges: Vec<(i64, i64)>,
         operation_id: String,
-    ) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .export_raw(
-                    operations::uuid_from_str(&operation_id)?,
-                    PathBuf::from(out_path),
-                    ranges
-                        .iter()
-                        .map(|(s, e)| RangeInclusive::<u64>::new(*s as u64, *e as u64))
-                        .collect::<Vec<RangeInclusive<u64>>>(),
-                )
-                .map_err(ComputationErrorWrapper)?;
-            Ok(())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<(), stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .export_raw(
+                operations::uuid_from_str(&operation_id)?,
+                PathBuf::from(out_path),
+                ranges
+                    .iter()
+                    .map(|(s, e)| RangeInclusive::<u64>::new(*s as u64, *e as u64))
+                    .collect::<Vec<RangeInclusive<u64>>>(),
+            )
     }
 
     #[node_bindgen]
-    async fn is_raw_export_available(&self) -> Result<bool, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .is_raw_export_available()
-                .await
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn is_raw_export_available(&self) -> Result<bool, stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .is_raw_export_available()
+            .await
     }
 
     #[node_bindgen]
@@ -298,22 +241,16 @@ impl RustSession {
         &self,
         start_line_index: i64,
         number_of_lines: i64,
-    ) -> Result<String, ComputationErrorWrapper> {
-        let start = u64::try_from(start_line_index)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
+    ) -> Result<GrabbedElementList, stypes::ComputationError> {
+        let start =
+            u64::try_from(start_line_index).map_err(|_| stypes::ComputationError::InvalidData)?;
         let end = u64::try_from(start_line_index + number_of_lines - 1)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
-        if let Some(ref session) = self.session {
-            let grabbed = session
-                .grab(LineRange::from(start..=end))
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(serde_json::to_string(&grabbed)?)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+            .map_err(|_| stypes::ComputationError::InvalidData)?;
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .grab(LineRange::from(start..=end))
+            .await
     }
 
     #[node_bindgen]
@@ -321,128 +258,81 @@ impl RustSession {
         &self,
         start_line_index: i64,
         number_of_lines: i64,
-    ) -> Result<String, ComputationErrorWrapper> {
-        let start = u64::try_from(start_line_index)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
+    ) -> Result<GrabbedElementList, stypes::ComputationError> {
+        let start =
+            u64::try_from(start_line_index).map_err(|_| stypes::ComputationError::InvalidData)?;
         let end = u64::try_from(start_line_index + number_of_lines - 1)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
-        if let Some(ref session) = self.session {
-            let grabbed = session
-                .grab_indexed(RangeInclusive::<u64>::new(start, end))
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(serde_json::to_string(&grabbed)?)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+            .map_err(|_| stypes::ComputationError::InvalidData)?;
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .grab_indexed(RangeInclusive::<u64>::new(start, end))
+            .await
     }
 
     #[node_bindgen]
-    async fn set_indexing_mode(&self, mode: i32) -> Result<(), ComputationErrorWrapper> {
-        let mode = u8::try_from(mode)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
-        if let Some(ref session) = self.session {
-            session
-                .set_indexing_mode(mode)
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn set_indexing_mode(&self, mode: i32) -> Result<(), stypes::ComputationError> {
+        let mode = u8::try_from(mode).map_err(|_| stypes::ComputationError::InvalidData)?;
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .set_indexing_mode(mode)
+            .await
     }
 
     #[node_bindgen]
-    async fn get_indexed_len(&self) -> Result<i64, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .get_indexed_len()
-                .await
-                .map(|r| r as i64)
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn get_indexed_len(&self) -> Result<i64, stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .get_indexed_len()
+            .await
+            .map(|r| r as i64)
     }
 
     #[node_bindgen]
     async fn get_around_indexes(
         &self,
         position: i64,
-    ) -> Result<(Option<i64>, Option<i64>), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .get_around_indexes(position as u64)
-                .await
-                .map(|(b, a)| (b.map(|p| p as i64), a.map(|p| p as i64)))
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<stypes::AroundIndexes, stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .get_around_indexes(position as u64)
+            .await
     }
 
     #[node_bindgen]
-    async fn add_bookmark(&self, row: i64) -> Result<(), ComputationErrorWrapper> {
-        let row = u64::try_from(row)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
-        if let Some(ref session) = self.session {
-            session
-                .add_bookmark(row)
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn add_bookmark(&self, row: i64) -> Result<(), stypes::ComputationError> {
+        let row = u64::try_from(row).map_err(|_| stypes::ComputationError::InvalidData)?;
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .add_bookmark(row)
+            .await
     }
 
     #[node_bindgen]
-    async fn set_bookmarks(&self, rows: Vec<i64>) -> Result<(), ComputationErrorWrapper> {
+    async fn set_bookmarks(&self, rows: Vec<i64>) -> Result<(), stypes::ComputationError> {
         let mut converted: Vec<u64> = vec![];
         for row in rows.iter() {
-            converted.push(
-                u64::try_from(*row)
-                    .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?,
-            );
-        }
-        if let Some(ref session) = self.session {
-            session
-                .set_bookmarks(converted)
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
+            converted.push(u64::try_from(*row).map_err(|_| stypes::ComputationError::InvalidData)?);
         }
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .set_bookmarks(converted)
+            .await
     }
 
     #[node_bindgen]
-    async fn remove_bookmark(&self, row: i64) -> Result<(), ComputationErrorWrapper> {
-        let row = u64::try_from(row)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
-        if let Some(ref session) = self.session {
-            session
-                .remove_bookmark(row)
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn remove_bookmark(&self, row: i64) -> Result<(), stypes::ComputationError> {
+        let row = u64::try_from(row).map_err(|_| stypes::ComputationError::InvalidData)?;
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .remove_bookmark(row)
+            .await
     }
 
     #[node_bindgen]
@@ -451,22 +341,15 @@ impl RustSession {
         seporator: i64,
         offset: i64,
         above: bool,
-    ) -> Result<(), ComputationErrorWrapper> {
-        let seporator = u64::try_from(seporator)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
-        let offset = u64::try_from(offset)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
-        if let Some(ref session) = self.session {
-            session
-                .expand_breadcrumbs(seporator, offset, above)
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<(), stypes::ComputationError> {
+        let seporator =
+            u64::try_from(seporator).map_err(|_| stypes::ComputationError::InvalidData)?;
+        let offset = u64::try_from(offset).map_err(|_| stypes::ComputationError::InvalidData)?;
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .expand_breadcrumbs(seporator, offset, above)
+            .await
     }
 
     #[node_bindgen]
@@ -474,64 +357,47 @@ impl RustSession {
         &self,
         start_line_index: i64,
         number_of_lines: i64,
-    ) -> Result<String, ComputationErrorWrapper> {
-        let start = u64::try_from(start_line_index)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
+    ) -> Result<GrabbedElementList, stypes::ComputationError> {
+        let start =
+            u64::try_from(start_line_index).map_err(|_| stypes::ComputationError::InvalidData)?;
         let end = u64::try_from(start_line_index + number_of_lines - 1)
-            .map_err(|_| ComputationErrorWrapper(ComputationError::InvalidData))?;
-        if let Some(ref session) = self.session {
-            let grabbed = session
-                .grab_search(LineRange::from(start..=end))
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(serde_json::to_string(&grabbed)?)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+            .map_err(|_| stypes::ComputationError::InvalidData)?;
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .grab_search(LineRange::from(start..=end))
+            .await
     }
 
     #[node_bindgen]
     async fn grab_ranges(
         &self,
         ranges: Vec<(i64, i64)>,
-    ) -> Result<String, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            let grabbed = session
-                .grab_ranges(
-                    ranges
-                        .iter()
-                        .map(|(s, e)| RangeInclusive::<u64>::new(*s as u64, *e as u64))
-                        .collect::<Vec<RangeInclusive<u64>>>(),
-                )
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(serde_json::to_string(&grabbed)?)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<GrabbedElementList, stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .grab_ranges(
+                ranges
+                    .iter()
+                    .map(|(s, e)| RangeInclusive::<u64>::new(*s as u64, *e as u64))
+                    .collect::<Vec<RangeInclusive<u64>>>(),
+            )
+            .await
     }
 
     #[node_bindgen]
     async fn observe(
         &self,
-        options: String,
+        options: JSArrayBuffer,
         operation_id: String,
-    ) -> Result<(), ComputationErrorWrapper> {
-        let options: ObserveOptions = serde_json::from_str(&options)
-            .map_err(|e| ComputationError::Process(format!("Cannot parse source settings: {e}")))?;
-        if let Some(ref session) = self.session {
-            session
-                .observe(operations::uuid_from_str(&operation_id)?, options)
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<(), stypes::ComputationError> {
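+        // Observe options arrive as a binary-encoded JSArrayBuffer; decode them into
+        // stypes::ObserveOptions before starting the operation.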
+        let options =
+            stypes::ObserveOptions::decode(&options).map_err(stypes::ComputationError::Decoding)?;
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .observe(operations::uuid_from_str(&operation_id)?, options)
     }
 
     #[node_bindgen]
@@ -539,25 +405,21 @@ impl RustSession {
         &self,
         filters: Vec<WrappedSearchFilter>,
         operation_id: String,
-    ) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            info!(
-                target: targets::SESSION,
-                "Search (operation: {}) will be done withing next filters: {:?}",
-                operation_id,
-                filters
-            );
-            session
-                .apply_search_filters(
-                    operations::uuid_from_str(&operation_id)?,
-                    filters.iter().map(|f| f.as_filter()).collect(),
-                )
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<(), stypes::ComputationError> {
+        let session = self
+            .session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?;
+        info!(
+            target: targets::SESSION,
+            "Search (operation: {}) will be done withing next filters: {:?}",
+            operation_id,
+            filters
+        );
+        session.apply_search_filters(
+            operations::uuid_from_str(&operation_id)?,
+            filters.iter().map(|f| f.as_filter()).collect(),
+        )
     }
 
     #[node_bindgen]
@@ -565,52 +427,36 @@ impl RustSession {
         &self,
         filters: Vec<String>,
         operation_id: String,
-    ) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            info!(
-                target: targets::SESSION,
-                "Search values (operation: {}) will be done withing next filters: {:?}",
-                operation_id,
-                filters
-            );
-            session
-                .apply_search_values_filters(operations::uuid_from_str(&operation_id)?, filters)
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<(), stypes::ComputationError> {
+        let session = self
+            .session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?;
+        info!(
+            target: targets::SESSION,
+            "Search values (operation: {}) will be done withing next filters: {:?}",
+            operation_id,
+            filters
+        );
+        session.apply_search_values_filters(operations::uuid_from_str(&operation_id)?, filters)
     }
 
     #[node_bindgen]
-    async fn drop_search(&self) -> Result<bool, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session.drop_search().await.map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn drop_search(&self) -> Result<bool, stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .drop_search()
+            .await
     }
 
     #[node_bindgen]
-    async fn get_sources_definitions(
-        &self,
-    ) -> Result<Vec<WrappedSourceDefinition>, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            Ok(session
-                .get_sources()
-                .await
-                .map_err(ComputationErrorWrapper)?
-                .iter()
-                .map(|s| WrappedSourceDefinition(s.clone()))
-                .collect::<Vec<WrappedSourceDefinition>>())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn get_sources_definitions(&self) -> Result<stypes::Sources, stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .get_sources()
+            .await
     }
 
     #[node_bindgen]
@@ -618,25 +464,21 @@ impl RustSession {
         &self,
         filters: Vec<WrappedSearchFilter>,
         operation_id: String,
-    ) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            info!(
-                target: targets::SESSION,
-                "Extract (operation: {}) will be done withing next filters: {:?}",
-                operation_id,
-                filters
-            );
-            session
-                .extract_matches(
-                    operations::uuid_from_str(&operation_id)?,
-                    filters.iter().map(|f| f.as_filter()).collect(),
-                )
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<(), stypes::ComputationError> {
+        let session = self
+            .session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?;
+        info!(
+            target: targets::SESSION,
+            "Extract (operation: {}) will be done withing next filters: {:?}",
+            operation_id,
+            filters
+        );
+        session.extract_matches(
+            operations::uuid_from_str(&operation_id)?,
+            filters.iter().map(|f| f.as_filter()).collect(),
+        )
     }
 
     #[node_bindgen]
@@ -646,42 +488,38 @@ impl RustSession {
         dataset_len: i32,
         from: Option<i64>,
         to: Option<i64>,
-    ) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            let mut range: Option<(u64, u64)> = None;
-            if let Some(from) = from {
-                if let Some(to) = to {
-                    if from >= 0 && to >= 0 {
-                        if from <= to {
-                            range = Some((from as u64, to as u64));
-                        } else {
-                            warn!(
-                                target: targets::SESSION,
-                                "Invalid range (operation: {}): from = {}; to = {}",
-                                operation_id,
-                                from,
-                                to
-                            );
-                        }
+    ) -> Result<(), stypes::ComputationError> {
+        let session = self
+            .session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?;
+        let mut range: Option<(u64, u64)> = None;
+        if let Some(from) = from {
+            if let Some(to) = to {
+                if from >= 0 && to >= 0 {
+                    if from <= to {
+                        range = Some((from as u64, to as u64));
+                    } else {
+                        warn!(
+                            target: targets::SESSION,
+                            "Invalid range (operation: {}): from = {}; to = {}",
+                            operation_id,
+                            from,
+                            to
+                        );
                     }
                 }
             }
-            info!(
-                target: targets::SESSION,
-                "Map requested (operation: {}). Range: {:?}", operation_id, range
-            );
-            session
-                .get_map(
-                    operations::uuid_from_str(&operation_id)?,
-                    dataset_len as u16,
-                    range,
-                )
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
         }
+        info!(
+            target: targets::SESSION,
+            "Map requested (operation: {}). Range: {:?}", operation_id, range
+        );
+        session.get_map(
+            operations::uuid_from_str(&operation_id)?,
+            dataset_len as u16,
+            range,
+        )
     }
 
     #[node_bindgen]
@@ -691,34 +529,30 @@ impl RustSession {
         dataset_len: i32,
         from: Option<i64>,
         to: Option<i64>,
-    ) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            let range: Option<RangeInclusive<u64>> = if let (Some(from), Some(to)) = (from, to) {
-                if from < 0 || to < 0 || from > to {
-                    return Err(ComputationErrorWrapper(ComputationError::InvalidArgs(
-                        format!("Invalid range:from = {from}; to = {to}"),
-                    )));
-                }
-                Some(RangeInclusive::new(from as u64, to as u64))
-            } else {
-                None
-            };
-            info!(
-                target: targets::SESSION,
-                "Values requested (operation: {}). Range: {:?}", operation_id, range
-            );
-            session
-                .get_values(
-                    operations::uuid_from_str(&operation_id)?,
-                    dataset_len as u16,
-                    range,
-                )
-                .map_err(ComputationErrorWrapper)
+    ) -> Result<(), stypes::ComputationError> {
+        let session = self
+            .session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?;
+        let range: Option<RangeInclusive<u64>> = if let (Some(from), Some(to)) = (from, to) {
+            if from < 0 || to < 0 || from > to {
+                return Err(stypes::ComputationError::InvalidArgs(format!(
+                    "Invalid range:from = {from}; to = {to}"
+                )));
+            }
+            Some(RangeInclusive::new(from as u64, to as u64))
         } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+            None
+        };
+        info!(
+            target: targets::SESSION,
+            "Values requested (operation: {}). Range: {:?}", operation_id, range
+        );
+        session.get_values(
+            operations::uuid_from_str(&operation_id)?,
+            dataset_len as u16,
+            range,
+        )
     }
 
     #[node_bindgen]
@@ -726,116 +560,91 @@ impl RustSession {
         &self,
         operation_id: String,
         position_in_stream: i64,
-    ) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .get_nearest_to(
-                    operations::uuid_from_str(&operation_id)?,
-                    position_in_stream as u64,
-                )
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<(), stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .get_nearest_to(
+                operations::uuid_from_str(&operation_id)?,
+                position_in_stream as u64,
+            )
     }
 
     #[node_bindgen]
     async fn send_into_sde(
         &self,
         target: String,
-        msg: String,
-    ) -> Result<String, ComputationErrorWrapper> {
-        let request = serde_json::from_str::<sde::SdeRequest>(&msg)
-            .map_err(|e| ComputationErrorWrapper(ComputationError::IoOperation(e.to_string())))?;
-        if let Some(ref session) = self.session {
-            let response = session
-                .send_into_sde(operations::uuid_from_str(&target)?, request)
-                .await
-                .map_err(ComputationErrorWrapper)?;
-            Ok(serde_json::to_string(&response).map_err(|e| {
-                ComputationErrorWrapper(ComputationError::IoOperation(e.to_string()))
-            })?)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
-    }
-
-    #[node_bindgen]
-    async fn get_attachments(&self) -> Result<String, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            let attachments = session
+        request: JSArrayBuffer,
+    ) -> Result<stypes::SdeResponse, stypes::ComputationError> {
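+        // The SDE request arrives as a binary-encoded JSArrayBuffer; decode it into
+        // stypes::SdeRequest before passing it to the session.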
+        let request =
+            stypes::SdeRequest::decode(&request).map_err(stypes::ComputationError::Decoding)?;
+        let session = self
+            .session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?;
+        session
+            .send_into_sde(operations::uuid_from_str(&target)?, request)
+            .await
+    }
+
+    #[node_bindgen]
+    async fn get_attachments(&self) -> Result<stypes::AttachmentList, stypes::ComputationError> {
+        let session = self
+            .session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?;
+        let attachments =
+            session
                 .state
                 .get_attachments()
                 .await
-                .map_err(|e: NativeError| {
-                    <ComputationError as Into<ComputationErrorWrapper>>::into(
-                        ComputationError::NativeError(e),
+                .map_err(|e: stypes::NativeError| {
+                    <stypes::ComputationError as Into<stypes::ComputationError>>::into(
+                        stypes::ComputationError::NativeError(e),
                     )
                 })?;
-            Ok(serde_json::to_string(&attachments).map_err(|e| {
-                ComputationErrorWrapper(ComputationError::IoOperation(e.to_string()))
-            })?)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+        Ok(stypes::AttachmentList(attachments))
     }
 
     #[node_bindgen]
-    async fn get_indexed_ranges(&self) -> Result<String, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            let ranges = session
+    async fn get_indexed_ranges(&self) -> Result<stypes::Ranges, stypes::ComputationError> {
+        let session = self
+            .session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?;
+        let ranges =
+            session
                 .state
                 .get_indexed_ranges()
                 .await
-                .map_err(|e: NativeError| {
-                    <ComputationError as Into<ComputationErrorWrapper>>::into(
-                        ComputationError::NativeError(e),
+                .map_err(|e: stypes::NativeError| {
+                    <stypes::ComputationError as Into<stypes::ComputationError>>::into(
+                        stypes::ComputationError::NativeError(e),
                     )
                 })?;
-            Ok(serde_json::to_string(&ranges).map_err(|e| {
-                ComputationErrorWrapper(ComputationError::IoOperation(e.to_string()))
-            })?)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+        Ok(ranges.into())
     }
 
     #[node_bindgen]
-    async fn set_debug(&self, debug: bool) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .state
-                .set_debug(debug)
-                .await
-                .map_err(|e: NativeError| ComputationError::NativeError(e).into())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn set_debug(&self, debug: bool) -> Result<(), stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .state
+            .set_debug(debug)
+            .await
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     #[node_bindgen]
-    async fn get_operations_stat(&self) -> Result<String, ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .tracker
-                .get_operations_stat()
-                .await
-                .map_err(|e: NativeError| ComputationError::NativeError(e).into())
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    async fn get_operations_stat(&self) -> Result<String, stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .tracker
+            .get_operations_stat()
+            .await
+            .map_err(stypes::ComputationError::NativeError)
     }
 
     #[node_bindgen]
@@ -844,47 +653,60 @@ impl RustSession {
         operation_id: String,
         ms: i64,
         ignore_cancellation: bool,
-    ) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .sleep(
-                    operations::uuid_from_str(&operation_id)?,
-                    ms as u64,
-                    ignore_cancellation,
-                )
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
-        }
+    ) -> Result<(), stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .sleep(
+                operations::uuid_from_str(&operation_id)?,
+                ms as u64,
+                ignore_cancellation,
+            )
     }
 
     #[node_bindgen]
-    async fn trigger_state_error(&self) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .trigger_state_error()
-                .await
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
+    async fn trigger_state_error(&self) -> Result<(), stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .trigger_state_error()
+            .await
+    }
+
+    #[node_bindgen]
+    async fn trigger_tracker_error(&self) -> Result<(), stypes::ComputationError> {
+        self.session
+            .as_ref()
+            .ok_or(stypes::ComputationError::SessionUnavailable)?
+            .trigger_tracker_error()
+            .await
+    }
+
+    #[node_bindgen]
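+    /// Test helper: builds 50 synthetic GrabbedElement records and returns them
+    /// serialized as a JSON string.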
+    fn test_grab_els_as_json(&self) -> Result<String, stypes::ComputationError> {
+        let mut els = Vec::new();
+        for pos in 0..50 {
+            els.push(stypes::GrabbedElement {
+                source_id: 0,
+                nature: 0,
+                content: format!("{pos}Test line content:{}", " test ".repeat(pos + 1)),
+                pos,
+            })
         }
+        serde_json::to_string(&els).map_err(|_| stypes::ComputationError::InvalidData)
     }
 
     #[node_bindgen]
-    async fn trigger_tracker_error(&self) -> Result<(), ComputationErrorWrapper> {
-        if let Some(ref session) = self.session {
-            session
-                .trigger_tracker_error()
-                .await
-                .map_err(ComputationErrorWrapper)
-        } else {
-            Err(ComputationErrorWrapper(
-                ComputationError::SessionUnavailable,
-            ))
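+    /// Test helper: builds the same 50 synthetic GrabbedElement records and returns
+    /// them wrapped in a stypes::GrabbedElementList.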
+    fn test_grab_els_as_bin(&self) -> Result<stypes::GrabbedElementList, stypes::ComputationError> {
+        let mut els = Vec::new();
+        for pos in 0..50 {
+            els.push(stypes::GrabbedElement {
+                source_id: 0,
+                nature: 0,
+                content: format!("{pos}Test line content:{}", " test ".repeat(pos + 1)),
+                pos,
+            })
         }
+        Ok(stypes::GrabbedElementList(els))
     }
 }
diff --git a/application/apps/rustcore/rs-bindings/src/js/session/progress_tracker.rs b/application/apps/rustcore/rs-bindings/src/js/session/progress_tracker.rs
index fc8fce1940..3d79b56dde 100644
--- a/application/apps/rustcore/rs-bindings/src/js/session/progress_tracker.rs
+++ b/application/apps/rustcore/rs-bindings/src/js/session/progress_tracker.rs
@@ -1,11 +1,8 @@
-use super::events::{ComputationErrorWrapper, LifecycleTransitionWrapper};
 use log::trace;
 use node_bindgen::derive::node_bindgen;
-use session::{
-    events::ComputationError,
-    progress::{run_tracking, ProgressCommand, ProgressTrackerAPI},
-};
+use session::progress::{run_tracking, ProgressCommand, ProgressTrackerAPI};
 use std::thread;
+use stypes::LifecycleTransition;
 use tokio::{runtime::Runtime, sync::mpsc::UnboundedReceiver};
 
 struct RustProgressTracker {
@@ -25,12 +22,12 @@ impl RustProgressTracker {
     }
 
     #[node_bindgen(mt)]
-    async fn init<F: Fn(LifecycleTransitionWrapper) + Send + 'static>(
+    async fn init<F: Fn(LifecycleTransition) + Send + 'static>(
         &mut self,
         callback: F,
-    ) -> Result<(), ComputationErrorWrapper> {
+    ) -> Result<(), stypes::ComputationError> {
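+        // Lifecycle transitions reported by the tracker are forwarded to the JS callback
+        // from a background tokio runtime.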
         let rt = Runtime::new().map_err(|e| {
-            ComputationError::Process(format!("Could not start tokio runtime: {e}"))
+            stypes::ComputationError::Process(format!("Could not start tokio runtime: {e}"))
         })?;
         if let Some(rx_events) = self.rx_events.take() {
             let (result_tx, result_rx) = std::sync::mpsc::channel();
@@ -41,7 +38,7 @@ impl RustProgressTracker {
                         Ok(mut rx) => {
                             let _ = result_tx.send(Ok(()));
                             while let Some(progress_report) = rx.recv().await {
-                                callback(LifecycleTransitionWrapper(progress_report))
+                                callback(progress_report)
                             }
                         }
                         Err(e) => {
@@ -50,34 +47,23 @@ impl RustProgressTracker {
                     }
                 })
             });
-            result_rx
-                .recv()
-                .map_err(|_| {
-                    ComputationErrorWrapper(ComputationError::Protocol(
-                        "could not setup tracking".to_string(),
-                    ))
-                })?
-                .map_err(ComputationErrorWrapper)
+            result_rx.recv().map_err(|_| {
+                stypes::ComputationError::Protocol("could not setup tracking".to_string())
+            })?
         } else {
-            Err(ComputationErrorWrapper(ComputationError::Protocol(
+            Err(stypes::ComputationError::Protocol(
                 "Could not init progress_tracker".to_string(),
-            )))
+            ))
         }
     }
 
     #[node_bindgen]
-    async fn stats(&self) -> Result<String, ComputationErrorWrapper> {
-        self.tracker_api
-            .content()
-            .await
-            .map_err(ComputationErrorWrapper)
+    async fn stats(&self) -> Result<String, stypes::ComputationError> {
+        self.tracker_api.content().await
     }
 
     #[node_bindgen]
-    async fn destroy(&self) -> Result<(), ComputationErrorWrapper> {
-        self.tracker_api
-            .abort()
-            .await
-            .map_err(ComputationErrorWrapper)
+    async fn destroy(&self) -> Result<(), stypes::ComputationError> {
+        self.tracker_api.abort().await
     }
 }
diff --git a/application/apps/rustcore/ts-bindings/package.json b/application/apps/rustcore/ts-bindings/package.json
index 088d7acb99..741b3cdf28 100644
--- a/application/apps/rustcore/ts-bindings/package.json
+++ b/application/apps/rustcore/ts-bindings/package.json
@@ -37,6 +37,7 @@
   },
   "dependencies": {
     "platform": "link:../../../platform",
+    "protocol": "link:../../protocol/pkg",
     "tslib": "^2.6.2",
     "uuid": "^9.0.1"
   },
diff --git a/application/apps/rustcore/ts-bindings/spec/_session.benchmark.spec.ts b/application/apps/rustcore/ts-bindings/spec/_session.benchmark.spec.ts
index fe255f9d45..2951c782d3 100644
--- a/application/apps/rustcore/ts-bindings/spec/_session.benchmark.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/_session.benchmark.spec.ts
@@ -3,15 +3,10 @@
 import { initLogger } from './logger';
 initLogger();
 import { Factory } from '../src/api/session';
-import { IAttachmentsUpdatedUpdated } from '../src/api/session.provider';
-import { IAttachment } from 'platform/types/content';
 import { createSampleFile, finish, performanceReport, setMeasurement } from './common';
 import { readBenchmarkConfigurationFile } from './config_benchmarks';
 import { IndexingMode } from 'platform/types/content';
 import * as runners from './runners';
-import * as fs from 'fs';
-import * as os from 'os';
-import * as path from 'path';
 
 const config = readBenchmarkConfigurationFile().get().tests.benchmark;
 
@@ -34,119 +29,150 @@ describe('Benchmark Tests', function () {
                     file: '',
                 },
                 1,
-                async (logger, done, {session, stream, events, search}) => {
+                async (logger, done, { session, stream, events, search }) => {
                     const measurement = setMeasurement();
-                        let home_dir = (process.env as any)['SH_HOME_DIR'];
-                        if (!home_dir || typeof home_dir !== 'string' || home_dir.trim() === '') {
-                            throw new Error('Environment variable SH_HOME_DIR is not set or is invalid.');
-                        }
+                    let home_dir = (process.env as any)['SH_HOME_DIR'];
+                    if (!home_dir || typeof home_dir !== 'string' || home_dir.trim() === '') {
+                        throw new Error(
+                            'Environment variable SH_HOME_DIR is not set or is invalid.',
+                        );
+                    }
 
-                        switch (testId) {
-                            case 'test1':
-                                stream
-                                    .observe(
-                                        new Factory.File()
-                                            .asText()
-                                            .type(Factory.FileType.Text)
-                                            .file(`${home_dir}/${test.file}`)
-                                            .get()
-                                            .sterilized(),
-                                    )
-                                    .catch(finish.bind(null, session, done));
-                                break;
-                            case 'test2':
-                                stream
-                                    .observe(
-                                        new Factory.File()
-                                            .type(Factory.FileType.Binary)
-                                            .file(`${home_dir}/${test.file}`)
-                                            .asDlt({
-                                                filter_config: undefined,
-                                                fibex_file_paths: [],
-                                                with_storage_header: true,
-                                                tz: undefined,
-                                            })
-                                            .get()
-                                            .sterilized(),
-                                    )
-                                    .catch(finish.bind(null, session, done));
-                                break;
-                            case 'test3':
-                                stream
-                                    .observe(
-                                        new Factory.File()
-                                            .type(Factory.FileType.PcapNG)
-                                            .file(`${home_dir}/${test.file}`)
-                                            .asDlt({
-                                                filter_config: undefined,
-                                                fibex_file_paths: [],
-                                                with_storage_header: false,
-                                                tz: undefined,
-                                            })
-                                            .get()
-                                            .sterilized(),
-                                    )
-                                    .catch(finish.bind(null, session, done));
-                                break;
-                            case 'test4':
-                                const tmpobj1 = createSampleFile(
-                                    5000,
-                                    logger,
-                                    (i: number) => `some line data: ${i}\n`
-                                );
-
-                                let { session: startupSession, stream: startupStream, events: startupEvents, search: startupSearch } = await runners.initializeSession(testName);
-
-                                startupStream.observe(
-                                    new Factory.Stream()
+                    switch (testId) {
+                        case 'test1':
+                            stream
+                                .observe(
+                                    new Factory.File()
                                         .asText()
-                                        .process({
-                                            command: `less ${tmpobj1.name}`,
-                                            cwd: process.cwd(),
-                                            envs: process.env as { [key: string]: string },
+                                        .type(Factory.FileType.Text)
+                                        .file(`${home_dir}/${test.file}`)
+                                        .get()
+                                        .sterilized(),
+                                )
+                                .catch(finish.bind(null, session, done));
+                            break;
+                        case 'test2':
+                            stream
+                                .observe(
+                                    new Factory.File()
+                                        .type(Factory.FileType.Binary)
+                                        .file(`${home_dir}/${test.file}`)
+                                        .asDlt({
+                                            filter_config: undefined,
+                                            fibex_file_paths: [],
+                                            with_storage_header: true,
+                                            tz: undefined,
                                         })
                                         .get()
-                                        .sterilized()
-                                );
-                                const startupResults = measurement();
-                                const startupReport = performanceReport(testName, startupResults.ms, test.expectation_ms, `${home_dir}/${test.file}`);
-                                finish([startupSession, session], done, startupReport ? undefined : new Error(`${testName} is fail`));
-                                break;
-                            case 'test5':
-                                const tmpobj2 = createSampleFile(
-                                    5000,
-                                    logger,
-                                    (i: number) => `some line data: ${i}\n`
-                                );
-
-                                stream.observe(
-                                    new Factory.Stream()
-                                        .asText()
-                                        .process({
-                                            command: `less ${tmpobj2.name}`,
-                                            cwd: process.cwd(),
-                                            envs: process.env as { [key: string]: string },
+                                        .sterilized(),
+                                )
+                                .catch(finish.bind(null, session, done));
+                            break;
+                        case 'test3':
+                            stream
+                                .observe(
+                                    new Factory.File()
+                                        .type(Factory.FileType.PcapNG)
+                                        .file(`${home_dir}/${test.file}`)
+                                        .asDlt({
+                                            filter_config: undefined,
+                                            fibex_file_paths: [],
+                                            with_storage_header: false,
+                                            tz: undefined,
                                         })
                                         .get()
-                                        .sterilized()
+                                        .sterilized(),
+                                )
+                                .catch(finish.bind(null, session, done));
+                            break;
+                        case 'test4':
+                            const tmpobj1 = createSampleFile(
+                                5000,
+                                logger,
+                                (i: number) => `some line data: ${i}\n`,
+                            );
+
+                            let {
+                                session: startupSession,
+                                stream: startupStream,
+                                events: startupEvents,
+                                search: startupSearch,
+                            } = await runners.initializeSession(testName);
+
+                            startupStream.observe(
+                                new Factory.Stream()
+                                    .asText()
+                                    .process({
+                                        command: `less ${tmpobj1.name}`,
+                                        cwd: process.cwd(),
+                                        envs: process.env as { [key: string]: string },
+                                    })
+                                    .get()
+                                    .sterilized(),
+                            );
+                            const startupResults = measurement();
+                            const startupReport = performanceReport(
+                                testName,
+                                startupResults.ms,
+                                test.expectation_ms,
+                                `${home_dir}/${test.file}`,
+                            );
+                            finish(
+                                [startupSession, session],
+                                done,
+                                startupReport ? undefined : new Error(`${testName} failed`),
+                            );
+                            break;
+                        case 'test5':
+                            const tmpobj2 = createSampleFile(
+                                5000,
+                                logger,
+                                (i: number) => `some line data: ${i}\n`,
+                            );
+
+                            stream.observe(
+                                new Factory.Stream()
+                                    .asText()
+                                    .process({
+                                        command: `less ${tmpobj2.name}`,
+                                        cwd: process.cwd(),
+                                        envs: process.env as { [key: string]: string },
+                                    })
+                                    .get()
+                                    .sterilized(),
+                            );
+                            const shutdownResult = measurement();
+                            const shutdownReport = performanceReport(
+                                testName,
+                                shutdownResult.ms,
+                                test.expectation_ms,
+                                `${home_dir}/${test.file}`,
+                            );
+                            finish(
+                                session,
+                                done,
+                                shutdownReport ? undefined : new Error(`${testName} failed`),
+                            );
+                            break;
+                        case 'test6':
+                            const multiSessions = [session];
+                            for (let i = 0; i < 50; i++) {
+                                const file = createSampleFile(
+                                    100,
+                                    logger,
+                                    (j: number) => `file ${i} line data: ${j}\n`,
                                 );
-                                const shutdownResult = measurement();
-                                const shutdownReport = performanceReport(testName, shutdownResult.ms, test.expectation_ms, `${home_dir}/${test.file}`);
-                                finish(session, done, shutdownReport ? undefined : new Error(`${testName} is fail`));
-                                break;
-                            case 'test6':
-                                const multiSessions = [session];
-                                for (let i = 0; i < 50; i++) {
-                                    const file = createSampleFile(
-                                        100,
-                                        logger,
-                                        (j: number) => `file ${i} line data: ${j}\n`
-                                    );
 
-                                    let { session: multiSession, stream: multiSessionStream, events: multiSessionEvents, search: multiSessionSearch } = await runners.initializeSession(testName);
-                                    multiSessions.push(multiSession);
+                                let {
+                                    session: multiSession,
+                                    stream: multiSessionStream,
+                                    events: multiSessionEvents,
+                                    search: multiSessionSearch,
+                                } = await runners.initializeSession(testName);
+                                multiSessions.push(multiSession);
 
-                                    multiSessionStream.observe(
+                                multiSessionStream
+                                    .observe(
                                         new Factory.Stream()
                                             .asText()
                                             .process({
@@ -155,115 +181,137 @@ describe('Benchmark Tests', function () {
                                                 envs: process.env as { [key: string]: string },
                                             })
                                             .get()
-                                            .sterilized()
-                                    ).catch((err: Error) => `File ${i} failed to open: ${err.message}`);
-                                }
-                                const testResult = measurement();
-                                const testReport = performanceReport(testName, testResult.ms, test.expectation_ms, `${home_dir}/${test.file}`);
-                                finish(multiSessions, done, testReport ? undefined : new Error(`${testName} is fail`));
-                                break;
-                            case 'test7':
-                                let controlSum = 0;
-                                let countMatches = 0;
-                                let read: boolean = false;
-                                stream
-                                    .observe(
-                                        new Factory.File()
-                                            .asText()
-                                            .type(Factory.FileType.Text)
-                                            .file(`${home_dir}/${test.file}`)
-                                            .get().sterilized(),
+                                            .sterilized(),
                                     )
-                                    .catch(finish.bind(null, session, done));
-                                const updates: number[] = [];
-                                events.IndexedMapUpdated.subscribe((event: any) => {
-                                    event.len > 0 && updates.push(event.len);
-                                });
-                                events.StreamUpdated.subscribe(async () => {
-                                    read = true;
-                                    try {
-                                        await search.search([
+                                    .catch(
+                                        (err: Error) => `File ${i} failed to open: ${err.message}`,
+                                    );
+                            }
+                            const testResult = measurement();
+                            const testReport = performanceReport(
+                                testName,
+                                testResult.ms,
+                                test.expectation_ms,
+                                `${home_dir}/${test.file}`,
+                            );
+                            finish(
+                                multiSessions,
+                                done,
+                                testReport ? undefined : new Error(`${testName} failed`),
+                            );
+                            break;
+                        case 'test7':
+                            let controlSum = 0;
+                            let countMatches = 0;
+                            let read: boolean = false;
+                            stream
+                                .observe(
+                                    new Factory.File()
+                                        .asText()
+                                        .type(Factory.FileType.Text)
+                                        .file(`${home_dir}/${test.file}`)
+                                        .get()
+                                        .sterilized(),
+                                )
+                                .catch(finish.bind(null, session, done));
+                            const updates: number[] = [];
+                            events.IndexedMapUpdated.subscribe((event: any) => {
+                                event.len > 0 && updates.push(event.len);
+                            });
+                            events.StreamUpdated.subscribe(async () => {
+                                read = true;
+                                try {
+                                    await search.search([
+                                        {
+                                            filter: 'HTTP',
+                                            flags: { reg: true, word: true, cases: false },
+                                        },
+                                    ]);
+                                    let items = await stream.grabIndexed(0, countMatches);
+                                    await stream.setIndexingMode(IndexingMode.Breadcrumbs);
+                                    finish(session, done);
+                                } catch (err) {
+                                    finish(
+                                        undefined,
+                                        done,
+                                        new Error(
+                                            `Failed to finish test due to error: ${
+                                                err instanceof Error ? err.message : err
+                                            }`,
+                                        ),
+                                    );
+                                }
+                            });
+                            break;
+                        case 'test8':
+                            stream
+                                .observe(
+                                    new Factory.File()
+                                        .asText()
+                                        .type(Factory.FileType.Text)
+                                        .file(`${home_dir}/${test.file}`)
+                                        .get()
+                                        .sterilized(),
+                                )
+                                .on('processing', () => {
+                                    search
+                                        .search([
                                             {
-                                                filter: 'HTTP',
-                                                flags: { reg: true, word: true, cases: false },
+                                                filter: 'http',
+                                                flags: { reg: true, word: false, cases: false },
                                             },
-                                        ]);
-                                        let items = await stream.grabIndexed(0, countMatches);
-                                        await stream.setIndexingMode(IndexingMode.Breadcrumbs);
-                                        finish(session, done);
-                                    } catch (err) {
-                                        finish(
-                                            undefined,
-                                            done,
-                                            new Error(
-                                                `Fail to finish test due error: ${
-                                                    err instanceof Error ? err.message : err
-                                                }`,
-                                            ),
-                                        );
-                                    }
-                                });
-                                break;
-                            case 'test8':
-                                stream
-                                    .observe(
-                                        new Factory.File()
-                                            .asText()
-                                            .type(Factory.FileType.Text)
-                                            .file(`${home_dir}/${test.file}`)
-                                            .get()
-                                            .sterilized(),
-                                    )
-                                    .on('processing', () => {
-                                        search
-                                            .search([
-                                                {
-                                                    filter: 'http',
-                                                    flags: { reg: true, word: false, cases: false },
-                                                },
-                                            ])
-                                            .catch(finish.bind(null, session, done));
-                                    })
-                                    .catch(finish.bind(null, session, done));
-                                break;
-                            case 'test9':
-                                stream
-                                    .observe(
-                                        new Factory.File()
-                                            .asText()
-                                            .type(Factory.FileType.Text)
-                                            .file(`${home_dir}/${test.file}`)
-                                            .get()
-                                            .sterilized(),
-                                    )
-                                    .on('processing', () => {
-                                        search
-                                            .search([
-                                                {
-                                                    filter: 'http://www.almhuette-raith.at',
-                                                    flags: { reg: true, word: false, cases: false },
-                                                },
-                                                {
-                                                    filter: 'com.apple.hiservices-xpcservice',
-                                                    flags: { reg: true, word: false, cases: false },
-                                                },
-                                                {
-                                                    filter: 'Google Chrome Helper',
-                                                    flags: { reg: true, word: false, cases: false },
-                                                },
-                                            ])
-                                            .catch(finish.bind(null, session, done));
-                                    })
-                                    .catch(finish.bind(null, session, done));
-                                break;
-                            default:
-                                throw new Error(`Unsupported format or alias: ${test.alias}`);
-                        }
-                        events.FileRead.subscribe(() => {
-                            const results = measurement();
-                            const reportResult = performanceReport(testName, results.ms, test.expectation_ms, `${home_dir}/${test.file}`);
-                            finish(session, done, reportResult ? undefined : new Error(`${testName} is fail`));
-                        });
+                                        ])
+                                        .catch(finish.bind(null, session, done));
+                                })
+                                .catch(finish.bind(null, session, done));
+                            break;
+                        case 'test9':
+                            stream
+                                .observe(
+                                    new Factory.File()
+                                        .asText()
+                                        .type(Factory.FileType.Text)
+                                        .file(`${home_dir}/${test.file}`)
+                                        .get()
+                                        .sterilized(),
+                                )
+                                .on('processing', () => {
+                                    search
+                                        .search([
+                                            {
+                                                filter: 'http://www.almhuette-raith.at',
+                                                flags: { reg: true, word: false, cases: false },
+                                            },
+                                            {
+                                                filter: 'com.apple.hiservices-xpcservice',
+                                                flags: { reg: true, word: false, cases: false },
+                                            },
+                                            {
+                                                filter: 'Google Chrome Helper',
+                                                flags: { reg: true, word: false, cases: false },
+                                            },
+                                        ])
+                                        .catch(finish.bind(null, session, done));
+                                })
+                                .catch(finish.bind(null, session, done));
+                            break;
+                        default:
+                            throw new Error(`Unsupported format or alias: ${test.alias}`);
+                    }
+                    events.FileRead.subscribe(() => {
+                        const results = measurement();
+                        const reportResult = performanceReport(
+                            testName,
+                            results.ms,
+                            test.expectation_ms,
+                            `${home_dir}/${test.file}`,
+                        );
+                        finish(
+                            session,
+                            done,
+                            reportResult ? undefined : new Error(`${testName} failed`),
+                        );
+                    });
                 },
             );
         });
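
Note on the benchmark hunk above: every `testN` case follows the same pattern: build an observe request with a `Factory` builder, read the elapsed time from `measurement()`, turn it into a pass/fail verdict via `performanceReport(...)`, and release the session(s) with `finish(...)`. The TypeScript sketch below condenses that shared flow; the helper signatures are inferred from their call sites in this spec and are assumptions, not the real declarations.

    // Sketch only: helper signatures inferred from usage in the benchmark spec above.
    type Measurement = () => { ms: number };
    type PerformanceReport = (name: string, ms: number, limitMs: number, source: string) => boolean;
    type Finish = (sessions: unknown, done: () => void, err?: Error) => void;

    async function runTimedCase(
        testName: string,
        expectationMs: number,
        source: string,
        observe: () => Promise<void>, // e.g. stream.observe(new Factory.File()...get().sterilized())
        measurement: Measurement,     // started by the runner before the case executes
        performanceReport: PerformanceReport,
        finish: Finish,
        session: unknown,
        done: () => void,
    ): Promise<void> {
        await observe();
        const { ms } = measurement();
        const ok = performanceReport(testName, ms, expectationMs, source);
        finish(session, done, ok ? undefined : new Error(`${testName} failed`));
    }
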
diff --git a/application/apps/rustcore/ts-bindings/spec/config.ts b/application/apps/rustcore/ts-bindings/spec/config.ts
index 4db723d2b7..775deb23a1 100644
--- a/application/apps/rustcore/ts-bindings/spec/config.ts
+++ b/application/apps/rustcore/ts-bindings/spec/config.ts
@@ -31,6 +31,9 @@ export interface IConfiguration {
         observe: {
             regular: IRegularTests;
         };
+        protocol: {
+            regular: IRegularTests;
+        };
         stream: {
             regular: IRegularTests;
         };
diff --git a/application/apps/rustcore/ts-bindings/spec/defaults.json b/application/apps/rustcore/ts-bindings/spec/defaults.json
index b724d6154e..36e6779c1d 100644
--- a/application/apps/rustcore/ts-bindings/spec/defaults.json
+++ b/application/apps/rustcore/ts-bindings/spec/defaults.json
@@ -28,6 +28,18 @@
         }
       }
     },
+    "protocol": {
+      "regular": {
+          "execute_only": [2,3],
+          "list": {
+              "1": "Test 1. CallbackEvent",
+              "2": "Test 2. Check all messages",
+              "3": "Test 3. Comparing JSON vs Protobuf"
+          },
+          "files": {
+          }
+      }
+    },
     "stream": {
       "regular": {
         "execute_only": [],
diff --git a/application/apps/rustcore/ts-bindings/spec/session.concat.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.concat.spec.ts
index c631772955..a2a6ac2848 100644
--- a/application/apps/rustcore/ts-bindings/spec/session.concat.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/session.concat.spec.ts
@@ -6,7 +6,7 @@
 import { initLogger } from './logger';
 initLogger();
 import { Factory } from '../src/api/session';
-import { IGrabbedElement } from 'platform/types/content';
+import { GrabbedElement } from 'platform/types/bindings';
 import { createSampleFile, finish } from './common';
 import { readConfigurationFile } from './config';
 
@@ -45,7 +45,7 @@ describe('Concat', function () {
                 grabbing = true;
                 comps.stream
                     .grab(98, 4)
-                    .then((result: IGrabbedElement[]) => {
+                    .then((result: GrabbedElement[]) => {
                         logger.debug('result of grab was: ' + JSON.stringify(result));
                         expect(result.map((i) => i.content)).toEqual([
                             'file a: some line data: 98',
@@ -117,7 +117,7 @@ describe('Concat', function () {
     //                     grabbing = true;
     //                     stream
     //                         .grab(1, 10)
-    //                         .then((result: IGrabbedElement[]) => {
+    //                         .then((result: GrabbedElement[]) => {
     //                             expect(result.length).toEqual(10);
     //                             logger.debug('result of grab was: ' + JSON.stringify(result));
     //                             finish(session, done);
@@ -194,7 +194,7 @@ describe('Concat', function () {
     //                     grabbing = true;
     //                     stream
     //                         .grab(1, 10)
-    //                         .then((result: IGrabbedElement[]) => {
+    //                         .then((result: GrabbedElement[]) => {
     //                             expect(result.length).toEqual(10);
     //                             logger.debug('result of grab was: ' + JSON.stringify(result));
     //                             finish(session, done);
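
Note: this hunk, and the errors, exporting, and indexes hunks below, replace `IGrabbedElement` from 'platform/types/content' with `GrabbedElement` from 'platform/types/bindings'; the row index field is renamed from `position` to `pos`. A rough shim illustrating the migration (the field list is an assumption based on the properties these tests touch):

    // Hypothetical shapes for illustration; the real types come from platform/types.
    interface LegacyGrabbedElement { position: number; source_id: number; content: string; nature: number }
    interface GrabbedElementSketch { pos: number; source_id: number; content: string; nature: number }

    function migrate(el: LegacyGrabbedElement): GrabbedElementSketch {
        const { position, ...rest } = el;
        return { pos: position, ...rest };
    }
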
diff --git a/application/apps/rustcore/ts-bindings/spec/session.errors.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.errors.spec.ts
index 9569da97f3..e73de8c107 100644
--- a/application/apps/rustcore/ts-bindings/spec/session.errors.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/session.errors.spec.ts
@@ -6,7 +6,7 @@
 import { initLogger } from './logger';
 initLogger();
 import { Factory } from '../src/api/session';
-import { IGrabbedElement } from 'platform/types/content';
+import { GrabbedElement } from 'platform/types/bindings';
 import { finish, createSampleFile } from './common';
 import { readConfigurationFile } from './config';
 import { error } from 'platform/log/utils';
@@ -82,7 +82,7 @@ describe('Errors', () => {
                 // While we do not have operation id
                 comps.stream
                     .grab(6000, 1000)
-                    .then((_result: IGrabbedElement[]) => {
+                    .then((_result: GrabbedElement[]) => {
                         finish(comps.session, done, new Error(`grabber should not return results`));
                     })
                     .catch((err: Error) => {
@@ -127,7 +127,7 @@ describe('Errors', () => {
                                     expect(len).toEqual(55);
                                     comps.search
                                         .grab(6000, 1000)
-                                        .then((result: IGrabbedElement[]) => {
+                                        .then((result: GrabbedElement[]) => {
                                             finish(
                                                 comps.session,
                                                 done,
@@ -176,7 +176,7 @@ describe('Errors', () => {
                 }
                 comps.stream
                     .grab(1, -2)
-                    .then((_result: IGrabbedElement[]) => {
+                    .then((_result: GrabbedElement[]) => {
                         finish(
                             comps.session,
                             done,
@@ -217,7 +217,7 @@ describe('Errors', () => {
                 grabbing = true;
                 comps.stream
                     .grab(-1, 2)
-                    .then((_result: IGrabbedElement[]) =>
+                    .then((_result: GrabbedElement[]) =>
                         finish(comps.session, done, new Error('Grab from invalid start worked')),
                     )
                     .catch((err: Error) => {
diff --git a/application/apps/rustcore/ts-bindings/spec/session.exporting.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.exporting.spec.ts
index 961418a494..2ef458d57d 100644
--- a/application/apps/rustcore/ts-bindings/spec/session.exporting.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/session.exporting.spec.ts
@@ -6,7 +6,7 @@
 import { initLogger } from './logger';
 initLogger();
 import { Factory } from '../src/api/session';
-import { IGrabbedElement } from 'platform/types/content';
+import { GrabbedElement } from 'platform/types/bindings';
 import { createSampleFile, finish, relativePath, rootPath } from './common';
 import { readConfigurationFile } from './config';
 import { fromIndexes } from 'platform/types/range';
@@ -25,17 +25,17 @@ describe('Exporting', function () {
             let controlSum = 0;
             const ranges = [
                 {
-                    from: 50,
-                    to: 100,
+                    start: 50,
+                    end: 100,
                 },
                 {
-                    from: 200,
-                    to: 300,
+                    start: 200,
+                    end: 300,
                 },
             ];
             const tmpobj = createSampleFile(1000, logger, (i: number) => {
                 ranges.forEach((r) => {
-                    if (i >= r.from && i <= r.to) {
+                    if (i >= r.start && i <= r.end) {
                         controlSum += i;
                     }
                 });
@@ -109,23 +109,23 @@ describe('Exporting', function () {
         return runners.withSession(config.regular, 2, async (logger, done, comps) => {
             const ranges = [
                 {
-                    from: 50,
-                    to: 90,
+                    start: 50,
+                    end: 90,
                 },
                 {
-                    from: 101,
-                    to: 150,
+                    start: 101,
+                    end: 150,
                 },
             ];
             let controlSum = 0;
             const tmpobj_a = createSampleFile(100, logger, (i: number) => {
-                if (i >= ranges[0].from && i <= ranges[0].to) {
+                if (i >= ranges[0].start && i <= ranges[0].end) {
                     controlSum += i;
                 }
                 return `____${i}____\n`;
             });
             const tmpobj_b = createSampleFile(100, logger, (i: number) => {
-                if (i >= ranges[1].from - 100 && i <= ranges[1].to - 100) {
+                if (i >= ranges[1].start - 100 && i <= ranges[1].end - 100) {
                     controlSum += i * 1000;
                 }
                 return `____${i * 1000}____\n`;
@@ -232,17 +232,13 @@ describe('Exporting', function () {
                             const output = path.resolve(os.tmpdir(), `${v4()}.logs`);
                             comps.search
                                 .grab(range.from, range.to)
-                                .then((grabbed: IGrabbedElement[]) => {
+                                .then((grabbed: GrabbedElement[]) => {
                                     comps.stream
-                                        .export(
-                                            output,
-                                            fromIndexes(grabbed.map((el) => el.position)),
-                                            {
-                                                columns: [],
-                                                spliter: undefined,
-                                                delimiter: undefined,
-                                            },
-                                        )
+                                        .export(output, fromIndexes(grabbed.map((el) => el.pos)), {
+                                            columns: [],
+                                            spliter: undefined,
+                                            delimiter: undefined,
+                                        })
                                         .then((_done) => {
                                             fs.promises
                                                 .readFile(output, { encoding: 'utf-8' })
@@ -336,7 +332,7 @@ describe('Exporting', function () {
                     .then((grabbed) => {
                         const output = path.resolve(os.tmpdir(), `${v4()}.logs`);
                         comps.stream
-                            .export(output, [{ from: 0, to: 8 }], {
+                            .export(output, [{ start: 0, end: 8 }], {
                                 columns: [],
                                 spliter: undefined,
                                 delimiter: undefined,
@@ -354,7 +350,7 @@ describe('Exporting', function () {
                                                     comps.session,
                                                     done,
                                                     new Error(
-                                                        `Rows are dismatch. Stream position ${grabbed[i].position}.`,
+                                                        `Rows do not match. Stream position ${grabbed[i].pos}.`,
                                                     ),
                                                 );
                                             }
@@ -432,7 +428,7 @@ describe('Exporting', function () {
                     .then((grabbed) => {
                         const output = path.resolve(os.tmpdir(), `${v4()}.dlt`);
                         comps.stream
-                            .exportRaw(output, [{ from: 0, to: 8 }])
+                            .exportRaw(output, [{ start: 0, end: 8 }])
                             .then(async () => {
                                 comps.session
                                     .destroy()
@@ -476,7 +472,7 @@ describe('Exporting', function () {
                                                                 session,
                                                                 done,
                                                                 new Error(
-                                                                    `Rows are dismatch. Stream position ${grabbed[i].position}.`,
+                                                                    `Rows do not match. Stream position ${grabbed[i].pos}.`,
                                                                 ),
                                                             );
                                                         }
@@ -569,7 +565,7 @@ describe('Exporting', function () {
                         expect(grabbed[10].source_id).toEqual(1);
                         const output = path.resolve(os.tmpdir(), `${v4()}.logs`);
                         comps.stream
-                            .export(output, [{ from: 0, to: 14 }], {
+                            .export(output, [{ start: 0, end: 14 }], {
                                 columns: [],
                                 spliter: undefined,
                                 delimiter: undefined,
@@ -587,7 +583,7 @@ describe('Exporting', function () {
                                                     comps.session,
                                                     done,
                                                     new Error(
-                                                        `Rows are dismatch. Stream position ${grabbed[i].position}.`,
+                                                        `Rows do not match. Stream position ${grabbed[i].pos}.`,
                                                     ),
                                                 );
                                             }
@@ -668,7 +664,7 @@ describe('Exporting', function () {
                         expect(grabbed[10].source_id).toEqual(1);
                         const output = path.resolve(os.tmpdir(), `${v4()}.logs`);
                         comps.stream
-                            .exportRaw(output, [{ from: 0, to: 14 }])
+                            .exportRaw(output, [{ start: 0, end: 14 }])
                             .then(() => {
                                 comps.session
                                     .destroy()
@@ -719,7 +715,7 @@ describe('Exporting', function () {
                                                                 session,
                                                                 done,
                                                                 new Error(
-                                                                    `Rows are dismatch. Stream position ${grabbed[i].position}.`,
+                                                                    `Rows do not match. Stream position ${grabbed[i].pos}.`,
                                                                 ),
                                                             );
                                                         }
@@ -806,26 +802,26 @@ describe('Exporting', function () {
                 }
                 const ranges = [
                     {
-                        from: 0,
-                        to: 5,
+                        start: 0,
+                        end: 5,
                     },
                     {
-                        from: 9,
-                        to: 14,
+                        start: 9,
+                        end: 14,
                     },
                 ];
                 gotten = true;
-                Promise.all(ranges.map((r) => comps.stream.grab(r.from, r.to - r.from)))
+                Promise.all(ranges.map((r) => comps.stream.grab(r.start, r.end - r.start)))
                     .then((results) => {
-                        let grabbed: IGrabbedElement[] = [];
+                        let grabbed: GrabbedElement[] = [];
                         results.forEach((g) => (grabbed = grabbed.concat(g)));
-                        grabbed.sort((a, b) => (a.position > b.position ? 1 : -1));
+                        grabbed.sort((a, b) => (a.pos > b.pos ? 1 : -1));
                         const output = path.resolve(os.tmpdir(), `${v4()}.logs`);
                         comps.stream
                             .exportRaw(
                                 output,
                                 ranges.map((r) => {
-                                    return { from: r.from, to: r.to - 1 };
+                                    return { start: r.start, end: r.end - 1 };
                                 }),
                             )
                             .then(() => {
@@ -870,7 +866,7 @@ describe('Exporting', function () {
                                                                 session,
                                                                 done,
                                                                 new Error(
-                                                                    `Rows are dismatch. Stream position ${grabbed[i].position}.`,
+                                                                    `Rows do not match. Stream position ${grabbed[i].pos}.`,
                                                                 ),
                                                             );
                                                         }
@@ -960,7 +956,7 @@ describe('Exporting', function () {
                     .then((grabbed) => {
                         const output = path.resolve(os.tmpdir(), `${v4()}.txt`);
                         comps.stream
-                            .export(output, [{ from: 0, to: 8 }], {
+                            .export(output, [{ start: 0, end: 8 }], {
                                 columns: [0, 1],
                                 spliter: '\u0004',
                                 delimiter: ';',
@@ -1053,7 +1049,7 @@ describe('Exporting', function () {
                     .then((grabbed) => {
                         const output = path.resolve(os.tmpdir(), `${v4()}.txt`);
                         comps.stream
-                            .export(output, [{ from: 0, to: 8 }], {
+                            .export(output, [{ start: 0, end: 8 }], {
                                 columns: [9, 10],
                                 spliter: '\u0004',
                                 delimiter: ';',
@@ -1145,7 +1141,7 @@ describe('Exporting', function () {
                     .then((grabbed) => {
                         const output = path.resolve(os.tmpdir(), `${v4()}.txt`);
                         comps.stream
-                            .export(output, [{ from: 0, to: 8 }], {
+                            .export(output, [{ start: 0, end: 8 }], {
                                 columns: [10],
                                 spliter: '\u0004',
                                 delimiter: ';',
@@ -1279,7 +1275,7 @@ describe('Exporting', function () {
                     cases.map((usecase) => {
                         const output = usecase.output;
                         return comps.stream
-                            .export(output, [{ from: 0, to: 8 }], usecase.options)
+                            .export(output, [{ start: 0, end: 8 }], usecase.options)
                             .then(async () => {
                                 fs.promises
                                     .readFile(output, { encoding: 'utf-8' })
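
Note: the exporting hunks above also rename the range fields passed to `stream.export` and `stream.exportRaw` from `{ from, to }` to `{ start, end }`. If older call sites need updating, a conversion along these lines should work (type names here are hypothetical):

    // Sketch: adapting legacy { from, to } ranges to the { start, end } shape used above.
    interface LegacyRange { from: number; to: number }
    interface RangeSketch { start: number; end: number }

    const toNewRange = (r: LegacyRange): RangeSketch => ({ start: r.from, end: r.to });

    // e.g. stream.export(output, [{ from: 0, to: 8 }].map(toNewRange), options)
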
diff --git a/application/apps/rustcore/ts-bindings/spec/session.indexes.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.indexes.spec.ts
index c024569138..1d2d26b63b 100644
--- a/application/apps/rustcore/ts-bindings/spec/session.indexes.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/session.indexes.spec.ts
@@ -18,241 +18,231 @@ describe('Indexes', function () {
     it(config.regular.list[1], function () {
         return runners.withSession(config.regular, 1, async (logger, done, comps) => {
             (async () => {
-                    let controlSum = 0;
-                    let countMatches = 0;
-                    const tmpobj = createSampleFile(50, logger, (i: number) => {
-                        controlSum += i % 10 == 0 ? i : 0;
-                        countMatches += i % 10 == 0 ? 1 : 0;
-                        return `${i}: some line data: ${i % 10 == 0 ? `match A` : ''}\n`;
-                    });
-                    let read: boolean = false;
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .asText()
-                                .type(Factory.FileType.Text)
-                                .file(tmpobj.name)
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    const updates: number[] = [];
-                    comps.events.IndexedMapUpdated.subscribe((event) => {
-                        event.len > 0 && updates.push(event.len);
-                    });
-                    comps.events.StreamUpdated.subscribe(async (rows: number) => {
-                        if (rows < 50 || read) {
-                            return;
-                        }
-                        read = true;
-                        try {
-                            await comps.search.search([
-                                {
-                                    filter: 'match A',
-                                    flags: { reg: true, word: true, cases: false },
-                                },
-                            ]);
-                            let items = await comps.stream.grabIndexed(0, countMatches);
-                            expect(items.length).toEqual(countMatches);
-                            expect(
-                                items
-                                    .map((item) =>
-                                        parseInt(
-                                            (item.content.match(/\d*/) as unknown as string)[0],
-                                            10,
-                                        ),
-                                    )
-                                    .reduce((partialSum, a) => partialSum + a, 0),
-                            ).toEqual(controlSum);
-                            expect(
-                                items.map((i) => [i.position, new Nature(i.nature).getTypes()]),
-                            ).toEqual([
-                                [0, [NatureTypes.Search]],
-                                [10, [NatureTypes.Search]],
-                                [20, [NatureTypes.Search]],
-                                [30, [NatureTypes.Search]],
-                                [40, [NatureTypes.Search]],
-                            ]);
-                            await comps.stream.setIndexingMode(IndexingMode.Breadcrumbs);
-                            let len = await comps.stream.getIndexedLen();
-                            expect(len).toEqual(30);
-                            items = await comps.stream.grabIndexed(0, len);
-                            expect(items.length).toEqual(len);
-                            expect(
-                                items.map((i) => [i.position, new Nature(i.nature).getTypes()]),
-                            ).toEqual([
-                                [0, [NatureTypes.Search]],
-                                [1, [NatureTypes.Breadcrumb]],
-                                [2, [NatureTypes.Breadcrumb]],
-                                [5, [NatureTypes.BreadcrumbSeporator]],
-                                [8, [NatureTypes.Breadcrumb]],
-                                [9, [NatureTypes.Breadcrumb]],
-                                [10, [NatureTypes.Search]],
-                                [11, [NatureTypes.Breadcrumb]],
-                                [12, [NatureTypes.Breadcrumb]],
-                                [15, [NatureTypes.BreadcrumbSeporator]],
-                                [18, [NatureTypes.Breadcrumb]],
-                                [19, [NatureTypes.Breadcrumb]],
-                                [20, [NatureTypes.Search]],
-                                [21, [NatureTypes.Breadcrumb]],
-                                [22, [NatureTypes.Breadcrumb]],
-                                [25, [NatureTypes.BreadcrumbSeporator]],
-                                [28, [NatureTypes.Breadcrumb]],
-                                [29, [NatureTypes.Breadcrumb]],
-                                [30, [NatureTypes.Search]],
-                                [31, [NatureTypes.Breadcrumb]],
-                                [32, [NatureTypes.Breadcrumb]],
-                                [35, [NatureTypes.BreadcrumbSeporator]],
-                                [38, [NatureTypes.Breadcrumb]],
-                                [39, [NatureTypes.Breadcrumb]],
-                                [40, [NatureTypes.Search]],
-                                [41, [NatureTypes.Breadcrumb]],
-                                [42, [NatureTypes.Breadcrumb]],
-                                [45, [NatureTypes.BreadcrumbSeporator]],
-                                [48, [NatureTypes.Breadcrumb]],
-                                [49, [NatureTypes.Breadcrumb]],
-                            ]);
-                            await comps.stream.expandBreadcrumbs(45, 2, false);
-                            len = await comps.stream.getIndexedLen();
-                            expect(len).toEqual(32);
-                            items = await comps.stream.grabIndexed(0, len);
-                            expect(items.length).toEqual(len);
-                            expect(
-                                items.map((i) => [i.position, new Nature(i.nature).getTypes()]),
-                            ).toEqual([
-                                [0, [NatureTypes.Search]],
-                                [1, [NatureTypes.Breadcrumb]],
-                                [2, [NatureTypes.Breadcrumb]],
-                                [5, [NatureTypes.BreadcrumbSeporator]],
-                                [8, [NatureTypes.Breadcrumb]],
-                                [9, [NatureTypes.Breadcrumb]],
-                                [10, [NatureTypes.Search]],
-                                [11, [NatureTypes.Breadcrumb]],
-                                [12, [NatureTypes.Breadcrumb]],
-                                [15, [NatureTypes.BreadcrumbSeporator]],
-                                [18, [NatureTypes.Breadcrumb]],
-                                [19, [NatureTypes.Breadcrumb]],
-                                [20, [NatureTypes.Search]],
-                                [21, [NatureTypes.Breadcrumb]],
-                                [22, [NatureTypes.Breadcrumb]],
-                                [25, [NatureTypes.BreadcrumbSeporator]],
-                                [28, [NatureTypes.Breadcrumb]],
-                                [29, [NatureTypes.Breadcrumb]],
-                                [30, [NatureTypes.Search]],
-                                [31, [NatureTypes.Breadcrumb]],
-                                [32, [NatureTypes.Breadcrumb]],
-                                [35, [NatureTypes.BreadcrumbSeporator]],
-                                [38, [NatureTypes.Breadcrumb]],
-                                [39, [NatureTypes.Breadcrumb]],
-                                [40, [NatureTypes.Search]],
-                                [41, [NatureTypes.Breadcrumb]],
-                                [42, [NatureTypes.Breadcrumb]],
-                                [44, [NatureTypes.BreadcrumbSeporator]],
-                                [46, [NatureTypes.Breadcrumb]],
-                                [47, [NatureTypes.Breadcrumb]],
-                                [48, [NatureTypes.Breadcrumb]],
-                                [49, [NatureTypes.Breadcrumb]],
-                            ]);
-                            await comps.stream.expandBreadcrumbs(44, 2, true);
-                            len = await comps.stream.getIndexedLen();
-                            expect(len).toEqual(34);
-                            items = await comps.stream.grabIndexed(0, len);
-                            expect(items.length).toEqual(len);
-                            expect(
-                                items.map((i) => [i.position, new Nature(i.nature).getTypes()]),
-                            ).toEqual([
-                                [0, [NatureTypes.Search]],
-                                [1, [NatureTypes.Breadcrumb]],
-                                [2, [NatureTypes.Breadcrumb]],
-                                [5, [NatureTypes.BreadcrumbSeporator]],
-                                [8, [NatureTypes.Breadcrumb]],
-                                [9, [NatureTypes.Breadcrumb]],
-                                [10, [NatureTypes.Search]],
-                                [11, [NatureTypes.Breadcrumb]],
-                                [12, [NatureTypes.Breadcrumb]],
-                                [15, [NatureTypes.BreadcrumbSeporator]],
-                                [18, [NatureTypes.Breadcrumb]],
-                                [19, [NatureTypes.Breadcrumb]],
-                                [20, [NatureTypes.Search]],
-                                [21, [NatureTypes.Breadcrumb]],
-                                [22, [NatureTypes.Breadcrumb]],
-                                [25, [NatureTypes.BreadcrumbSeporator]],
-                                [28, [NatureTypes.Breadcrumb]],
-                                [29, [NatureTypes.Breadcrumb]],
-                                [30, [NatureTypes.Search]],
-                                [31, [NatureTypes.Breadcrumb]],
-                                [32, [NatureTypes.Breadcrumb]],
-                                [35, [NatureTypes.BreadcrumbSeporator]],
-                                [38, [NatureTypes.Breadcrumb]],
-                                [39, [NatureTypes.Breadcrumb]],
-                                [40, [NatureTypes.Search]],
-                                [41, [NatureTypes.Breadcrumb]],
-                                [42, [NatureTypes.Breadcrumb]],
-                                [43, [NatureTypes.Breadcrumb]],
-                                [44, [NatureTypes.Breadcrumb]],
-                                [45, [NatureTypes.BreadcrumbSeporator]],
-                                [46, [NatureTypes.Breadcrumb]],
-                                [47, [NatureTypes.Breadcrumb]],
-                                [48, [NatureTypes.Breadcrumb]],
-                                [49, [NatureTypes.Breadcrumb]],
-                            ]);
-                            await comps.stream.expandBreadcrumbs(45, 1, true);
-                            len = await comps.stream.getIndexedLen();
-                            expect(len).toEqual(34);
-                            items = await comps.stream.grabIndexed(0, len);
-                            expect(items.length).toEqual(len);
-                            expect(
-                                items.map((i) => [i.position, new Nature(i.nature).getTypes()]),
-                            ).toEqual([
-                                [0, [NatureTypes.Search]],
-                                [1, [NatureTypes.Breadcrumb]],
-                                [2, [NatureTypes.Breadcrumb]],
-                                [5, [NatureTypes.BreadcrumbSeporator]],
-                                [8, [NatureTypes.Breadcrumb]],
-                                [9, [NatureTypes.Breadcrumb]],
-                                [10, [NatureTypes.Search]],
-                                [11, [NatureTypes.Breadcrumb]],
-                                [12, [NatureTypes.Breadcrumb]],
-                                [15, [NatureTypes.BreadcrumbSeporator]],
-                                [18, [NatureTypes.Breadcrumb]],
-                                [19, [NatureTypes.Breadcrumb]],
-                                [20, [NatureTypes.Search]],
-                                [21, [NatureTypes.Breadcrumb]],
-                                [22, [NatureTypes.Breadcrumb]],
-                                [25, [NatureTypes.BreadcrumbSeporator]],
-                                [28, [NatureTypes.Breadcrumb]],
-                                [29, [NatureTypes.Breadcrumb]],
-                                [30, [NatureTypes.Search]],
-                                [31, [NatureTypes.Breadcrumb]],
-                                [32, [NatureTypes.Breadcrumb]],
-                                [35, [NatureTypes.BreadcrumbSeporator]],
-                                [38, [NatureTypes.Breadcrumb]],
-                                [39, [NatureTypes.Breadcrumb]],
-                                [40, [NatureTypes.Search]],
-                                [41, [NatureTypes.Breadcrumb]],
-                                [42, [NatureTypes.Breadcrumb]],
-                                [43, [NatureTypes.Breadcrumb]],
-                                [44, [NatureTypes.Breadcrumb]],
-                                [45, [NatureTypes.Breadcrumb]],
-                                [46, [NatureTypes.Breadcrumb]],
-                                [47, [NatureTypes.Breadcrumb]],
-                                [48, [NatureTypes.Breadcrumb]],
-                                [49, [NatureTypes.Breadcrumb]],
-                            ]);
-                            finish(comps.session, done);
-                        } catch (err) {
-                            finish(
-                                undefined,
-                                done,
-                                new Error(
-                                    `Fail to finish test due error: ${
-                                        err instanceof Error ? err.message : err
-                                    }`,
-                                ),
-                            );
-                        }
-                    });
-                    return Promise.resolve();
+                let controlSum = 0;
+                let countMatches = 0;
+                const tmpobj = createSampleFile(50, logger, (i: number) => {
+                    controlSum += i % 10 == 0 ? i : 0;
+                    countMatches += i % 10 == 0 ? 1 : 0;
+                    return `${i}: some line data: ${i % 10 == 0 ? `match A` : ''}\n`;
+                });
+                let read: boolean = false;
+                comps.stream
+                    .observe(
+                        new Factory.File()
+                            .asText()
+                            .type(Factory.FileType.Text)
+                            .file(tmpobj.name)
+                            .get()
+                            .sterilized(),
+                    )
+                    .catch(finish.bind(null, comps.session, done));
+                const updates: number[] = [];
+                comps.events.IndexedMapUpdated.subscribe((event) => {
+                    event.len > 0 && updates.push(event.len);
+                });
+                comps.events.StreamUpdated.subscribe(async (rows: number) => {
+                    if (rows < 50 || read) {
+                        return;
+                    }
+                    read = true;
+                    try {
+                        await comps.search.search([
+                            {
+                                filter: 'match A',
+                                flags: { reg: true, word: true, cases: false },
+                            },
+                        ]);
+                        let items = await comps.stream.grabIndexed(0, countMatches);
+                        expect(items.length).toEqual(countMatches);
+                        expect(
+                            items
+                                .map((item) =>
+                                    parseInt(
+                                        (item.content.match(/\d*/) as unknown as string)[0],
+                                        10,
+                                    ),
+                                )
+                                .reduce((partialSum, a) => partialSum + a, 0),
+                        ).toEqual(controlSum);
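+                        // In plain search mode only the matching rows are indexed: positions 0, 10, 20, 30 and 40 with the Search nature.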
+                        expect(items.map((i) => [i.pos, new Nature(i.nature).getTypes()])).toEqual([
+                            [0, [NatureTypes.Search]],
+                            [10, [NatureTypes.Search]],
+                            [20, [NatureTypes.Search]],
+                            [30, [NatureTypes.Search]],
+                            [40, [NatureTypes.Search]],
+                        ]);
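+                        // Breadcrumbs mode keeps the Search rows, surrounds them with Breadcrumb rows and inserts a BreadcrumbSeporator in each gap, giving 30 indexed rows.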
+                        await comps.stream.setIndexingMode(IndexingMode.Breadcrumbs);
+                        let len = await comps.stream.getIndexedLen();
+                        expect(len).toEqual(30);
+                        items = await comps.stream.grabIndexed(0, len);
+                        expect(items.length).toEqual(len);
+                        expect(items.map((i) => [i.pos, new Nature(i.nature).getTypes()])).toEqual([
+                            [0, [NatureTypes.Search]],
+                            [1, [NatureTypes.Breadcrumb]],
+                            [2, [NatureTypes.Breadcrumb]],
+                            [5, [NatureTypes.BreadcrumbSeporator]],
+                            [8, [NatureTypes.Breadcrumb]],
+                            [9, [NatureTypes.Breadcrumb]],
+                            [10, [NatureTypes.Search]],
+                            [11, [NatureTypes.Breadcrumb]],
+                            [12, [NatureTypes.Breadcrumb]],
+                            [15, [NatureTypes.BreadcrumbSeporator]],
+                            [18, [NatureTypes.Breadcrumb]],
+                            [19, [NatureTypes.Breadcrumb]],
+                            [20, [NatureTypes.Search]],
+                            [21, [NatureTypes.Breadcrumb]],
+                            [22, [NatureTypes.Breadcrumb]],
+                            [25, [NatureTypes.BreadcrumbSeporator]],
+                            [28, [NatureTypes.Breadcrumb]],
+                            [29, [NatureTypes.Breadcrumb]],
+                            [30, [NatureTypes.Search]],
+                            [31, [NatureTypes.Breadcrumb]],
+                            [32, [NatureTypes.Breadcrumb]],
+                            [35, [NatureTypes.BreadcrumbSeporator]],
+                            [38, [NatureTypes.Breadcrumb]],
+                            [39, [NatureTypes.Breadcrumb]],
+                            [40, [NatureTypes.Search]],
+                            [41, [NatureTypes.Breadcrumb]],
+                            [42, [NatureTypes.Breadcrumb]],
+                            [45, [NatureTypes.BreadcrumbSeporator]],
+                            [48, [NatureTypes.Breadcrumb]],
+                            [49, [NatureTypes.Breadcrumb]],
+                        ]);
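+                        // Expanding at the separator at position 45 by two rows (third argument false) turns 46-47 into breadcrumbs, shifts the separator to 44 and grows the index to 32 rows.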
+                        await comps.stream.expandBreadcrumbs(45, 2, false);
+                        len = await comps.stream.getIndexedLen();
+                        expect(len).toEqual(32);
+                        items = await comps.stream.grabIndexed(0, len);
+                        expect(items.length).toEqual(len);
+                        expect(items.map((i) => [i.pos, new Nature(i.nature).getTypes()])).toEqual([
+                            [0, [NatureTypes.Search]],
+                            [1, [NatureTypes.Breadcrumb]],
+                            [2, [NatureTypes.Breadcrumb]],
+                            [5, [NatureTypes.BreadcrumbSeporator]],
+                            [8, [NatureTypes.Breadcrumb]],
+                            [9, [NatureTypes.Breadcrumb]],
+                            [10, [NatureTypes.Search]],
+                            [11, [NatureTypes.Breadcrumb]],
+                            [12, [NatureTypes.Breadcrumb]],
+                            [15, [NatureTypes.BreadcrumbSeporator]],
+                            [18, [NatureTypes.Breadcrumb]],
+                            [19, [NatureTypes.Breadcrumb]],
+                            [20, [NatureTypes.Search]],
+                            [21, [NatureTypes.Breadcrumb]],
+                            [22, [NatureTypes.Breadcrumb]],
+                            [25, [NatureTypes.BreadcrumbSeporator]],
+                            [28, [NatureTypes.Breadcrumb]],
+                            [29, [NatureTypes.Breadcrumb]],
+                            [30, [NatureTypes.Search]],
+                            [31, [NatureTypes.Breadcrumb]],
+                            [32, [NatureTypes.Breadcrumb]],
+                            [35, [NatureTypes.BreadcrumbSeporator]],
+                            [38, [NatureTypes.Breadcrumb]],
+                            [39, [NatureTypes.Breadcrumb]],
+                            [40, [NatureTypes.Search]],
+                            [41, [NatureTypes.Breadcrumb]],
+                            [42, [NatureTypes.Breadcrumb]],
+                            [44, [NatureTypes.BreadcrumbSeporator]],
+                            [46, [NatureTypes.Breadcrumb]],
+                            [47, [NatureTypes.Breadcrumb]],
+                            [48, [NatureTypes.Breadcrumb]],
+                            [49, [NatureTypes.Breadcrumb]],
+                        ]);
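+                        // Expanding at the separator at 44 by two rows (third argument true) turns 43-44 into breadcrumbs, shifts the separator to 45 and grows the index to 34 rows.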
+                        await comps.stream.expandBreadcrumbs(44, 2, true);
+                        len = await comps.stream.getIndexedLen();
+                        expect(len).toEqual(34);
+                        items = await comps.stream.grabIndexed(0, len);
+                        expect(items.length).toEqual(len);
+                        expect(items.map((i) => [i.pos, new Nature(i.nature).getTypes()])).toEqual([
+                            [0, [NatureTypes.Search]],
+                            [1, [NatureTypes.Breadcrumb]],
+                            [2, [NatureTypes.Breadcrumb]],
+                            [5, [NatureTypes.BreadcrumbSeporator]],
+                            [8, [NatureTypes.Breadcrumb]],
+                            [9, [NatureTypes.Breadcrumb]],
+                            [10, [NatureTypes.Search]],
+                            [11, [NatureTypes.Breadcrumb]],
+                            [12, [NatureTypes.Breadcrumb]],
+                            [15, [NatureTypes.BreadcrumbSeporator]],
+                            [18, [NatureTypes.Breadcrumb]],
+                            [19, [NatureTypes.Breadcrumb]],
+                            [20, [NatureTypes.Search]],
+                            [21, [NatureTypes.Breadcrumb]],
+                            [22, [NatureTypes.Breadcrumb]],
+                            [25, [NatureTypes.BreadcrumbSeporator]],
+                            [28, [NatureTypes.Breadcrumb]],
+                            [29, [NatureTypes.Breadcrumb]],
+                            [30, [NatureTypes.Search]],
+                            [31, [NatureTypes.Breadcrumb]],
+                            [32, [NatureTypes.Breadcrumb]],
+                            [35, [NatureTypes.BreadcrumbSeporator]],
+                            [38, [NatureTypes.Breadcrumb]],
+                            [39, [NatureTypes.Breadcrumb]],
+                            [40, [NatureTypes.Search]],
+                            [41, [NatureTypes.Breadcrumb]],
+                            [42, [NatureTypes.Breadcrumb]],
+                            [43, [NatureTypes.Breadcrumb]],
+                            [44, [NatureTypes.Breadcrumb]],
+                            [45, [NatureTypes.BreadcrumbSeporator]],
+                            [46, [NatureTypes.Breadcrumb]],
+                            [47, [NatureTypes.Breadcrumb]],
+                            [48, [NatureTypes.Breadcrumb]],
+                            [49, [NatureTypes.Breadcrumb]],
+                        ]);
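+                        // Expanding the last hidden row dissolves the separator: position 45 becomes a regular breadcrumb and the indexed length stays at 34.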
+                        await comps.stream.expandBreadcrumbs(45, 1, true);
+                        len = await comps.stream.getIndexedLen();
+                        expect(len).toEqual(34);
+                        items = await comps.stream.grabIndexed(0, len);
+                        expect(items.length).toEqual(len);
+                        expect(items.map((i) => [i.pos, new Nature(i.nature).getTypes()])).toEqual([
+                            [0, [NatureTypes.Search]],
+                            [1, [NatureTypes.Breadcrumb]],
+                            [2, [NatureTypes.Breadcrumb]],
+                            [5, [NatureTypes.BreadcrumbSeporator]],
+                            [8, [NatureTypes.Breadcrumb]],
+                            [9, [NatureTypes.Breadcrumb]],
+                            [10, [NatureTypes.Search]],
+                            [11, [NatureTypes.Breadcrumb]],
+                            [12, [NatureTypes.Breadcrumb]],
+                            [15, [NatureTypes.BreadcrumbSeporator]],
+                            [18, [NatureTypes.Breadcrumb]],
+                            [19, [NatureTypes.Breadcrumb]],
+                            [20, [NatureTypes.Search]],
+                            [21, [NatureTypes.Breadcrumb]],
+                            [22, [NatureTypes.Breadcrumb]],
+                            [25, [NatureTypes.BreadcrumbSeporator]],
+                            [28, [NatureTypes.Breadcrumb]],
+                            [29, [NatureTypes.Breadcrumb]],
+                            [30, [NatureTypes.Search]],
+                            [31, [NatureTypes.Breadcrumb]],
+                            [32, [NatureTypes.Breadcrumb]],
+                            [35, [NatureTypes.BreadcrumbSeporator]],
+                            [38, [NatureTypes.Breadcrumb]],
+                            [39, [NatureTypes.Breadcrumb]],
+                            [40, [NatureTypes.Search]],
+                            [41, [NatureTypes.Breadcrumb]],
+                            [42, [NatureTypes.Breadcrumb]],
+                            [43, [NatureTypes.Breadcrumb]],
+                            [44, [NatureTypes.Breadcrumb]],
+                            [45, [NatureTypes.Breadcrumb]],
+                            [46, [NatureTypes.Breadcrumb]],
+                            [47, [NatureTypes.Breadcrumb]],
+                            [48, [NatureTypes.Breadcrumb]],
+                            [49, [NatureTypes.Breadcrumb]],
+                        ]);
+                        finish(comps.session, done);
+                    } catch (err) {
+                        finish(
+                            undefined,
+                            done,
+                            new Error(
+                                `Failed to finish test due to error: ${
+                                    err instanceof Error ? err.message : err
+                                }`,
+                            ),
+                        );
+                    }
+                });
+                return Promise.resolve();
             })().catch((err: Error) => {
                 finish(
                     undefined,
diff --git a/application/apps/rustcore/ts-bindings/spec/session.jobs.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.jobs.spec.ts
index c2dcf2a0cf..8fd83ce41c 100644
--- a/application/apps/rustcore/ts-bindings/spec/session.jobs.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/session.jobs.spec.ts
@@ -101,7 +101,7 @@ describe('Jobs', function () {
                 include: { files: true, folders: true },
             })
                 .then((ls) => {
-                    expect(typeof ls).toEqual('string');
+                    expect(ls.list instanceof Array).toEqual(true);
                     const job = jobs
                         .listContent({
                             depth: 10,
diff --git a/application/apps/rustcore/ts-bindings/spec/session.observe.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.observe.spec.ts
index 22610729a7..644cdab84f 100644
--- a/application/apps/rustcore/ts-bindings/spec/session.observe.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/session.observe.spec.ts
@@ -6,9 +6,8 @@
 import { initLogger } from './logger';
 initLogger();
 import { Factory } from '../src/api/session';
-import { IGrabbedElement } from 'platform/types/content';
+import { GrabbedElement, AttachmentInfo } from 'platform/types/bindings';
 import { IAttachmentsUpdatedUpdated } from '../src/api/session.provider';
-import { IAttachment } from 'platform/types/content';
 import { createSampleFile, finish } from './common';
 import { readConfigurationFile } from './config';
 
@@ -20,715 +19,778 @@ const config = readConfigurationFile().get().tests.observe;
 describe('Observe', function () {
     it(config.regular.list[1], function () {
         return runners.withSession(config.regular, 1, async (logger, done, comps) => {
-                    const tmpobj = createSampleFile(
-                        5000,
-                        logger,
-                        (i: number) => `some line data: ${i}\n`,
-                    );
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .asText()
-                                .type(Factory.FileType.Text)
-                                .file(tmpobj.name)
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    let grabbing: boolean = false;
-                    comps.events.StreamUpdated.subscribe((rows: number) => {
-                        if (rows === 0 || grabbing) {
-                            return;
-                        }
-                        grabbing = true;
-                        comps.stream
-                            .grab(500, 7)
-                            .then((result: IGrabbedElement[]) => {
-                                logger.debug('result of grab was: ' + JSON.stringify(result));
-                                expect(result.map((i) => i.content)).toEqual([
-                                    'some line data: 500',
-                                    'some line data: 501',
-                                    'some line data: 502',
-                                    'some line data: 503',
-                                    'some line data: 504',
-                                    'some line data: 505',
-                                    'some line data: 506',
-                                ]);
-                                finish(comps.session, done);
-                            })
-                            .catch((err: Error) => {
-                                finish(
-                                    comps.session,
-                                    done,
-                                    new Error(
-                                        `Fail to grab data due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
-                                    ),
-                                );
-                            });
+            const tmpobj = createSampleFile(5000, logger, (i: number) => `some line data: ${i}\n`);
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .asText()
+                        .type(Factory.FileType.Text)
+                        .file(tmpobj.name)
+                        .get()
+                        .sterilized(),
+                )
+                .catch(finish.bind(null, comps.session, done));
+            let grabbing: boolean = false;
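+            // Once rows start arriving, grab a 7-row window at offset 500 and compare its content verbatim.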
+            comps.events.StreamUpdated.subscribe((rows: number) => {
+                if (rows === 0 || grabbing) {
+                    return;
+                }
+                grabbing = true;
+                comps.stream
+                    .grab(500, 7)
+                    .then((result: GrabbedElement[]) => {
+                        logger.debug('result of grab was: ' + JSON.stringify(result));
+                        expect(result.map((i) => i.content)).toEqual([
+                            'some line data: 500',
+                            'some line data: 501',
+                            'some line data: 502',
+                            'some line data: 503',
+                            'some line data: 504',
+                            'some line data: 505',
+                            'some line data: 506',
+                        ]);
+                        finish(comps.session, done);
+                    })
+                    .catch((err: Error) => {
+                        finish(
+                            comps.session,
+                            done,
+                            new Error(
+                                `Failed to grab data due to error: ${
+                                    err instanceof Error ? err.message : err
+                                }`,
+                            ),
+                        );
                     });
+            });
         });
     });
 
     it(config.regular.list[2], function () {
         return runners.withSession(config.regular, 2, async (logger, done, comps) => {
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .type(Factory.FileType.PcapNG)
-                                .file(config.regular.files['pcapng'])
-                                .asDlt({
-                                    filter_config: undefined,
-                                    fibex_file_paths: [],
-                                    with_storage_header: false,
-                                    tz: undefined,
-                                })
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    let grabbing: boolean = false;
-                    let received: number = 0;
-                    const timeout = setTimeout(() => {
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .type(Factory.FileType.PcapNG)
+                        .file(config.regular.files['pcapng'])
+                        .asDlt({
+                            filter_config: undefined,
+                            fibex_file_paths: [],
+                            with_storage_header: false,
+                            tz: undefined,
+                        })
+                        .get()
+                        .sterilized(),
+                )
+                .catch(finish.bind(null, comps.session, done));
+            let grabbing: boolean = false;
+            let received: number = 0;
+            const timeout = setTimeout(() => {
+                finish(
+                    comps.session,
+                    done,
+                    new Error(
+                        `Failed due to timeout. Waited for at least 100 rows. Received: ${received}`,
+                    ),
+                );
+            }, 20000);
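+            // Wait for at least 100 DLT rows (20 s timeout), then grab a 10-row window starting at row 1.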
+            comps.events.StreamUpdated.subscribe((rows: number) => {
+                received = rows;
+                if (rows < 100 || grabbing) {
+                    return;
+                }
+                clearTimeout(timeout);
+                grabbing = true;
+                comps.stream
+                    .grab(1, 10)
+                    .then((result: GrabbedElement[]) => {
+                        expect(result.length).toEqual(10);
+                        logger.debug('result of grab was: ' + JSON.stringify(result));
+                        finish(comps.session, done);
+                    })
+                    .catch((err: Error) => {
                         finish(
                             comps.session,
                             done,
                             new Error(
-                                `Failed because timeout. Waited for at least 100 rows. Has been gotten: ${received}`,
+                                `Failed to grab data due to error: ${
+                                    err instanceof Error ? err.message : err
+                                }`,
                             ),
                         );
-                    }, 20000);
-                    comps.events.StreamUpdated.subscribe((rows: number) => {
-                        received = rows;
-                        if (rows < 100 || grabbing) {
-                            return;
-                        }
-                        clearTimeout(timeout);
-                        grabbing = true;
-                        comps.stream
-                            .grab(1, 10)
-                            .then((result: IGrabbedElement[]) => {
-                                expect(result.length).toEqual(10);
-                                logger.debug('result of grab was: ' + JSON.stringify(result));
-                                finish(comps.session, done);
-                            })
-                            .catch((err: Error) => {
-                                finish(
-                                    comps.session,
-                                    done,
-                                    new Error(
-                                        `Fail to grab data due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
-                                    ),
-                                );
-                            });
                     });
+            });
         });
     });
 
     it(config.regular.list[3], function () {
         return runners.withSession(config.regular, 3, async (logger, done, comps) => {
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .type(Factory.FileType.Binary)
-                                .file(config.regular.files['dlt'])
-                                .asDlt({
-                                    filter_config: undefined,
-                                    fibex_file_paths: [],
-                                    with_storage_header: true,
-                                    tz: undefined,
-                                })
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    let grabbing: boolean = false;
-                    let received: number = 0;
-                    const timeout = setTimeout(() => {
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .type(Factory.FileType.Binary)
+                        .file(config.regular.files['dlt'])
+                        .asDlt({
+                            filter_config: undefined,
+                            fibex_file_paths: [],
+                            with_storage_header: true,
+                            tz: undefined,
+                        })
+                        .get()
+                        .sterilized(),
+                )
+                .catch(finish.bind(null, comps.session, done));
+            let grabbing: boolean = false;
+            let received: number = 0;
+            const timeout = setTimeout(() => {
+                finish(
+                    comps.session,
+                    done,
+                    new Error(
+                        `Failed due to timeout. Waited for at least 100 rows. Received: ${received}`,
+                    ),
+                );
+            }, 20000);
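+            // Same flow for a DLT file with storage headers read directly from disk: wait for 100 rows, then grab a 10-row window.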
+            comps.events.StreamUpdated.subscribe((rows: number) => {
+                received = rows;
+                if (rows < 100 || grabbing) {
+                    return;
+                }
+                clearTimeout(timeout);
+                grabbing = true;
+                comps.stream
+                    .grab(1, 10)
+                    .then((result: GrabbedElement[]) => {
+                        expect(result.length).toEqual(10);
+                        logger.debug('result of grab was: ' + JSON.stringify(result));
+                        finish(comps.session, done);
+                    })
+                    .catch((err: Error) => {
                         finish(
                             comps.session,
                             done,
                             new Error(
-                                `Failed because timeout. Waited for at least 100 rows. Has been gotten: ${received}`,
+                                `Failed to grab data due to error: ${
+                                    err instanceof Error ? err.message : err
+                                }`,
                             ),
                         );
-                    }, 20000);
-                    comps.events.StreamUpdated.subscribe((rows: number) => {
-                        received = rows;
-                        if (rows < 100 || grabbing) {
-                            return;
-                        }
-                        clearTimeout(timeout);
-                        grabbing = true;
-                        comps.stream
-                            .grab(1, 10)
-                            .then((result: IGrabbedElement[]) => {
-                                expect(result.length).toEqual(10);
-                                logger.debug('result of grab was: ' + JSON.stringify(result));
-                                finish(comps.session, done);
-                            })
-                            .catch((err: Error) => {
-                                finish(
-                                    comps.session,
-                                    done,
-                                    new Error(
-                                        `Fail to grab data due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
-                                    ),
-                                );
-                            });
                     });
+            });
         });
     });
 
     it(config.regular.list[4], function () {
         return runners.withSession(config.regular, 4, async (logger, done, comps) => {
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .type(Factory.FileType.Binary)
-                                .file(config.regular.files['attachments'])
-                                .asDlt({
-                                    filter_config: undefined,
-                                    fibex_file_paths: undefined,
-                                    with_storage_header: true,
-                                    tz: undefined,
-                                })
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    let updates: IAttachmentsUpdatedUpdated[] = [];
-                    const timeout = setTimeout(() => {
-                        finish(
-                            comps.session,
-                            done,
-                            new Error(
-                                `Failed because timeout. Waited for at least 3 attachments. Has been gotten: ${updates.length}`,
-                            ),
-                        );
-                    }, 20000);
-                    comps.events.AttachmentsUpdated.subscribe((update: IAttachmentsUpdatedUpdated) => {
-                        updates.push(update);
-                        if (updates.length >= 3) {
-                            clearTimeout(timeout);
-                            expect(updates[0].len).toEqual(1);
-                            expect(updates[1].len).toEqual(2);
-                            expect(updates[2].len).toEqual(3);
-                            {
-                                let attachment: IAttachment = updates[0].attachment;
-                                expect(attachment.name).toEqual('test1.txt');
-                                expect(attachment.size).toEqual(5);
-                                expect(attachment.ext).toEqual('txt');
-                                expect(attachment.mime).toEqual('text/plain');
-                                expect(attachment.messages).toEqual([0, 2, 6]);
-                                expect(fs.readFileSync(attachment.filepath, 'utf8')).toEqual(
-                                    'test1',
-                                );
-                            }
-                            {
-                                let attachment: IAttachment = updates[1].attachment;
-                                expect(attachment.name).toEqual('test2.txt');
-                                expect(attachment.size).toEqual(6);
-                                expect(attachment.ext).toEqual('txt');
-                                expect(attachment.mime).toEqual('text/plain');
-                                expect(attachment.messages).toEqual([1, 3, 7]);
-                                expect(fs.readFileSync(attachment.filepath, 'utf8')).toEqual(
-                                    'test22',
-                                );
-                            }
-                            {
-                                let attachment: IAttachment = updates[2].attachment;
-                                expect(attachment.name).toEqual('test3.txt');
-                                expect(attachment.size).toEqual(7);
-                                expect(attachment.ext).toEqual('txt');
-                                expect(attachment.mime).toEqual('text/plain');
-                                expect(attachment.messages).toEqual([4, 5, 8]);
-                                expect(fs.readFileSync(attachment.filepath, 'utf8')).toEqual(
-                                    'test333',
-                                );
-                            }
-                            finish(comps.session, done);
-                        }
-                    });
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .type(Factory.FileType.Binary)
+                        .file(config.regular.files['attachments'])
+                        .asDlt({
+                            filter_config: undefined,
+                            fibex_file_paths: undefined,
+                            with_storage_header: true,
+                            tz: undefined,
+                        })
+                        .get()
+                        .sterilized(),
+                )
+                .catch(finish.bind(null, comps.session, done));
+            let updates: IAttachmentsUpdatedUpdated[] = [];
+            const timeout = setTimeout(() => {
+                finish(
+                    comps.session,
+                    done,
+                    new Error(
+                        `Failed due to timeout. Waited for at least 3 attachments. Received: ${updates.length}`,
+                    ),
+                );
+            }, 20000);
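+            // Each embedded DLT attachment fires an AttachmentsUpdated event; after three updates the name, size, extension, MIME type, owning messages and extracted file content are verified.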
+            comps.events.AttachmentsUpdated.subscribe((update: IAttachmentsUpdatedUpdated) => {
+                updates.push(update);
+                if (updates.length >= 3) {
+                    clearTimeout(timeout);
+                    expect(updates[0].len).toEqual(1);
+                    expect(updates[1].len).toEqual(2);
+                    expect(updates[2].len).toEqual(3);
+                    {
+                        let attachment: AttachmentInfo = updates[0].attachment;
+                        expect(attachment.name).toEqual('test1.txt');
+                        expect(attachment.size).toEqual(5);
+                        expect(attachment.ext).toEqual('txt');
+                        expect(attachment.mime).toEqual('text/plain');
+                        expect(attachment.messages).toEqual([0, 2, 6]);
+                        expect(fs.readFileSync(attachment.filepath, 'utf8')).toEqual('test1');
+                    }
+                    {
+                        let attachment: AttachmentInfo = updates[1].attachment;
+                        expect(attachment.name).toEqual('test2.txt');
+                        expect(attachment.size).toEqual(6);
+                        expect(attachment.ext).toEqual('txt');
+                        expect(attachment.mime).toEqual('text/plain');
+                        expect(attachment.messages).toEqual([1, 3, 7]);
+                        expect(fs.readFileSync(attachment.filepath, 'utf8')).toEqual('test22');
+                    }
+                    {
+                        let attachment: AttachmentInfo = updates[2].attachment;
+                        expect(attachment.name).toEqual('test3.txt');
+                        expect(attachment.size).toEqual(7);
+                        expect(attachment.ext).toEqual('txt');
+                        expect(attachment.mime).toEqual('text/plain');
+                        expect(attachment.messages).toEqual([4, 5, 8]);
+                        expect(fs.readFileSync(attachment.filepath, 'utf8')).toEqual('test333');
+                    }
+                    finish(comps.session, done);
+                }
+            });
         });
     });
 
     it(config.regular.list[5], function () {
         return runners.withSession(config.regular, 5, async (logger, done, comps) => {
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .type(Factory.FileType.PcapNG)
-                                .file(config.regular.files['someip-pcapng'])
-                                .asSomeip({
-                                    fibex_file_paths: [],
-                                })
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    let grabbing: boolean = false;
-                    let received: number = 0;
-                    const timeout = setTimeout(() => {
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .type(Factory.FileType.PcapNG)
+                        .file(config.regular.files['someip-pcapng'])
+                        .asSomeip({
+                            fibex_file_paths: [],
+                        })
+                        .get()
+                        .sterilized(),
+                )
+                .catch(finish.bind(null, comps.session, done));
+            let grabbing: boolean = false;
+            let received: number = 0;
+            const timeout = setTimeout(() => {
+                finish(
+                    comps.session,
+                    done,
+                    new Error(
+                        `Failed due to timeout. Waited for at least 55 rows. Received: ${received}`,
+                    ),
+                );
+            }, 20000);
+            comps.events.StreamUpdated.subscribe((rows: number) => {
+                received = rows;
+                if (rows < 55 || grabbing) {
+                    return;
+                }
+                clearTimeout(timeout);
+                grabbing = true;
+                comps.stream
+                    .grab(0, 4)
+                    .then((result: GrabbedElement[]) => {
+                        expect(result.length).toEqual(4);
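+                        // SOME/IP rows encode header fields and payload as U+0004-separated columns; row 0 is an SD offer, row 3 an RPC message with a raw byte payload.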
+                        expect(result[0].content.split('\u0004')).toEqual([
+                            'SD',
+                            /* Header */
+                            '65535', // Service-ID
+                            '33024', // Method-ID
+                            '60', // Length-Field
+                            '0', // Client-ID
+                            '0', // Session-ID
+                            '1', // Interface-Version
+                            '2', // Message-Type
+                            '0', // Return-Type
+                            /* Payload */
+                            'Flags [C0], Offer 123 v1.0 Inst 1 Ttl 3 UDP 192.168.178.58:30000 TCP 192.168.178.58:30000',
+                        ]);
+                        expect(result[3].content.split('\u0004')).toEqual([
+                            'RPC',
+                            /* Header */
+                            '123', // Service-ID
+                            '32773', // Method-ID
+                            '16', // Length-Field
+                            '1', // Client-ID
+                            '0', // Session-ID
+                            '1', // Interface-Version
+                            '2', // Message-Type
+                            '0', // Return-Type
+                            /* Payload */
+                            '[00, 00, 01, 88, 01, C3, C4, 1D]',
+                        ]);
+                        logger.debug('result of grab was: ' + JSON.stringify(result));
+                        finish(comps.session, done);
+                    })
+                    .catch((err: Error) => {
                         finish(
                             comps.session,
                             done,
                             new Error(
-                                `Failed because timeout. Waited for at least 55 rows. Has been gotten: ${received}`,
+                                `Failed to grab data due to error: ${
+                                    err instanceof Error ? err.message : err
+                                }`,
                             ),
                         );
-                    }, 20000);
-                    comps.events.StreamUpdated.subscribe((rows: number) => {
-                        received = rows;
-                        if (rows < 55 || grabbing) {
-                            return;
-                        }
-                        clearTimeout(timeout);
-                        grabbing = true;
-                        comps.stream
-                            .grab(0, 4)
-                            .then((result: IGrabbedElement[]) => {
-                                expect(result.length).toEqual(4);
-                                expect(result[0].content.split('\u0004')).toEqual([
-                                    'SD',
-                                    /* Header */
-                                    '65535', // Service-ID
-                                    '33024', // Method-ID
-                                    '60', // Length-Field
-                                    '0', // Client-ID
-                                    '0', // Session-ID
-                                    '1', // Interface-Version
-                                    '2', // Message-Type
-                                    '0', // Return-Type
-                                    /* Payload */
-                                    'Flags [C0], Offer 123 v1.0 Inst 1 Ttl 3 UDP 192.168.178.58:30000 TCP 192.168.178.58:30000',
-                                ]);
-                                expect(result[3].content.split('\u0004')).toEqual([
-                                    'RPC',
-                                    /* Header */
-                                    '123', // Service-ID
-                                    '32773', // Method-ID
-                                    '16', // Length-Field
-                                    '1', // Client-ID
-                                    '0', // Session-ID
-                                    '1', // Interface-Version
-                                    '2', // Message-Type
-                                    '0', // Return-Type
-                                    /* Payload */
-                                    '[00, 00, 01, 88, 01, C3, C4, 1D]',
-                                ]);
-                                logger.debug('result of grab was: ' + JSON.stringify(result));
-                                finish(comps.session, done);
-                            })
-                            .catch((err: Error) => {
-                                finish(
-                                    comps.session,
-                                    done,
-                                    new Error(
-                                        `Fail to grab data due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
-                                    ),
-                                );
-                            });
                     });
+            });
         });
     });
 
     it(config.regular.list[6], function () {
         return runners.withSession(config.regular, 6, async (logger, done, comps) => {
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .type(Factory.FileType.PcapNG)
-                                .file(config.regular.files['someip-pcapng'])
-                                .asSomeip({
-                                    fibex_file_paths: [config.regular.files['someip-fibex']],
-                                })
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    let grabbing: boolean = false;
-                    let received: number = 0;
-                    const timeout = setTimeout(() => {
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .type(Factory.FileType.PcapNG)
+                        .file(config.regular.files['someip-pcapng'])
+                        .asSomeip({
+                            fibex_file_paths: [config.regular.files['someip-fibex']],
+                        })
+                        .get()
+                        .sterilized(),
+                )
+                .catch(finish.bind(null, comps.session, done));
+            let grabbing: boolean = false;
+            let received: number = 0;
+            const timeout = setTimeout(() => {
+                finish(
+                    comps.session,
+                    done,
+                    new Error(
+                        `Failed due to timeout. Waited for at least 55 rows. Received: ${received}`,
+                    ),
+                );
+            }, 20000);
+            comps.events.StreamUpdated.subscribe((rows: number) => {
+                received = rows;
+                if (rows < 55 || grabbing) {
+                    return;
+                }
+                clearTimeout(timeout);
+                grabbing = true;
+                comps.stream
+                    .grab(0, 4)
+                    .then((result: GrabbedElement[]) => {
+                        expect(result.length).toEqual(4);
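+                        // With the FIBEX model supplied, the RPC payload of row 3 is decoded into TestService::timeEvent instead of raw bytes.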
+                        expect(result[0].content.split('\u0004')).toEqual([
+                            'SD',
+                            /* Header */
+                            '65535', // Service-ID
+                            '33024', // Method-ID
+                            '60', // Length-Field
+                            '0', // Client-ID
+                            '0', // Session-ID
+                            '1', // Interface-Version
+                            '2', // Message-Type
+                            '0', // Return-Type
+                            /* Payload */
+                            'Flags [C0], Offer 123 v1.0 Inst 1 Ttl 3 UDP 192.168.178.58:30000 TCP 192.168.178.58:30000',
+                        ]);
+                        expect(result[3].content.split('\u0004')).toEqual([
+                            'RPC',
+                            /* Header */
+                            '123', // Service-ID
+                            '32773', // Method-ID
+                            '16', // Length-Field
+                            '1', // Client-ID
+                            '0', // Session-ID
+                            '1', // Interface-Version
+                            '2', // Message-Type
+                            '0', // Return-Type
+                            /* Payload */
+                            'TestService::timeEvent {\u0006\ttimestamp (INT64) : 1683656786973,\u0006}',
+                        ]);
+                        logger.debug('result of grab was: ' + JSON.stringify(result));
+                        finish(comps.session, done);
+                    })
+                    .catch((err: Error) => {
                         finish(
                             comps.session,
                             done,
                             new Error(
-                                `Failed because timeout. Waited for at least 55 rows. Has been gotten: ${received}`,
+                                `Failed to grab data due to error: ${
+                                    err instanceof Error ? err.message : err
+                                }`,
                             ),
                         );
-                    }, 20000);
-                    comps.events.StreamUpdated.subscribe((rows: number) => {
-                        received = rows;
-                        if (rows < 55 || grabbing) {
-                            return;
-                        }
-                        clearTimeout(timeout);
-                        grabbing = true;
-                        comps.stream
-                            .grab(0, 4)
-                            .then((result: IGrabbedElement[]) => {
-                                expect(result.length).toEqual(4);
-                                expect(result[0].content.split('\u0004')).toEqual([
-                                    'SD',
-                                    /* Header */
-                                    '65535', // Service-ID
-                                    '33024', // Method-ID
-                                    '60', // Length-Field
-                                    '0', // Client-ID
-                                    '0', // Session-ID
-                                    '1', // Interface-Version
-                                    '2', // Message-Type
-                                    '0', // Return-Type
-                                    /* Payload */
-                                    'Flags [C0], Offer 123 v1.0 Inst 1 Ttl 3 UDP 192.168.178.58:30000 TCP 192.168.178.58:30000',
-                                ]);
-                                expect(result[3].content.split('\u0004')).toEqual([
-                                    'RPC',
-                                    /* Header */
-                                    '123', // Service-ID
-                                    '32773', // Method-ID
-                                    '16', // Length-Field
-                                    '1', // Client-ID
-                                    '0', // Session-ID
-                                    '1', // Interface-Version
-                                    '2', // Message-Type
-                                    '0', // Return-Type
-                                    /* Payload */
-                                    'TestService::timeEvent {\u0006\ttimestamp (INT64) : 1683656786973,\u0006}',
-                                ]);
-                                logger.debug('result of grab was: ' + JSON.stringify(result));
-                                finish(comps.session, done);
-                            })
-                            .catch((err: Error) => {
-                                finish(
-                                    comps.session,
-                                    done,
-                                    new Error(
-                                        `Fail to grab data due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
-                                    ),
-                                );
-                            });
                     });
+            });
         });
     });
 
     it(config.regular.list[7], function () {
         return runners.withSession(config.regular, 7, async (logger, done, comps) => {
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .type(Factory.FileType.PcapLegacy)
-                                .file(config.regular.files['someip-pcap'])
-                                .asSomeip({
-                                    fibex_file_paths: [],
-                                })
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    let grabbing: boolean = false;
-                    let received: number = 0;
-                    const timeout = setTimeout(() => {
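+            // Same SOME/IP checks as the PcapNG case, but parsing a legacy PCAP capture.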
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .type(Factory.FileType.PcapLegacy)
+                        .file(config.regular.files['someip-pcap'])
+                        .asSomeip({
+                            fibex_file_paths: [],
+                        })
+                        .get()
+                        .sterilized(),
+                )
+                .catch(finish.bind(null, comps.session, done));
+            let grabbing: boolean = false;
+            let received: number = 0;
+            const timeout = setTimeout(() => {
+                finish(
+                    comps.session,
+                    done,
+                    new Error(
+                        `Failed due to timeout. Waited for at least 55 rows. Received: ${received}`,
+                    ),
+                );
+            }, 20000);
+            comps.events.StreamUpdated.subscribe((rows: number) => {
+                received = rows;
+                if (rows < 55 || grabbing) {
+                    return;
+                }
+                clearTimeout(timeout);
+                grabbing = true;
+                comps.stream
+                    .grab(0, 4)
+                    .then((result: GrabbedElement[]) => {
+                        expect(result.length).toEqual(4);
+                        expect(result[0].content.split('\u0004')).toEqual([
+                            'SD',
+                            /* Header */
+                            '65535', // Service-ID
+                            '33024', // Method-ID
+                            '60', // Length-Field
+                            '0', // Client-ID
+                            '0', // Session-ID
+                            '1', // Interface-Version
+                            '2', // Message-Type
+                            '0', // Return-Type
+                            /* Payload */
+                            'Flags [C0], Offer 123 v1.0 Inst 1 Ttl 3 UDP 192.168.178.58:30000 TCP 192.168.178.58:30000',
+                        ]);
+                        expect(result[3].content.split('\u0004')).toEqual([
+                            'RPC',
+                            /* Header */
+                            '123', // Service-ID
+                            '32773', // Method-ID
+                            '16', // Length-Field
+                            '1', // Client-ID
+                            '0', // Session-ID
+                            '1', // Interface-Version
+                            '2', // Message-Type
+                            '0', // Return-Type
+                            /* Payload */
+                            '[00, 00, 01, 88, 01, C3, C4, 1D]',
+                        ]);
+                        logger.debug('result of grab was: ' + JSON.stringify(result));
+                        finish(comps.session, done);
+                    })
+                    .catch((err: Error) => {
                         finish(
                             comps.session,
                             done,
                             new Error(
-                                `Failed because timeout. Waited for at least 55 rows. Has been gotten: ${received}`,
+                                `Failed to grab data due to error: ${
+                                    err instanceof Error ? err.message : err
+                                }`,
                             ),
                         );
-                    }, 20000);
-                    comps.events.StreamUpdated.subscribe((rows: number) => {
-                        received = rows;
-                        if (rows < 55 || grabbing) {
-                            return;
-                        }
-                        clearTimeout(timeout);
-                        grabbing = true;
-                        comps.stream
-                            .grab(0, 4)
-                            .then((result: IGrabbedElement[]) => {
-                                expect(result.length).toEqual(4);
-                                expect(result[0].content.split('\u0004')).toEqual([
-                                    'SD',
-                                    /* Header */
-                                    '65535', // Service-ID
-                                    '33024', // Method-ID
-                                    '60', // Length-Field
-                                    '0', // Client-ID
-                                    '0', // Session-ID
-                                    '1', // Interface-Version
-                                    '2', // Message-Type
-                                    '0', // Return-Type
-                                    /* Payload */
-                                    'Flags [C0], Offer 123 v1.0 Inst 1 Ttl 3 UDP 192.168.178.58:30000 TCP 192.168.178.58:30000',
-                                ]);
-                                expect(result[3].content.split('\u0004')).toEqual([
-                                    'RPC',
-                                    /* Header */
-                                    '123', // Service-ID
-                                    '32773', // Method-ID
-                                    '16', // Length-Field
-                                    '1', // Client-ID
-                                    '0', // Session-ID
-                                    '1', // Interface-Version
-                                    '2', // Message-Type
-                                    '0', // Return-Type
-                                    /* Payload */
-                                    '[00, 00, 01, 88, 01, C3, C4, 1D]',
-                                ]);
-                                logger.debug('result of grab was: ' + JSON.stringify(result));
-                                finish(comps.session, done);
-                            })
-                            .catch((err: Error) => {
-                                finish(
-                                    comps.session,
-                                    done,
-                                    new Error(
-                                        `Fail to grab data due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
-                                    ),
-                                );
-                            });
                     });
+            });
         });
     });
 
     it(config.regular.list[8], function () {
         return runners.withSession(config.regular, 8, async (logger, done, comps) => {
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .type(Factory.FileType.PcapLegacy)
-                                .file(config.regular.files['someip-pcap'])
-                                .asSomeip({
-                                    fibex_file_paths: [config.regular.files['someip-fibex']],
-                                })
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    let grabbing: boolean = false;
-                    let received: number = 0;
-                    const timeout = setTimeout(() => {
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .type(Factory.FileType.PcapLegacy)
+                        .file(config.regular.files['someip-pcap'])
+                        .asSomeip({
+                            fibex_file_paths: [config.regular.files['someip-fibex']],
+                        })
+                        .get()
+                        .sterilized(),
+                )
+                .catch(finish.bind(null, comps.session, done));
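+            // Legacy PCAP with a FIBEX model: wait for at least 55 SOME/IP rows,
+            // then grab the first 4 and check how SD and RPC messages are rendered.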
+            let grabbing: boolean = false;
+            let received: number = 0;
+            const timeout = setTimeout(() => {
+                finish(
+                    comps.session,
+                    done,
+                    new Error(
+                        `Failed because timeout. Waited for at least 55 rows. Has been gotten: ${received}`,
+                    ),
+                );
+            }, 20000);
+            comps.events.StreamUpdated.subscribe((rows: number) => {
+                received = rows;
+                if (rows < 55 || grabbing) {
+                    return;
+                }
+                clearTimeout(timeout);
+                grabbing = true;
+                comps.stream
+                    .grab(0, 4)
+                    .then((result: GrabbedElement[]) => {
+                        expect(result.length).toEqual(4);
+                        expect(result[0].content.split('\u0004')).toEqual([
+                            'SD',
+                            /* Header */
+                            '65535', // Service-ID
+                            '33024', // Method-ID
+                            '60', // Length-Field
+                            '0', // Client-ID
+                            '0', // Session-ID
+                            '1', // Interface-Version
+                            '2', // Message-Type
+                            '0', // Return-Type
+                            /* Payload */
+                            'Flags [C0], Offer 123 v1.0 Inst 1 Ttl 3 UDP 192.168.178.58:30000 TCP 192.168.178.58:30000',
+                        ]);
+                        expect(result[3].content.split('\u0004')).toEqual([
+                            'RPC',
+                            /* Header */
+                            '123', // Service-ID
+                            '32773', // Method-ID
+                            '16', // Length-Field
+                            '1', // Client-ID
+                            '0', // Session-ID
+                            '1', // Interface-Version
+                            '2', // Message-Type
+                            '0', // Return-Type
+                            /* Payload */
+                            'TestService::timeEvent {\u0006\ttimestamp (INT64) : 1683656786973,\u0006}',
+                        ]);
+                        logger.debug('result of grab was: ' + JSON.stringify(result));
+                        finish(comps.session, done);
+                    })
+                    .catch((err: Error) => {
                         finish(
                             comps.session,
                             done,
                             new Error(
-                                `Failed because timeout. Waited for at least 55 rows. Has been gotten: ${received}`,
+                                `Fail to grab data due error: ${
+                                    err instanceof Error ? err.message : err
+                                }`,
                             ),
                         );
-                    }, 20000);
-                    comps.events.StreamUpdated.subscribe((rows: number) => {
-                        received = rows;
-                        if (rows < 55 || grabbing) {
-                            return;
-                        }
-                        clearTimeout(timeout);
-                        grabbing = true;
-                        comps.stream
-                            .grab(0, 4)
-                            .then((result: IGrabbedElement[]) => {
-                                expect(result.length).toEqual(4);
-                                expect(result[0].content.split('\u0004')).toEqual([
-                                    'SD',
-                                    /* Header */
-                                    '65535', // Service-ID
-                                    '33024', // Method-ID
-                                    '60', // Length-Field
-                                    '0', // Client-ID
-                                    '0', // Session-ID
-                                    '1', // Interface-Version
-                                    '2', // Message-Type
-                                    '0', // Return-Type
-                                    /* Payload */
-                                    'Flags [C0], Offer 123 v1.0 Inst 1 Ttl 3 UDP 192.168.178.58:30000 TCP 192.168.178.58:30000',
-                                ]);
-                                expect(result[3].content.split('\u0004')).toEqual([
-                                    'RPC',
-                                    /* Header */
-                                    '123', // Service-ID
-                                    '32773', // Method-ID
-                                    '16', // Length-Field
-                                    '1', // Client-ID
-                                    '0', // Session-ID
-                                    '1', // Interface-Version
-                                    '2', // Message-Type
-                                    '0', // Return-Type
-                                    /* Payload */
-                                    'TestService::timeEvent {\u0006\ttimestamp (INT64) : 1683656786973,\u0006}',
-                                ]);
-                                logger.debug('result of grab was: ' + JSON.stringify(result));
-                                finish(comps.session, done);
-                            })
-                            .catch((err: Error) => {
-                                finish(
-                                    comps.session,
-                                    done,
-                                    new Error(
-                                        `Fail to grab data due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
-                                    ),
-                                );
-                            });
                     });
+            });
         });
     });
 
     it(config.regular.list[9], function () {
         return runners.withSession(config.regular, 9, async (logger, done, comps) => {
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .type(Factory.FileType.Binary)
-                                .file(config.regular.files['someip-dlt'])
-                                .asDlt({
-                                    filter_config: undefined,
-                                    fibex_file_paths: [],
-                                    with_storage_header: true,
-                                    tz: undefined,
-                                })
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    let grabbing: boolean = false;
-                    let received: number = 0;
-                    const timeout = setTimeout(() => {
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .type(Factory.FileType.Binary)
+                        .file(config.regular.files['someip-dlt'])
+                        .asDlt({
+                            filter_config: undefined,
+                            fibex_file_paths: [],
+                            with_storage_header: true,
+                            tz: undefined,
+                        })
+                        .get()
+                        .sterilized(),
+                )
+                .catch(finish.bind(null, comps.session, done));
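+            // DLT-encapsulated SOME/IP without a FIBEX model: payloads stay as raw
+            // bytes and malformed messages are reported as parse errors.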
+            let grabbing: boolean = false;
+            let received: number = 0;
+            const timeout = setTimeout(() => {
+                finish(
+                    comps.session,
+                    done,
+                    new Error(
+                        `Failed because timeout. Waited for at least 6 rows. Has been gotten: ${received}`,
+                    ),
+                );
+            }, 20000);
+            comps.events.StreamUpdated.subscribe((rows: number) => {
+                received = rows;
+                if (rows < 6 || grabbing) {
+                    return;
+                }
+                clearTimeout(timeout);
+                grabbing = true;
+                comps.stream
+                    .grab(0, 6)
+                    .then((result: GrabbedElement[]) => {
+                        expect(result.length).toEqual(6);
+                        expect(result[0].content.split('\u0004')).toEqual([
+                            '2024-02-20T13:17:26.713537000Z',
+                            'ECU1',
+                            '1',
+                            '571',
+                            '204',
+                            '28138506',
+                            'ECU1',
+                            'APP1',
+                            'C1',
+                            'IPC',
+                            'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:123 METH:32773 LENG:16 CLID:0 SEID:58252 IVER:1 MSTP:2 RETC:0 [00, 00, 01, 88, 01, C3, C4, 1D]',
+                        ]);
+                        expect(result[5].content.split('\u0004')).toEqual([
+                            '2024-02-20T13:17:26.713537000Z',
+                            'ECU1',
+                            '1',
+                            '571',
+                            '209',
+                            '28138506',
+                            'ECU1',
+                            'APP1',
+                            'C1',
+                            'IPC',
+                            "SOME/IP 'Parse error: Not enough data: min: 25, actual: 24' [00, 7B, 80, 05, 00, 00, 00, 11, 00, 00, E3, 8C, 01, 01, 02, 00, 00, 00, 01, 88, 01, C3, C4, 1D]",
+                        ]);
+                        logger.debug('result of grab was: ' + JSON.stringify(result));
+                        finish(comps.session, done);
+                    })
+                    .catch((err: Error) => {
                         finish(
                             comps.session,
                             done,
                             new Error(
-                                `Failed because timeout. Waited for at least 55 rows. Has been gotten: ${received}`,
+                                `Fail to grab data due error: ${
+                                    err instanceof Error ? err.message : err
+                                }`,
                             ),
                         );
-                    }, 20000);
-                    comps.events.StreamUpdated.subscribe((rows: number) => {
-                        received = rows;
-                        if (rows < 6 || grabbing) {
-                            return;
-                        }
-                        clearTimeout(timeout);
-                        grabbing = true;
-                        comps.stream
-                            .grab(0, 6)
-                            .then((result: IGrabbedElement[]) => {
-                                expect(result.length).toEqual(6);
-                                expect(result[0].content.split('\u0004')).toEqual([
-                                    '2024-02-20T13:17:26.713537000Z', 'ECU1', '1', '571', '204', '28138506', 'ECU1', 'APP1', 'C1', 'IPC',
-                                    'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:123 METH:32773 LENG:16 CLID:0 SEID:58252 IVER:1 MSTP:2 RETC:0 [00, 00, 01, 88, 01, C3, C4, 1D]',
-                                ]);
-                                expect(result[5].content.split('\u0004')).toEqual([
-                                    '2024-02-20T13:17:26.713537000Z', 'ECU1', '1', '571', '209', '28138506', 'ECU1', 'APP1', 'C1', 'IPC',
-                                    'SOME/IP \'Parse error: Not enough data: min: 25, actual: 24\' [00, 7B, 80, 05, 00, 00, 00, 11, 00, 00, E3, 8C, 01, 01, 02, 00, 00, 00, 01, 88, 01, C3, C4, 1D]',
-                                ]);
-                                logger.debug('result of grab was: ' + JSON.stringify(result));
-                                finish(comps.session, done);
-                            })
-                            .catch((err: Error) => {
-                                finish(
-                                    comps.session,
-                                    done,
-                                    new Error(
-                                        `Fail to grab data due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
-                                    ),
-                                );
-                            });
                     });
+            });
         });
     });
 
     it(config.regular.list[10], function () {
         return runners.withSession(config.regular, 10, async (logger, done, comps) => {
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .type(Factory.FileType.Binary)
-                                .file(config.regular.files['someip-dlt'])
-                                .asDlt({
-                                    filter_config: undefined,
-                                    fibex_file_paths: [config.regular.files['someip-fibex']],
-                                    with_storage_header: true,
-                                    tz: undefined,
-                                })
-                                .get()
-                                .sterilized(),
-                        )
-                        .catch(finish.bind(null, comps.session, done));
-                    let grabbing: boolean = false;
-                    let received: number = 0;
-                    const timeout = setTimeout(() => {
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .type(Factory.FileType.Binary)
+                        .file(config.regular.files['someip-dlt'])
+                        .asDlt({
+                            filter_config: undefined,
+                            fibex_file_paths: [config.regular.files['someip-fibex']],
+                            with_storage_header: true,
+                            tz: undefined,
+                        })
+                        .get()
+                        .sterilized(),
+                )
+                .catch(finish.bind(null, comps.session, done));
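+            // Same DLT trace with a FIBEX model: known services and methods are
+            // resolved by name, unknown or truncated ones are flagged in the output.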
+            let grabbing: boolean = false;
+            let received: number = 0;
+            const timeout = setTimeout(() => {
+                finish(
+                    comps.session,
+                    done,
+                    new Error(
+                        `Failed because timeout. Waited for at least 6 rows. Has been gotten: ${received}`,
+                    ),
+                );
+            }, 20000);
+            comps.events.StreamUpdated.subscribe((rows: number) => {
+                received = rows;
+                if (rows < 6 || grabbing) {
+                    return;
+                }
+                clearTimeout(timeout);
+                grabbing = true;
+                comps.stream
+                    .grab(0, 6)
+                    .then((result: GrabbedElement[]) => {
+                        expect(result.length).toEqual(6);
+                        expect(result[0].content.split('\u0004')).toEqual([
+                            '2024-02-20T13:17:26.713537000Z',
+                            'ECU1',
+                            '1',
+                            '571',
+                            '204',
+                            '28138506',
+                            'ECU1',
+                            'APP1',
+                            'C1',
+                            'IPC',
+                            'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:123 METH:32773 LENG:16 CLID:0 SEID:58252 IVER:1 MSTP:2 RETC:0 TestService::timeEvent {\u0006\ttimestamp (INT64) : 1683656786973,\u0006}',
+                        ]);
+                        expect(result[1].content.split('\u0004')).toEqual([
+                            '2024-02-20T13:17:26.713537000Z',
+                            'ECU1',
+                            '1',
+                            '571',
+                            '205',
+                            '28138506',
+                            'ECU1',
+                            'APP1',
+                            'C1',
+                            'IPC',
+                            'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:124 METH:32773 LENG:16 CLID:0 SEID:58252 IVER:1 MSTP:2 RETC:0 UnknownService [00, 00, 01, 88, 01, C3, C4, 1D]',
+                        ]);
+                        expect(result[2].content.split('\u0004')).toEqual([
+                            '2024-02-20T13:17:26.713537000Z',
+                            'ECU1',
+                            '1',
+                            '571',
+                            '206',
+                            '28138506',
+                            'ECU1',
+                            'APP1',
+                            'C1',
+                            'IPC',
+                            'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:123 METH:32773 LENG:16 CLID:0 SEID:58252 IVER:3 MSTP:2 RETC:0 TestService<1?>::timeEvent {\u0006\ttimestamp (INT64) : 1683656786973,\u0006}',
+                        ]);
+                        expect(result[3].content.split('\u0004')).toEqual([
+                            '2024-02-20T13:17:26.713537000Z',
+                            'ECU1',
+                            '1',
+                            '571',
+                            '207',
+                            '28138506',
+                            'ECU1',
+                            'APP1',
+                            'C1',
+                            'IPC',
+                            'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:123 METH:32774 LENG:16 CLID:0 SEID:58252 IVER:1 MSTP:2 RETC:0 TestService::UnknownMethod [00, 00, 01, 88, 01, C3, C4, 1D]',
+                        ]);
+                        expect(result[4].content.split('\u0004')).toEqual([
+                            '2024-02-20T13:17:26.713537000Z',
+                            'ECU1',
+                            '1',
+                            '571',
+                            '208',
+                            '28138506',
+                            'ECU1',
+                            'APP1',
+                            'C1',
+                            'IPC',
+                            "SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:123 METH:32773 LENG:15 CLID:0 SEID:58252 IVER:1 MSTP:2 RETC:0 TestService::timeEvent 'SOME/IP Error: Parser exhausted at offset 0 for Object size 8' [00, 00, 01, 88, 01, C3, C4]",
+                        ]);
+                        expect(result[5].content.split('\u0004')).toEqual([
+                            '2024-02-20T13:17:26.713537000Z',
+                            'ECU1',
+                            '1',
+                            '571',
+                            '209',
+                            '28138506',
+                            'ECU1',
+                            'APP1',
+                            'C1',
+                            'IPC',
+                            "SOME/IP 'Parse error: Not enough data: min: 25, actual: 24' [00, 7B, 80, 05, 00, 00, 00, 11, 00, 00, E3, 8C, 01, 01, 02, 00, 00, 00, 01, 88, 01, C3, C4, 1D]",
+                        ]);
+                        logger.debug('result of grab was: ' + JSON.stringify(result));
+                        finish(comps.session, done);
+                    })
+                    .catch((err: Error) => {
                         finish(
                             comps.session,
                             done,
                             new Error(
-                                `Failed because timeout. Waited for at least 55 rows. Has been gotten: ${received}`,
+                                `Fail to grab data due error: ${
+                                    err instanceof Error ? err.message : err
+                                }`,
                             ),
                         );
-                    }, 20000);
-                    comps.events.StreamUpdated.subscribe((rows: number) => {
-                        received = rows;
-                        if (rows < 6 || grabbing) {
-                            return;
-                        }
-                        clearTimeout(timeout);
-                        grabbing = true;
-                        comps.stream
-                            .grab(0, 6)
-                            .then((result: IGrabbedElement[]) => {
-                                expect(result.length).toEqual(6);
-                                expect(result[0].content.split('\u0004')).toEqual([
-                                    '2024-02-20T13:17:26.713537000Z', 'ECU1', '1', '571', '204', '28138506', 'ECU1', 'APP1', 'C1', 'IPC',
-                                    'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:123 METH:32773 LENG:16 CLID:0 SEID:58252 IVER:1 MSTP:2 RETC:0 TestService::timeEvent {\u0006\ttimestamp (INT64) : 1683656786973,\u0006}',
-                                ]);
-                                expect(result[1].content.split('\u0004')).toEqual([
-                                    '2024-02-20T13:17:26.713537000Z', 'ECU1', '1', '571', '205', '28138506', 'ECU1', 'APP1', 'C1', 'IPC',
-                                    'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:124 METH:32773 LENG:16 CLID:0 SEID:58252 IVER:1 MSTP:2 RETC:0 UnknownService [00, 00, 01, 88, 01, C3, C4, 1D]',
-                                ]);
-                                expect(result[2].content.split('\u0004')).toEqual([
-                                    '2024-02-20T13:17:26.713537000Z', 'ECU1', '1', '571', '206', '28138506', 'ECU1', 'APP1', 'C1', 'IPC',
-                                    'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:123 METH:32773 LENG:16 CLID:0 SEID:58252 IVER:3 MSTP:2 RETC:0 TestService<1?>::timeEvent {\u0006\ttimestamp (INT64) : 1683656786973,\u0006}',
-                                ]);
-                                expect(result[3].content.split('\u0004')).toEqual([
-                                    '2024-02-20T13:17:26.713537000Z', 'ECU1', '1', '571', '207', '28138506', 'ECU1', 'APP1', 'C1', 'IPC',
-                                    'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:123 METH:32774 LENG:16 CLID:0 SEID:58252 IVER:1 MSTP:2 RETC:0 TestService::UnknownMethod [00, 00, 01, 88, 01, C3, C4, 1D]',
-                                ]);
-                                expect(result[4].content.split('\u0004')).toEqual([
-                                    '2024-02-20T13:17:26.713537000Z', 'ECU1', '1', '571', '208', '28138506', 'ECU1', 'APP1', 'C1', 'IPC',
-                                    'SOME/IP 0.0.0.0:0 >> INST:1 RPC SERV:123 METH:32773 LENG:15 CLID:0 SEID:58252 IVER:1 MSTP:2 RETC:0 TestService::timeEvent \'SOME/IP Error: Parser exhausted at offset 0 for Object size 8\' [00, 00, 01, 88, 01, C3, C4]',
-                                ]);
-                                expect(result[5].content.split('\u0004')).toEqual([
-                                    '2024-02-20T13:17:26.713537000Z', 'ECU1', '1', '571', '209', '28138506', 'ECU1', 'APP1', 'C1', 'IPC',
-                                    'SOME/IP \'Parse error: Not enough data: min: 25, actual: 24\' [00, 7B, 80, 05, 00, 00, 00, 11, 00, 00, E3, 8C, 01, 01, 02, 00, 00, 00, 01, 88, 01, C3, C4, 1D]',
-                                ]);
-                                logger.debug('result of grab was: ' + JSON.stringify(result));
-                                finish(comps.session, done);
-                            })
-                            .catch((err: Error) => {
-                                finish(
-                                    comps.session,
-                                    done,
-                                    new Error(
-                                        `Fail to grab data due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
-                                    ),
-                                );
-                            });
                     });
+            });
         });
     });
 });
diff --git a/application/apps/rustcore/ts-bindings/spec/session.protocol.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.protocol.spec.ts
new file mode 100644
index 0000000000..5da85e9094
--- /dev/null
+++ b/application/apps/rustcore/ts-bindings/spec/session.protocol.spec.ts
@@ -0,0 +1,327 @@
+// tslint:disable
+
+// We need to provide path to TypeScript types definitions
+/// <reference path="../node_modules/@types/jasmine/index.d.ts" />
+/// <reference path="../node_modules/@types/node/index.d.ts" />
+import { initLogger } from './logger';
+initLogger();
+
+import { finish } from './common';
+import { readConfigurationFile } from './config';
+
+import * as protocol from 'protocol';
+import * as $ from 'platform/types/observe';
+import * as runners from './runners';
+import * as fs from 'fs';
+import * as path from 'path';
+
+const config = readConfigurationFile().get().tests.protocol;
+
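+// Structural deep-equality helper used to compare decoded protocol messages with
+// their original inputs: supports plain objects, arrays and Maps and can be
+// limited to a given depth.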
+function deepEqualObj(a: any, b: any, depth = Infinity): boolean {
+    if (depth < 1 || (typeof a !== 'object' && typeof b !== 'object')) {
+        return a === b || (a == null && b == null);
+    }
+    if (a == null || b == null) {
+        return a == null && b == null;
+    }
+    if (Array.isArray(a) && Array.isArray(b)) {
+        if (a.length !== b.length) return false;
+        return a.every((item, index) => deepEqualObj(item, b[index], depth - 1));
+    }
+    if (a instanceof Map && b instanceof Map) {
+        if (a.size !== b.size) {
+            return false;
+        }
+        for (let [key, value] of a) {
+            if (!b.has(key)) return false;
+            if (!deepEqualObj(b.get(key), value, depth - 1)) return false;
+        }
+        // All entries matched; equal-sized Maps with equal entries are equal.
+        return true;
+    } else if (typeof a === 'object' && typeof b === 'object') {
+        const keys1 = Object.keys(a);
+        const keys2 = Object.keys(b);
+
+        if (keys1.length !== keys2.length) return false;
+        if (!keys1.every((key) => keys2.includes(key))) return false;
+
+        return keys1.every((key) => deepEqualObj(a[key], b[key], depth - 1));
+    }
+    return a === b;
+}
+
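+// Maps a message type name (matching the dumped test-case folder names) to the
+// protocol decoder that should be able to read it.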
+const MAP: { [key: string]: (buf: Uint8Array) => any } = {
+    AroundIndexes: protocol.decodeAroundIndexes,
+    AttachmentInfo: protocol.decodeAttachmentInfo,
+    AttachmentList: protocol.decodeAttachmentList,
+    CallbackEvent: protocol.decodeCallbackEvent,
+    CommandOutcome_bool: protocol.decodeCommandOutcomeWithbool,
+    CommandOutcome_FoldersScanningResult: protocol.decodeCommandOutcomeWithFoldersScanningResult,
+    CommandOutcome_DltStatisticInfo: protocol.decodeCommandOutcomeWithDltStatisticInfo,
+    CommandOutcome_ProfileList: protocol.decodeCommandOutcomeWithProfileList,
+    CommandOutcome_MapKeyValue: protocol.decodeCommandOutcomeWithMapKeyValue,
+    CommandOutcome_i64: protocol.decodeCommandOutcomeWithi64,
+    CommandOutcome_Option_String: protocol.decodeCommandOutcomeWithOptionString,
+    CommandOutcome_SerialPortsList: protocol.decodeCommandOutcomeWithSerialPortsList,
+    CommandOutcome_String: protocol.decodeCommandOutcomeWithString,
+    CommandOutcome_Void: protocol.decodeCommandOutcomeWithVoid,
+    ComputationError: protocol.decodeComputationError,
+    DltParserSettings: protocol.decodeDltParserSettings,
+    FileFormat: protocol.decodeFileFormat,
+    FilterMatch: protocol.decodeFilterMatch,
+    FilterMatchList: protocol.decodeFilterMatchList,
+    FolderEntity: protocol.decodeFolderEntity,
+    FolderEntityDetails: protocol.decodeFolderEntityDetails,
+    FolderEntityType: protocol.decodeFolderEntityType,
+    FoldersScanningResult: protocol.decodeFoldersScanningResult,
+    GrabbedElement: protocol.decodeGrabbedElement,
+    GrabbedElementList: protocol.decodeGrabbedElementList,
+    LifecycleTransition: protocol.decodeLifecycleTransition,
+    MulticastInfo: protocol.decodeMulticastInfo,
+    NativeError: protocol.decodeNativeError,
+    NativeErrorKind: protocol.decodeNativeErrorKind,
+    ObserveOptions: protocol.decodeObserveOptions,
+    ObserveOrigin: protocol.decodeObserveOrigin,
+    OperationDone: protocol.decodeOperationDone,
+    ParserType: protocol.decodeParserType,
+    ProcessTransportConfig: protocol.decodeProcessTransportConfig,
+    Progress: protocol.decodeProgress,
+    Ranges: protocol.decodeRanges,
+    SdeRequest: protocol.decodeSdeRequest,
+    SdeResponse: protocol.decodeSdeResponse,
+    SerialPortsList: protocol.decodeSerialPortsList,
+    SerialTransportConfig: protocol.decodeSerialTransportConfig,
+    Severity: protocol.decodeSeverity,
+    SomeIpParserSettings: protocol.decodeSomeIpParserSettings,
+    SourceDefinition: protocol.decodeSourceDefinition,
+    Sources: protocol.decodeSources,
+    TCPTransportConfig: protocol.decodeTCPTransportConfig,
+    Transport: protocol.decodeTransport,
+    UdpConnectionInfo: protocol.decodeUdpConnectionInfo,
+    UDPTransportConfig: protocol.decodeUDPTransportConfig,
+    DltStatisticInfo: protocol.decodeDltStatisticInfo,
+    Profile: protocol.decodeProfile,
+    ProfileList: protocol.decodeProfileList,
+    ExtractedMatchValue: protocol.decodeExtractedMatchValue,
+    ResultExtractedMatchValues: protocol.decodeResultExtractedMatchValues,
+    ResultU64: protocol.decodeResultU64,
+    ResultBool: protocol.decodeResultBool,
+    ResultSleep: protocol.decodeResultSleep,
+    NearestPosition: protocol.decodeNearestPosition,
+    ResultNearestPosition: protocol.decodeResultNearestPosition,
+    Point: protocol.decodePoint,
+    ResultSearchValues: protocol.decodeResultSearchValues,
+    ResultScaledDistribution: protocol.decodeResultScaledDistribution,
+    DltLevelDistribution: protocol.decodeDltLevelDistribution,
+};
+
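+// Environment variable with the path to a folder of binary-encoded test cases,
+// one subfolder per message type; the related test is skipped when it is unset.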
+const OUTPUT_PATH_ENVVAR = 'CHIPMUNK_PROTOCOL_TEST_OUTPUT';
+
+describe('Protocol', function () {
+    it(config.regular.list[1], function () {
+        return runners.noSession(config.regular, 1, async (logger, done) => {
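+            // Encode the observe options, decode them back and verify that the
+            // roundtrip preserves the original structure.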
+            function check(origin: $.IObserve) {
+                const bytes = protocol.encodeObserveOptions(origin);
+                const decoded = protocol.decodeObserveOptions(bytes);
+                expect(deepEqualObj(decoded, origin)).toBe(true);
+            }
+            check({
+                origin: { File: ['somefile', $.Types.File.FileType.Text, 'path_to_file'] },
+                parser: { Text: null },
+            });
+            check({
+                origin: {
+                    Stream: ['stream', { TCP: { bind_addr: '0.0.0.0' } }],
+                },
+                parser: { Text: null },
+            });
+            check({
+                origin: {
+                    Stream: [
+                        'stream',
+                        {
+                            Process: {
+                                command: 'command',
+                                cwd: 'cwd',
+                                envs: { one: 'one' },
+                            },
+                        },
+                    ],
+                },
+                parser: { Text: null },
+            });
+            check({
+                origin: {
+                    Concat: [
+                        ['somefile1', $.Types.File.FileType.Text, 'path_to_file'],
+                        ['somefile2', $.Types.File.FileType.Text, 'path_to_file'],
+                        ['somefile3', $.Types.File.FileType.Text, 'path_to_file'],
+                    ],
+                },
+                parser: { Text: null },
+            });
+            check({
+                origin: {
+                    File: ['somefile', $.Types.File.FileType.Binary, 'path_to_file'],
+                },
+                parser: {
+                    Dlt: {
+                        fibex_file_paths: ['path'],
+                        filter_config: undefined,
+                        with_storage_header: true,
+                        tz: 'zz',
+                    },
+                },
+            });
+            check({
+                origin: {
+                    File: ['somefile', $.Types.File.FileType.Binary, 'path_to_file'],
+                },
+                parser: {
+                    Dlt: {
+                        fibex_file_paths: [],
+                        filter_config: undefined,
+                        with_storage_header: true,
+                        tz: 'zz',
+                    },
+                },
+            });
+            check({
+                origin: {
+                    File: ['somefile', $.Types.File.FileType.Binary, 'path_to_file'],
+                },
+                parser: {
+                    Dlt: {
+                        fibex_file_paths: undefined,
+                        filter_config: undefined,
+                        with_storage_header: true,
+                        tz: 'zz',
+                    },
+                },
+            });
+            check({
+                origin: {
+                    File: ['somefile', $.Types.File.FileType.Binary, 'path_to_file'],
+                },
+                parser: {
+                    Dlt: {
+                        fibex_file_paths: ['path'],
+                        filter_config: {
+                            min_log_level: 1,
+                            app_id_count: 1,
+                            context_id_count: 1,
+                            app_ids: ['test'],
+                            ecu_ids: ['test'],
+                            context_ids: ['test'],
+                        },
+                        with_storage_header: true,
+                        tz: 'zz',
+                    },
+                },
+            });
+            check({
+                origin: {
+                    File: ['somefile', $.Types.File.FileType.PcapNG, 'path_to_file'],
+                },
+                parser: {
+                    SomeIp: {
+                        fibex_file_paths: ['path'],
+                    },
+                },
+            });
+            check({
+                origin: {
+                    File: ['somefile', $.Types.File.FileType.PcapNG, 'path_to_file'],
+                },
+                parser: {
+                    SomeIp: {
+                        fibex_file_paths: [],
+                    },
+                },
+            });
+            check({
+                origin: {
+                    File: ['somefile', $.Types.File.FileType.PcapNG, 'path_to_file'],
+                },
+                parser: {
+                    SomeIp: {
+                        fibex_file_paths: undefined,
+                    },
+                },
+            });
+            finish(undefined, done);
+        });
+    });
+    it(config.regular.list[2], function () {
+        return runners.noSession(config.regular, 2, async (logger, done) => {
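+            // Decode every dumped test case found under OUTPUT_PATH_ENVVAR with the
+            // decoder registered for its message type.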
+            const casesPath = process.env[OUTPUT_PATH_ENVVAR];
+            if (typeof casesPath !== 'string' || casesPath.trim() === '') {
+                console.log(
+                    `${'='.repeat(50)}\n${logger.warn(
+                        `Testing of all use-cases is skipped because ${OUTPUT_PATH_ENVVAR} isn't defined`,
+                    )}\n${'='.repeat(50)}`,
+                );
+                return finish(undefined, done);
+            }
+            if (!fs.existsSync(casesPath)) {
+                return finish(
+                    undefined,
+                    done,
+                    new Error(
+                        `Fail to find data passed due ${OUTPUT_PATH_ENVVAR}: ${casesPath} doesn't exist`,
+                    ),
+                );
+            }
+            const folders = fs.readdirSync(casesPath);
+            for (let typeOfMessage of folders) {
+                const targetFullPath = path.join(casesPath, typeOfMessage);
+                if (!fs.statSync(targetFullPath).isDirectory()) {
+                    continue;
+                }
+                const cases = fs.readdirSync(targetFullPath);
+                for (let testCase of cases) {
+                    const fullPath = path.join(targetFullPath, testCase);
+                    if (!fs.statSync(fullPath).isFile()) {
+                        continue;
+                    }
+                    const buffer = fs.readFileSync(fullPath);
+                    const decoder = MAP[typeOfMessage];
+                    if (decoder === undefined) {
+                        return finish(
+                            undefined,
+                            done,
+                            new Error(`Fail to find decoder for ${typeOfMessage}`),
+                        );
+                    }
+                    const _msg = decoder(Uint8Array.from(buffer));
+                }
+                console.log(`[OK]: ${typeOfMessage}`);
+            }
+            finish(undefined, done);
+        });
+    });
+    it(config.regular.list[3], function () {
+        return runners.withSession(config.regular, 3, async (logger, done, comps) => {
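+            // Rough benchmark: grab elements as JSON and as binary-encoded messages
+            // and log the total and per-message timings for each encoding.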
+            const MESSAGES_COUNT = 100;
+            const measure: { json: number; bin: number } = { json: 0, bin: 0 };
+            measure.json = Date.now();
+            for (let i = MESSAGES_COUNT; i > 0; i -= 1) {
+                const msg = comps.session.getNativeSession().testGrabElsAsJson();
+                expect(msg instanceof Array).toBe(true);
+            }
+            measure.json = Date.now() - measure.json;
+            measure.bin = Date.now();
+            for (let i = MESSAGES_COUNT; i > 0; i -= 1) {
+                const msg = comps.session.getNativeSession().testGrabElsAsBin();
+                expect(msg instanceof Array).toBe(true);
+            }
+            measure.bin = Date.now() - measure.bin;
+            console.log(
+                `Grabbing messages (with decoding) count: ${MESSAGES_COUNT}\nJSON: ${
+                    measure.json
+                }ms (per msg ${(measure.json / MESSAGES_COUNT).toFixed(4)});\nBIN: ${
+                    measure.bin
+                }ms (per msg ${(measure.bin / MESSAGES_COUNT).toFixed(4)})`,
+            );
+            finish(comps.session, done);
+        });
+    });
+});
diff --git a/application/apps/rustcore/ts-bindings/spec/session.ranges.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.ranges.spec.ts
index a7ed56fd72..b04c5c48bc 100644
--- a/application/apps/rustcore/ts-bindings/spec/session.ranges.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/session.ranges.spec.ts
@@ -6,7 +6,7 @@
 import { initLogger } from './logger';
 initLogger();
 import { Factory } from '../src/api/session';
-import { IGrabbedElement } from 'platform/types/content';
+import { GrabbedElement } from 'platform/types/bindings';
 import { createSampleFile, finish } from './common';
 import { readConfigurationFile } from './config';
 
@@ -44,8 +44,8 @@ describe('Grab ranges', function () {
                 grabbing = true;
                 Promise.all([
                     comps.stream
-                        .grabRanges([{ from: 0, to: 99 }])
-                        .then((result: IGrabbedElement[]) => {
+                        .grabRanges([{ start: 0, end: 99 }])
+                        .then((result: GrabbedElement[]) => {
                             logger.debug('result of grab was: ' + JSON.stringify(result));
                             expect(
                                 result
@@ -55,10 +55,10 @@ describe('Grab ranges', function () {
                         }),
                     comps.stream
                         .grabRanges([
-                            { from: 0, to: 0 },
-                            { from: 10, to: 10 },
+                            { start: 0, end: 0 },
+                            { start: 10, end: 10 },
                         ])
-                        .then((result: IGrabbedElement[]) => {
+                        .then((result: GrabbedElement[]) => {
                             logger.debug('result of grab was: ' + JSON.stringify(result));
                             expect(result.length).toEqual(2);
                             expect(parseInt(result[0].content, 10)).toEqual(0);
@@ -66,12 +66,12 @@ describe('Grab ranges', function () {
                         }),
                     comps.stream
                         .grabRanges([
-                            { from: 0, to: 10 },
-                            { from: 99, to: 200 },
-                            { from: 299, to: 300 },
-                            { from: 599, to: 600 },
+                            { start: 0, end: 10 },
+                            { start: 99, end: 200 },
+                            { start: 299, end: 300 },
+                            { start: 599, end: 600 },
                         ])
-                        .then((result: IGrabbedElement[]) => {
+                        .then((result: GrabbedElement[]) => {
                             logger.debug('result of grab was: ' + JSON.stringify(result));
                             expect(
                                 result
diff --git a/application/apps/rustcore/ts-bindings/spec/session.search.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.search.spec.ts
index ab4e218e6e..593e2d9c40 100644
--- a/application/apps/rustcore/ts-bindings/spec/session.search.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/session.search.spec.ts
@@ -6,7 +6,7 @@
 import { initLogger } from './logger';
 initLogger();
 import { Factory } from '../src/api/session';
-import { IGrabbedElement } from 'platform/types/content';
+import { GrabbedElement } from 'platform/types/bindings';
 import { finish, createSampleFile } from './common';
 import { readConfigurationFile } from './config';
 
@@ -17,245 +17,73 @@ const config = readConfigurationFile().get().tests.search;
 describe('Search', function () {
     it(config.regular.list[1], function () {
         return runners.withSession(config.regular, 1, async (logger, done, comps) => {
-                    const tmpobj = createSampleFile(
-                        5000,
-                        logger,
-                        (i: number) =>
-                            `[${i}]:: ${
-                                i % 100 === 0 || i <= 5
-                                    ? `some match line data\n`
-                                    : `some line data\n`
-                            }`,
-                    );
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .asText()
-                                .type(Factory.FileType.Text)
-                                .file(tmpobj.name)
-                                .get()
-                                .sterilized(),
-                        )
-                        .on('processing', () => {
+            const tmpobj = createSampleFile(
+                5000,
+                logger,
+                (i: number) =>
+                    `[${i}]:: ${
+                        i % 100 === 0 || i <= 5 ? `some match line data\n` : `some line data\n`
+                    }`,
+            );
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .asText()
+                        .type(Factory.FileType.Text)
+                        .file(tmpobj.name)
+                        .get()
+                        .sterilized(),
+                )
+                .on('processing', () => {
+                    comps.search
+                        .search([
+                            {
+                                filter: 'match',
+                                flags: { reg: true, word: false, cases: false },
+                            },
+                        ])
+                        .then((_) => {
                             comps.search
-                                .search([
-                                    {
-                                        filter: 'match',
-                                        flags: { reg: true, word: false, cases: false },
-                                    },
-                                ])
-                                .then((_) => {
-                                    comps.search
-                                        .getMap(54)
-                                        .then((map) => {
-                                            logger.verbose(map);
-                                            comps.search
-                                                .grab(0, 11)
-                                                .then((result: IGrabbedElement[]) => {
-                                                    expect(result.map((i) => i.content)).toEqual([
-                                                        '[0]:: some match line data',
-                                                        '[1]:: some match line data',
-                                                        '[2]:: some match line data',
-                                                        '[3]:: some match line data',
-                                                        '[4]:: some match line data',
-                                                        '[5]:: some match line data',
-                                                        '[100]:: some match line data',
-                                                        '[200]:: some match line data',
-                                                        '[300]:: some match line data',
-                                                        '[400]:: some match line data',
-                                                        '[500]:: some match line data',
-                                                    ]);
-                                                    expect(result.map((i) => i.position)).toEqual([
-                                                        0, // 0
-                                                        1, // 1
-                                                        2, // 2
-                                                        3, // 3
-                                                        4, // 4
-                                                        5, // 5
-                                                        100, // 6
-                                                        200, // 7
-                                                        300, // 8
-                                                        400, // 9
-                                                        500, // 10
-                                                    ]);
-                                                    Promise.allSettled(
-                                                        [
-                                                            [10, 5, 5],
-                                                            [110, 6, 100],
-                                                            [390, 9, 400],
-                                                            [600, 11, 600],
-                                                        ].map((data) => {
-                                                            return comps.search
-                                                                .getNearest(data[0])
-                                                                .then((nearest) => {
-                                                                    expect(typeof nearest).toEqual(
-                                                                        'object',
-                                                                    );
-                                                                    expect(
-                                                                        (nearest as any).index,
-                                                                    ).toEqual(data[1]);
-                                                                    expect(
-                                                                        (nearest as any).position,
-                                                                    ).toEqual(data[2]);
-                                                                })
-                                                                .catch((err: Error) => {
-                                                                    fail(err);
-                                                                });
-                                                        }),
-                                                    )
-                                                        .then(() => {
-                                                            comps.search
-                                                                .len()
-                                                                .then((len: number) => {
-                                                                    expect(len).toEqual(55);
-                                                                    expect(
-                                                                        searchStreamUpdated,
-                                                                    ).toEqual(true);
-                                                                    comps.stream
-                                                                        .getIndexedRanges()
-                                                                        .then((ranges) => {
-                                                                            expect(
-                                                                                ranges[0].from,
-                                                                            ).toEqual(0);
-                                                                            expect(
-                                                                                ranges[0].to,
-                                                                            ).toEqual(5);
-                                                                            expect(
-                                                                                ranges.length,
-                                                                            ).toEqual(50);
-                                                                            for (
-                                                                                let i = 1;
-                                                                                i < 50;
-                                                                                i += 1
-                                                                            ) {
-                                                                                expect(
-                                                                                    ranges[i].from,
-                                                                                ).toEqual(i * 100);
-                                                                                expect(
-                                                                                    ranges[i].to,
-                                                                                ).toEqual(i * 100);
-                                                                            }
-                                                                            finish(comps.session, done);
-                                                                        })
-                                                                        .catch((err: Error) => {
-                                                                            finish(
-                                                                                comps.session,
-                                                                                done,
-                                                                                err,
-                                                                            );
-                                                                        });
-                                                                })
-                                                                .catch((err: Error) => {
-                                                                    finish(comps.session, done, err);
-                                                                });
-                                                        })
-                                                        .catch((err: Error) => {
-                                                            finish(comps.session, done, err);
-                                                        });
-                                                })
-                                                .catch((err: Error) => {
-                                                    finish(
-                                                        comps.session,
-                                                        done,
-                                                        new Error(
-                                                            `Fail to grab data due error: ${
-                                                                err instanceof Error
-                                                                    ? err.message
-                                                                    : err
-                                                            }`,
-                                                        ),
-                                                    );
-                                                });
-                                        })
-                                        .catch(finish.bind(null, comps.session, done));
-                                })
-                                .catch(finish.bind(null, comps.session, done));
-                        })
-                        .catch(finish.bind(null, comps.session, done));
-                    let searchStreamUpdated = false;
-                    comps.events.SearchUpdated.subscribe((event) => {
-                        searchStreamUpdated = true;
-                    });
-        });
-    });
-
-    it(config.regular.list[2], function () {
-        return runners.withSession(config.regular, 2, async (logger, done, comps) => {
-                    const tmpobj = createSampleFile(
-                        5000,
-                        logger,
-                        (i: number) =>
-                            `[${i}]:: ${
-                                i % 100 === 0 || i <= 5
-                                    ? `some match A line data\n`
-                                    : i % 50 === 0
-                                    ? `some match B line data\n`
-                                    : i === 9
-                                    ? `some 666 line data\n`
-                                    : `some line data\n`
-                            }`,
-                    );
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .asText()
-                                .type(Factory.FileType.Text)
-                                .file(tmpobj.name)
-                                .get()
-                                .sterilized(),
-                        )
-                        .on('processing', () => {
-                            comps.search
-                                .search([
-                                    {
-                                        filter: 'match A',
-                                        flags: { reg: true, word: false, cases: false },
-                                    },
-                                    {
-                                        filter: 'match B',
-                                        flags: { reg: true, word: false, cases: false },
-                                    },
-                                    {
-                                        filter: '666',
-                                        flags: { reg: true, word: false, cases: false },
-                                    },
-                                ])
-                                .then((result) => {
+                                .getMap(54)
+                                .then((map) => {
+                                    logger.verbose(map);
                                     comps.search
                                         .grab(0, 11)
-                                        .then((result: IGrabbedElement[]) => {
+                                        .then((result: GrabbedElement[]) => {
                                             expect(result.map((i) => i.content)).toEqual([
-                                                '[0]:: some match A line data',
-                                                '[1]:: some match A line data',
-                                                '[2]:: some match A line data',
-                                                '[3]:: some match A line data',
-                                                '[4]:: some match A line data',
-                                                '[5]:: some match A line data',
-                                                '[9]:: some 666 line data',
-                                                '[50]:: some match B line data',
-                                                '[100]:: some match A line data',
-                                                '[150]:: some match B line data',
-                                                '[200]:: some match A line data',
+                                                '[0]:: some match line data',
+                                                '[1]:: some match line data',
+                                                '[2]:: some match line data',
+                                                '[3]:: some match line data',
+                                                '[4]:: some match line data',
+                                                '[5]:: some match line data',
+                                                '[100]:: some match line data',
+                                                '[200]:: some match line data',
+                                                '[300]:: some match line data',
+                                                '[400]:: some match line data',
+                                                '[500]:: some match line data',
                                             ]);
-                                            expect(result.map((i) => i.position)).toEqual([
+                                            logger.verbose(result);
+                                            expect(result.map((i) => i.pos)).toEqual([
                                                 0, // 0
                                                 1, // 1
                                                 2, // 2
                                                 3, // 3
                                                 4, // 4
                                                 5, // 5
-                                                9, // 6
-                                                50, // 7
-                                                100, // 8
-                                                150, // 9
-                                                200, // 10
+                                                100, // 6
+                                                200, // 7
+                                                300, // 8
+                                                400, // 9
+                                                500, // 10
                                             ]);
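+                                            // Each entry is [probe position, expected nearest index, expected nearest position].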
                                             Promise.allSettled(
                                                 [
-                                                    [5, 5, 5],
-                                                    [10, 6, 9],
-                                                    [55, 7, 50],
-                                                    [190, 10, 200],
+                                                    [10, 5, 5],
+                                                    [110, 6, 100],
+                                                    [390, 9, 400],
+                                                    [600, 11, 600],
                                                 ].map((data) => {
                                                     return comps.search
                                                         .getNearest(data[0])
@@ -279,8 +107,43 @@ describe('Search', function () {
                                                     comps.search
                                                         .len()
                                                         .then((len: number) => {
-                                                            expect(len).toEqual(111);
-                                                            finish(comps.session, done);
+                                                            expect(len).toEqual(55);
+                                                            expect(searchStreamUpdated).toEqual(
+                                                                true,
+                                                            );
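+                                                            // Expect 50 indexed ranges: rows 0..5, then a single-row range at every 100th line.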
+                                                            comps.stream
+                                                                .getIndexedRanges()
+                                                                .then((ranges) => {
+                                                                    expect(ranges[0].start).toEqual(
+                                                                        0,
+                                                                    );
+                                                                    expect(ranges[0].end).toEqual(
+                                                                        5,
+                                                                    );
+                                                                    expect(ranges.length).toEqual(
+                                                                        50,
+                                                                    );
+                                                                    for (
+                                                                        let i = 1;
+                                                                        i < 50;
+                                                                        i += 1
+                                                                    ) {
+                                                                        expect(
+                                                                            ranges[i].start,
+                                                                        ).toEqual(i * 100);
+                                                                        expect(
+                                                                            ranges[i].end,
+                                                                        ).toEqual(i * 100);
+                                                                    }
+                                                                    finish(comps.session, done);
+                                                                })
+                                                                .catch((err: Error) => {
+                                                                    finish(
+                                                                        comps.session,
+                                                                        done,
+                                                                        err,
+                                                                    );
+                                                                });
                                                         })
                                                         .catch((err: Error) => {
                                                             finish(comps.session, done, err);
@@ -305,507 +168,622 @@ describe('Search', function () {
                                 .catch(finish.bind(null, comps.session, done));
                         })
                         .catch(finish.bind(null, comps.session, done));
+                })
+                .catch(finish.bind(null, comps.session, done));
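+            // The flag and subscription are set up synchronously, before any of the async callbacks above can fire.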
+            let searchStreamUpdated = false;
+            comps.events.SearchUpdated.subscribe((event) => {
+                searchStreamUpdated = true;
+            });
         });
     });
 
-    it(config.regular.list[3], function () {
-        return runners.withSession(config.regular, 3, async (logger, done, comps) => {
-                    const tmpobj = createSampleFile(
-                        5000,
-                        logger,
-                        (i: number) => `[${i}]:: some line data\n`,
-                    );
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .asText()
-                                .type(Factory.FileType.Text)
-                                .file(tmpobj.name)
-                                .get()
-                                .sterilized(),
-                        )
-                        .on('processing', () => {
+    it(config.regular.list[2], function () {
+        return runners.withSession(config.regular, 2, async (logger, done, comps) => {
+            const tmpobj = createSampleFile(
+                5000,
+                logger,
+                (i: number) =>
+                    `[${i}]:: ${
+                        i % 100 === 0 || i <= 5
+                            ? `some match A line data\n`
+                            : i % 50 === 0
+                            ? `some match B line data\n`
+                            : i === 9
+                            ? `some 666 line data\n`
+                            : `some line data\n`
+                    }`,
+            );
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .asText()
+                        .type(Factory.FileType.Text)
+                        .file(tmpobj.name)
+                        .get()
+                        .sterilized(),
+                )
+                .on('processing', () => {
+                    comps.search
+                        .search([
+                            {
+                                filter: 'match A',
+                                flags: { reg: true, word: false, cases: false },
+                            },
+                            {
+                                filter: 'match B',
+                                flags: { reg: true, word: false, cases: false },
+                            },
+                            {
+                                filter: '666',
+                                flags: { reg: true, word: false, cases: false },
+                            },
+                        ])
+                        .then((result) => {
                             comps.search
-                                .search([
-                                    {
-                                        filter: 'not relevant search',
-                                        flags: { reg: true, word: false, cases: false },
-                                    },
-                                ])
-                                .then((found) => {
-                                    expect(found).toEqual(0);
-                                    finish(comps.session, done);
+                                .grab(0, 11)
+                                .then((result: GrabbedElement[]) => {
+                                    expect(result.map((i) => i.content)).toEqual([
+                                        '[0]:: some match A line data',
+                                        '[1]:: some match A line data',
+                                        '[2]:: some match A line data',
+                                        '[3]:: some match A line data',
+                                        '[4]:: some match A line data',
+                                        '[5]:: some match A line data',
+                                        '[9]:: some 666 line data',
+                                        '[50]:: some match B line data',
+                                        '[100]:: some match A line data',
+                                        '[150]:: some match B line data',
+                                        '[200]:: some match A line data',
+                                    ]);
+                                    expect(result.map((i) => i.pos)).toEqual([
+                                        0, // 0
+                                        1, // 1
+                                        2, // 2
+                                        3, // 3
+                                        4, // 4
+                                        5, // 5
+                                        9, // 6
+                                        50, // 7
+                                        100, // 8
+                                        150, // 9
+                                        200, // 10
+                                    ]);
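+                                    // Each entry is [probe position, expected nearest index, expected nearest position].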
+                                    Promise.allSettled(
+                                        [
+                                            [5, 5, 5],
+                                            [10, 6, 9],
+                                            [55, 7, 50],
+                                            [190, 10, 200],
+                                        ].map((data) => {
+                                            return comps.search
+                                                .getNearest(data[0])
+                                                .then((nearest) => {
+                                                    expect(typeof nearest).toEqual('object');
+                                                    expect((nearest as any).index).toEqual(data[1]);
+                                                    expect((nearest as any).position).toEqual(
+                                                        data[2],
+                                                    );
+                                                })
+                                                .catch((err: Error) => {
+                                                    fail(err);
+                                                });
+                                        }),
+                                    )
+                                        .then(() => {
+                                            comps.search
+                                                .len()
+                                                .then((len: number) => {
+                                                    expect(len).toEqual(111);
+                                                    finish(comps.session, done);
+                                                })
+                                                .catch((err: Error) => {
+                                                    finish(comps.session, done, err);
+                                                });
+                                        })
+                                        .catch((err: Error) => {
+                                            finish(comps.session, done, err);
+                                        });
                                 })
-                                .catch(finish.bind(null, comps.session, done));
+                                .catch((err: Error) => {
+                                    finish(
+                                        comps.session,
+                                        done,
+                                        new Error(
+                                            `Failed to grab data due to error: ${
+                                                err instanceof Error ? err.message : err
+                                            }`,
+                                        ),
+                                    );
+                                });
+                        })
+                        .catch(finish.bind(null, comps.session, done));
+                })
+                .catch(finish.bind(null, comps.session, done));
+        });
+    });
+
+    it(config.regular.list[3], function () {
+        return runners.withSession(config.regular, 3, async (logger, done, comps) => {
+            const tmpobj = createSampleFile(
+                5000,
+                logger,
+                (i: number) => `[${i}]:: some line data\n`,
+            );
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .asText()
+                        .type(Factory.FileType.Text)
+                        .file(tmpobj.name)
+                        .get()
+                        .sterilized(),
+                )
+                .on('processing', () => {
+                    comps.search
+                        .search([
+                            {
+                                filter: 'not relevant search',
+                                flags: { reg: true, word: false, cases: false },
+                            },
+                        ])
+                        .then((found) => {
+                            expect(found).toEqual(0);
+                            finish(comps.session, done);
                         })
                         .catch(finish.bind(null, comps.session, done));
+                })
+                .catch(finish.bind(null, comps.session, done));
         });
     });
 
     it(config.regular.list[4], function () {
         return runners.withSession(config.regular, 4, async (logger, done, comps) => {
-                    const tmpobj = createSampleFile(
-                        5000,
-                        logger,
-                        (i: number) =>
-                            `[${i}]:: ${
-                                i % 100 === 0 || i <= 5
-                                    ? `some mAtCh line data\n`
-                                    : `some line data\n`
-                            }`,
-                    );
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .asText()
-                                .type(Factory.FileType.Text)
-                                .file(tmpobj.name)
-                                .get()
-                                .sterilized(),
-                        )
-                        .on('processing', () => {
+            const tmpobj = createSampleFile(
+                5000,
+                logger,
+                (i: number) =>
+                    `[${i}]:: ${
+                        i % 100 === 0 || i <= 5 ? `some mAtCh line data\n` : `some line data\n`
+                    }`,
+            );
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .asText()
+                        .type(Factory.FileType.Text)
+                        .file(tmpobj.name)
+                        .get()
+                        .sterilized(),
+                )
+                .on('processing', () => {
+                    comps.search
+                        .search([
+                            {
+                                filter: 'match',
+                                flags: { reg: true, word: false, cases: false },
+                            },
+                        ])
+                        .then((_) => {
+                            // Search results are now available on the Rust side.
                             comps.search
-                                .search([
-                                    {
-                                        filter: 'match',
-                                        flags: { reg: true, word: false, cases: false },
-                                    },
-                                ])
-                                .then((_) => {
-                                    // search results available on rust side
+                                .getMap(54)
+                                .then((map) => {
                                     comps.search
-                                        .getMap(54)
-                                        .then((map) => {
-                                            comps.search
-                                                .grab(0, 11)
-                                                .then((result: IGrabbedElement[]) => {
-                                                    expect(result.map((i) => i.content)).toEqual([
-                                                        '[0]:: some mAtCh line data',
-                                                        '[1]:: some mAtCh line data',
-                                                        '[2]:: some mAtCh line data',
-                                                        '[3]:: some mAtCh line data',
-                                                        '[4]:: some mAtCh line data',
-                                                        '[5]:: some mAtCh line data',
-                                                        '[100]:: some mAtCh line data',
-                                                        '[200]:: some mAtCh line data',
-                                                        '[300]:: some mAtCh line data',
-                                                        '[400]:: some mAtCh line data',
-                                                        '[500]:: some mAtCh line data',
-                                                    ]);
-                                                    expect(result.map((i) => i.position)).toEqual([
-                                                        0, // 0
-                                                        1, // 1
-                                                        2, // 2
-                                                        3, // 3
-                                                        4, // 4
-                                                        5, // 5
-                                                        100, // 6
-                                                        200, // 7
-                                                        300, // 8
-                                                        400, // 9
-                                                        500, // 10
-                                                    ]);
-                                                    Promise.allSettled(
-                                                        [
-                                                            [10, 5, 5],
-                                                            [110, 6, 100],
-                                                            [390, 9, 400],
-                                                            [600, 11, 600],
-                                                        ].map((data) => {
-                                                            return comps.search
-                                                                .getNearest(data[0])
-                                                                .then((nearest) => {
-                                                                    expect(typeof nearest).toEqual(
-                                                                        'object',
-                                                                    );
-                                                                    expect(
-                                                                        (nearest as any).index,
-                                                                    ).toEqual(data[1]);
-                                                                    expect(
-                                                                        (nearest as any).position,
-                                                                    ).toEqual(data[2]);
-                                                                })
-                                                                .catch((err: Error) => {
-                                                                    fail(err);
-                                                                });
-                                                        }),
-                                                    )
-                                                        .then(() => {
-                                                            comps.search
-                                                                .len()
-                                                                .then((len: number) => {
-                                                                    expect(len).toEqual(55);
-                                                                    finish(comps.session, done);
-                                                                })
-                                                                .catch((err: Error) => {
-                                                                    finish(comps.session, done, err);
-                                                                });
+                                        .grab(0, 11)
+                                        .then((result: GrabbedElement[]) => {
+                                            expect(result.map((i) => i.content)).toEqual([
+                                                '[0]:: some mAtCh line data',
+                                                '[1]:: some mAtCh line data',
+                                                '[2]:: some mAtCh line data',
+                                                '[3]:: some mAtCh line data',
+                                                '[4]:: some mAtCh line data',
+                                                '[5]:: some mAtCh line data',
+                                                '[100]:: some mAtCh line data',
+                                                '[200]:: some mAtCh line data',
+                                                '[300]:: some mAtCh line data',
+                                                '[400]:: some mAtCh line data',
+                                                '[500]:: some mAtCh line data',
+                                            ]);
+                                            expect(result.map((i) => i.pos)).toEqual([
+                                                0, // 0
+                                                1, // 1
+                                                2, // 2
+                                                3, // 3
+                                                4, // 4
+                                                5, // 5
+                                                100, // 6
+                                                200, // 7
+                                                300, // 8
+                                                400, // 9
+                                                500, // 10
+                                            ]);
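+                                            // Each entry is [probe position, expected nearest index, expected nearest position].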
+                                            Promise.allSettled(
+                                                [
+                                                    [10, 5, 5],
+                                                    [110, 6, 100],
+                                                    [390, 9, 400],
+                                                    [600, 11, 600],
+                                                ].map((data) => {
+                                                    return comps.search
+                                                        .getNearest(data[0])
+                                                        .then((nearest) => {
+                                                            expect(typeof nearest).toEqual(
+                                                                'object',
+                                                            );
+                                                            expect((nearest as any).index).toEqual(
+                                                                data[1],
+                                                            );
+                                                            expect(
+                                                                (nearest as any).position,
+                                                            ).toEqual(data[2]);
+                                                        })
+                                                        .catch((err: Error) => {
+                                                            fail(err);
+                                                        });
+                                                }),
+                                            )
+                                                .then(() => {
+                                                    comps.search
+                                                        .len()
+                                                        .then((len: number) => {
+                                                            expect(len).toEqual(55);
+                                                            finish(comps.session, done);
                                                         })
                                                         .catch((err: Error) => {
                                                             finish(comps.session, done, err);
                                                         });
                                                 })
                                                 .catch((err: Error) => {
-                                                    finish(
-                                                        comps.session,
-                                                        done,
-                                                        new Error(
-                                                            `Fail to grab data due error: ${
-                                                                err instanceof Error
-                                                                    ? err.message
-                                                                    : err
-                                                            }`,
-                                                        ),
-                                                    );
+                                                    finish(comps.session, done, err);
                                                 });
                                         })
-                                        .catch(finish.bind(null, comps.session, done));
+                                        .catch((err: Error) => {
+                                            finish(
+                                                comps.session,
+                                                done,
+                                                new Error(
+                                                    `Failed to grab data due to error: ${
+                                                        err instanceof Error ? err.message : err
+                                                    }`,
+                                                ),
+                                            );
+                                        });
                                 })
                                 .catch(finish.bind(null, comps.session, done));
                         })
                         .catch(finish.bind(null, comps.session, done));
+                })
+                .catch(finish.bind(null, comps.session, done));
         });
     });
 
     it(config.regular.list[5], function () {
         return runners.withSession(config.regular, 5, async (logger, done, comps) => {
-                    const tmpobj = createSampleFile(
-                        5000,
-                        logger,
-                        (i: number) =>
-                            `[${i}]:: ${
-                                i % 100 === 0 || i <= 5
-                                    ? `some match line data\n`
-                                    : `some line matchmatchmatch data\n`
-                            }`,
-                    );
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .asText()
-                                .type(Factory.FileType.Text)
-                                .file(tmpobj.name)
-                                .get()
-                                .sterilized(),
-                        )
-                        .on('processing', () => {
+            const tmpobj = createSampleFile(
+                5000,
+                logger,
+                (i: number) =>
+                    `[${i}]:: ${
+                        i % 100 === 0 || i <= 5
+                            ? `some match line data\n`
+                            : `some line matchmatchmatch data\n`
+                    }`,
+            );
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .asText()
+                        .type(Factory.FileType.Text)
+                        .file(tmpobj.name)
+                        .get()
+                        .sterilized(),
+                )
+                .on('processing', () => {
+                    comps.search
+                        .search([
+                            {
+                                filter: 'match',
+                                flags: { reg: true, word: true, cases: false },
+                            },
+                        ])
+                        .then((_) => {
+                            // Search results are now available on the Rust side.
                             comps.search
-                                .search([
-                                    {
-                                        filter: 'match',
-                                        flags: { reg: true, word: true, cases: false },
-                                    },
-                                ])
-                                .then((_) => {
-                                    // search results available on rust side
+                                .getMap(54)
+                                .then((map) => {
                                     comps.search
-                                        .getMap(54)
-                                        .then((map) => {
-                                            comps.search
-                                                .grab(0, 11)
-                                                .then((result: IGrabbedElement[]) => {
-                                                    expect(result.map((i) => i.content)).toEqual([
-                                                        '[0]:: some match line data',
-                                                        '[1]:: some match line data',
-                                                        '[2]:: some match line data',
-                                                        '[3]:: some match line data',
-                                                        '[4]:: some match line data',
-                                                        '[5]:: some match line data',
-                                                        '[100]:: some match line data',
-                                                        '[200]:: some match line data',
-                                                        '[300]:: some match line data',
-                                                        '[400]:: some match line data',
-                                                        '[500]:: some match line data',
-                                                    ]);
-                                                    expect(result.map((i) => i.position)).toEqual([
-                                                        0, // 0
-                                                        1, // 1
-                                                        2, // 2
-                                                        3, // 3
-                                                        4, // 4
-                                                        5, // 5
-                                                        100, // 6
-                                                        200, // 7
-                                                        300, // 8
-                                                        400, // 9
-                                                        500, // 10
-                                                    ]);
-                                                    Promise.allSettled(
-                                                        [
-                                                            [10, 5, 5],
-                                                            [110, 6, 100],
-                                                            [390, 9, 400],
-                                                            [600, 11, 600],
-                                                        ].map((data) => {
-                                                            return comps.search
-                                                                .getNearest(data[0])
-                                                                .then((nearest) => {
-                                                                    expect(typeof nearest).toEqual(
-                                                                        'object',
-                                                                    );
-                                                                    expect(
-                                                                        (nearest as any).index,
-                                                                    ).toEqual(data[1]);
-                                                                    expect(
-                                                                        (nearest as any).position,
-                                                                    ).toEqual(data[2]);
-                                                                })
-                                                                .catch((err: Error) => {
-                                                                    fail(err);
-                                                                });
-                                                        }),
-                                                    )
-                                                        .then(() => {
-                                                            comps.search
-                                                                .len()
-                                                                .then((len: number) => {
-                                                                    expect(len).toEqual(55);
-                                                                    finish(comps.session, done);
-                                                                })
-                                                                .catch((err: Error) => {
-                                                                    finish(comps.session, done, err);
-                                                                });
+                                        .grab(0, 11)
+                                        .then((result: GrabbedElement[]) => {
+                                            expect(result.map((i) => i.content)).toEqual([
+                                                '[0]:: some match line data',
+                                                '[1]:: some match line data',
+                                                '[2]:: some match line data',
+                                                '[3]:: some match line data',
+                                                '[4]:: some match line data',
+                                                '[5]:: some match line data',
+                                                '[100]:: some match line data',
+                                                '[200]:: some match line data',
+                                                '[300]:: some match line data',
+                                                '[400]:: some match line data',
+                                                '[500]:: some match line data',
+                                            ]);
+                                            expect(result.map((i) => i.pos)).toEqual([
+                                                0, // 0
+                                                1, // 1
+                                                2, // 2
+                                                3, // 3
+                                                4, // 4
+                                                5, // 5
+                                                100, // 6
+                                                200, // 7
+                                                300, // 8
+                                                400, // 9
+                                                500, // 10
+                                            ]);
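+                                            // Each entry is [probe position, expected nearest index, expected nearest position].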
+                                            Promise.allSettled(
+                                                [
+                                                    [10, 5, 5],
+                                                    [110, 6, 100],
+                                                    [390, 9, 400],
+                                                    [600, 11, 600],
+                                                ].map((data) => {
+                                                    return comps.search
+                                                        .getNearest(data[0])
+                                                        .then((nearest) => {
+                                                            expect(typeof nearest).toEqual(
+                                                                'object',
+                                                            );
+                                                            expect((nearest as any).index).toEqual(
+                                                                data[1],
+                                                            );
+                                                            expect(
+                                                                (nearest as any).position,
+                                                            ).toEqual(data[2]);
+                                                        })
+                                                        .catch((err: Error) => {
+                                                            fail(err);
+                                                        });
+                                                }),
+                                            )
+                                                .then(() => {
+                                                    comps.search
+                                                        .len()
+                                                        .then((len: number) => {
+                                                            expect(len).toEqual(55);
+                                                            finish(comps.session, done);
                                                         })
                                                         .catch((err: Error) => {
                                                             finish(comps.session, done, err);
                                                         });
                                                 })
                                                 .catch((err: Error) => {
-                                                    finish(
-                                                        comps.session,
-                                                        done,
-                                                        new Error(
-                                                            `Fail to grab data due error: ${
-                                                                err instanceof Error
-                                                                    ? err.message
-                                                                    : err
-                                                            }`,
-                                                        ),
-                                                    );
+                                                    finish(comps.session, done, err);
                                                 });
                                         })
-                                        .catch(finish.bind(null, comps.session, done));
+                                        .catch((err: Error) => {
+                                            finish(
+                                                comps.session,
+                                                done,
+                                                new Error(
+                                                    `Failed to grab data due to error: ${
+                                                        err instanceof Error ? err.message : err
+                                                    }`,
+                                                ),
+                                            );
+                                        });
                                 })
                                 .catch(finish.bind(null, comps.session, done));
                         })
                         .catch(finish.bind(null, comps.session, done));
+                })
+                .catch(finish.bind(null, comps.session, done));
         });
     });
 
     it(config.regular.list[6], function () {
         return runners.withSession(config.regular, 6, async (logger, done, comps) => {
-                    const tmpobj = createSampleFile(
-                        501,
-                        logger,
-                        (i: number) =>
-                            `[${i}]:: ${
-                                i % 100 === 0 || i <= 5
-                                    ? `some match A ${i % 6 === 0 ? 'B' : ''} line data\n`
-                                    : `some line data\n`
-                            }`,
-                    );
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .asText()
-                                .type(Factory.FileType.Text)
-                                .file(tmpobj.name)
-                                .get()
-                                .sterilized(),
-                        )
-                        .on('processing', () => {
+            const tmpobj = createSampleFile(
+                501,
+                logger,
+                (i: number) =>
+                    `[${i}]:: ${
+                        i % 100 === 0 || i <= 5
+                            ? `some match A ${i % 6 === 0 ? 'B' : ''} line data\n`
+                            : `some line data\n`
+                    }`,
+            );
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .asText()
+                        .type(Factory.FileType.Text)
+                        .file(tmpobj.name)
+                        .get()
+                        .sterilized(),
+                )
+                .on('processing', () => {
+                    comps.search
+                        .search([
+                            {
+                                filter: 'match A',
+                                flags: { reg: true, word: false, cases: false },
+                            },
+                            {
+                                filter: 'match [A,B]',
+                                flags: { reg: true, word: false, cases: false },
+                            },
+                        ])
+                        .then((_) => {
                             comps.search
-                                .search([
-                                    {
-                                        filter: 'match A',
-                                        flags: { reg: true, word: false, cases: false },
-                                    },
-                                    {
-                                        filter: 'match [A,B]',
-                                        flags: { reg: true, word: false, cases: false },
-                                    },
-                                ])
-                                .then((_) => {
+                                .getMap(501)
+                                .then((map) => {
+                                    expect(map[0].length).toEqual(2);
+                                    expect(map[0][0][0]).toEqual(0);
+                                    expect(map[0][1][0]).toEqual(1);
+                                    expect(map[500][0][0]).toEqual(0);
+                                    expect(map[500][1][0]).toEqual(1);
+                                    logger.verbose(map);
                                     comps.search
-                                        .getMap(501)
-                                        .then((map) => {
-                                            expect(map[0].length).toEqual(2);
-                                            expect(map[0][0][0]).toEqual(0);
-                                            expect(map[0][1][0]).toEqual(1);
-                                            expect(map[500][0][0]).toEqual(0);
-                                            expect(map[500][1][0]).toEqual(1);
-                                            logger.verbose(map);
+                                        .grab(0, 11)
+                                        .then((result: GrabbedElement[]) => {
+                                            expect(result.map((i) => i.content)).toEqual([
+                                                '[0]:: some match A B line data',
+                                                '[1]:: some match A  line data',
+                                                '[2]:: some match A  line data',
+                                                '[3]:: some match A  line data',
+                                                '[4]:: some match A  line data',
+                                                '[5]:: some match A  line data',
+                                                '[100]:: some match A  line data',
+                                                '[200]:: some match A  line data',
+                                                '[300]:: some match A B line data',
+                                                '[400]:: some match A  line data',
+                                                '[500]:: some match A  line data',
+                                            ]);
+                                            expect(result.map((i) => i.pos)).toEqual([
+                                                0, // 0
+                                                1, // 1
+                                                2, // 2
+                                                3, // 3
+                                                4, // 4
+                                                5, // 5
+                                                100, // 6
+                                                200, // 7
+                                                300, // 8
+                                                400, // 9
+                                                500, // 10
+                                            ]);
                                             comps.search
-                                                .grab(0, 11)
-                                                .then((result: IGrabbedElement[]) => {
-                                                    expect(result.map((i) => i.content)).toEqual([
-                                                        '[0]:: some match A B line data',
-                                                        '[1]:: some match A  line data',
-                                                        '[2]:: some match A  line data',
-                                                        '[3]:: some match A  line data',
-                                                        '[4]:: some match A  line data',
-                                                        '[5]:: some match A  line data',
-                                                        '[100]:: some match A  line data',
-                                                        '[200]:: some match A  line data',
-                                                        '[300]:: some match A B line data',
-                                                        '[400]:: some match A  line data',
-                                                        '[500]:: some match A  line data',
-                                                    ]);
-                                                    expect(result.map((i) => i.position)).toEqual([
-                                                        0, // 0
-                                                        1, // 1
-                                                        2, // 2
-                                                        3, // 3
-                                                        4, // 4
-                                                        5, // 5
-                                                        100, // 6
-                                                        200, // 7
-                                                        300, // 8
-                                                        400, // 9
-                                                        500, // 10
-                                                    ]);
-                                                    comps.search
-                                                        .len()
-                                                        .then((len: number) => {
-                                                            expect(len).toEqual(11);
-                                                            expect(searchStreamUpdated).toEqual(
-                                                                true,
-                                                            );
-                                                            finish(comps.session, done);
-                                                        })
-                                                        .catch((err: Error) => {
-                                                            finish(comps.session, done, err);
-                                                        });
+                                                .len()
+                                                .then((len: number) => {
+                                                    expect(len).toEqual(11);
+                                                    expect(searchStreamUpdated).toEqual(true);
+                                                    finish(comps.session, done);
                                                 })
                                                 .catch((err: Error) => {
-                                                    finish(
-                                                        comps.session,
-                                                        done,
-                                                        new Error(
-                                                            `Fail to grab data due error: ${
-                                                                err instanceof Error
-                                                                    ? err.message
-                                                                    : err
-                                                            }`,
-                                                        ),
-                                                    );
+                                                    finish(comps.session, done, err);
                                                 });
                                         })
-                                        .catch(finish.bind(null, comps.session, done));
+                                        .catch((err: Error) => {
+                                            finish(
+                                                comps.session,
+                                                done,
+                                                new Error(
+                                                    `Fail to grab data due error: ${
+                                                        err instanceof Error ? err.message : err
+                                                    }`,
+                                                ),
+                                            );
+                                        });
                                 })
                                 .catch(finish.bind(null, comps.session, done));
                         })
                         .catch(finish.bind(null, comps.session, done));
-                    let searchStreamUpdated = false;
-                    comps.events.SearchUpdated.subscribe((event) => {
-                        searchStreamUpdated = true;
-                    });
+                })
+                .catch(finish.bind(null, comps.session, done));
+            let searchStreamUpdated = false;
+            comps.events.SearchUpdated.subscribe((event) => {
+                searchStreamUpdated = true;
+            });
         });
     });
 
     it(config.regular.list[7], function () {
         return runners.withSession(config.regular, 7, async (logger, done, comps) => {
-                    const tmpobj = createSampleFile(
-                        5000,
-                        logger,
-                        (i: number) =>
-                            `[${i}]:: ${
-                                i % 100 === 0 || i <= 5
-                                    ? `some match ${
-                                          i % 6 === 0
-                                              ? 'B'
-                                              : i % 4 === 0
-                                              ? 'C'
-                                              : i % 3 === 0
-                                              ? 'D'
-                                              : 'A'
-                                      } line data\n`
-                                    : `some line data\n`
-                            }`,
-                    );
-                    comps.stream
-                        .observe(
-                            new Factory.File()
-                                .asText()
-                                .type(Factory.FileType.Text)
-                                .file(tmpobj.name)
-                                .get()
-                                .sterilized(),
-                        )
-                        .on('processing', () => {
-                            const calls = ['match A', 'match D', 'match C', 'match B'];
-                            let canceled = 0;
-                            calls.forEach((filter) => {
+            const tmpobj = createSampleFile(
+                5000,
+                logger,
+                (i: number) =>
+                    `[${i}]:: ${
+                        i % 100 === 0 || i <= 5
+                            ? `some match ${
+                                  i % 6 === 0 ? 'B' : i % 4 === 0 ? 'C' : i % 3 === 0 ? 'D' : 'A'
+                              } line data\n`
+                            : `some line data\n`
+                    }`,
+            );
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .asText()
+                        .type(Factory.FileType.Text)
+                        .file(tmpobj.name)
+                        .get()
+                        .sterilized(),
+                )
+                .on('processing', () => {
+                    const calls = ['match A', 'match D', 'match C', 'match B'];
+                    let canceled = 0;
+                    calls.forEach((filter) => {
+                        comps.search
+                            .search([
+                                {
+                                    filter,
+                                    flags: { reg: true, word: false, cases: false },
+                                },
+                            ])
+                            .then((_) => {
+                                expect(canceled).toEqual(3);
                                 comps.search
-                                    .search([
-                                        {
-                                            filter,
-                                            flags: { reg: true, word: false, cases: false },
-                                        },
-                                    ])
-                                    .then((_) => {
-                                        expect(canceled).toEqual(3);
+                                    .grab(0, 16)
+                                    .then((result: GrabbedElement[]) => {
+                                        expect(result.map((i) => i.content)).toEqual([
+                                            '[0]:: some match B line data',
+                                            '[300]:: some match B line data',
+                                            '[600]:: some match B line data',
+                                            '[900]:: some match B line data',
+                                            '[1200]:: some match B line data',
+                                            '[1500]:: some match B line data',
+                                            '[1800]:: some match B line data',
+                                            '[2100]:: some match B line data',
+                                            '[2400]:: some match B line data',
+                                            '[2700]:: some match B line data',
+                                            '[3000]:: some match B line data',
+                                            '[3300]:: some match B line data',
+                                            '[3600]:: some match B line data',
+                                            '[3900]:: some match B line data',
+                                            '[4200]:: some match B line data',
+                                            '[4500]:: some match B line data',
+                                        ]);
+                                        expect(result.map((i) => i.pos)).toEqual([
+                                            0, 300, 600, 900, 1200, 1500, 1800, 2100, 2400, 2700,
+                                            3000, 3300, 3600, 3900, 4200, 4500,
+                                        ]);
                                         comps.search
-                                            .grab(0, 16)
-                                            .then((result: IGrabbedElement[]) => {
-                                                expect(result.map((i) => i.content)).toEqual([
-                                                    '[0]:: some match B line data',
-                                                    '[300]:: some match B line data',
-                                                    '[600]:: some match B line data',
-                                                    '[900]:: some match B line data',
-                                                    '[1200]:: some match B line data',
-                                                    '[1500]:: some match B line data',
-                                                    '[1800]:: some match B line data',
-                                                    '[2100]:: some match B line data',
-                                                    '[2400]:: some match B line data',
-                                                    '[2700]:: some match B line data',
-                                                    '[3000]:: some match B line data',
-                                                    '[3300]:: some match B line data',
-                                                    '[3600]:: some match B line data',
-                                                    '[3900]:: some match B line data',
-                                                    '[4200]:: some match B line data',
-                                                    '[4500]:: some match B line data',
-                                                ]);
-                                                expect(result.map((i) => i.position)).toEqual([
-                                                    0, 300, 600, 900, 1200, 1500, 1800, 2100, 2400,
-                                                    2700, 3000, 3300, 3600, 3900, 4200, 4500,
-                                                ]);
-                                                comps.search
-                                                    .len()
-                                                    .then((len: number) => {
-                                                        expect(len).toEqual(17);
-                                                        expect(searchStreamUpdated).toEqual(true);
-                                                        finish(comps.session, done);
-                                                    })
-                                                    .catch((err: Error) => {
-                                                        finish(comps.session, done, err);
-                                                    });
+                                            .len()
+                                            .then((len: number) => {
+                                                expect(len).toEqual(17);
+                                                expect(searchStreamUpdated).toEqual(true);
+                                                finish(comps.session, done);
                                             })
                                             .catch((err: Error) => {
-                                                finish(
-                                                    comps.session,
-                                                    done,
-                                                    new Error(
-                                                        `Fail to grab data due error: ${
-                                                            err instanceof Error ? err.message : err
-                                                        }`,
-                                                    ),
-                                                );
+                                                finish(comps.session, done, err);
                                             });
                                     })
-                                    .canceled(() => {
-                                        canceled += 1;
-                                    })
                                     .catch((err: Error) => {
-                                        finish(comps.session, done);
+                                        finish(
+                                            comps.session,
+                                            done,
+                                            new Error(
+                                                `Fail to grab data due error: ${
+                                                    err instanceof Error ? err.message : err
+                                                }`,
+                                            ),
+                                        );
                                     });
+                            })
+                            .canceled(() => {
+                                canceled += 1;
+                            })
+                            .catch((err: Error) => {
+                                finish(comps.session, done);
                             });
-                        })
-                        .catch(finish.bind(null, comps.session, done));
-                    let searchStreamUpdated = false;
-                    comps.events.SearchUpdated.subscribe((event) => {
-                        searchStreamUpdated = true;
                     });
+                })
+                .catch(finish.bind(null, comps.session, done));
+            let searchStreamUpdated = false;
+            comps.events.SearchUpdated.subscribe((event) => {
+                searchStreamUpdated = true;
+            });
         });
     });
 });
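
Note on the spec changes above: the tests now consume the `GrabbedElement` shape from `platform/types/bindings`, where the row index lives in `pos` rather than the old `IGrabbedElement.position`. A minimal sketch of the projection the assertions rely on, using a simplified local stand-in for the binding (not its actual definition):

```ts
// Simplified local stand-in for the GrabbedElement binding used by the specs
// above; the real type lives in platform/types/bindings and may carry more
// fields than shown here.
interface GrabbedElementLike {
    pos: number; // row index in the stream / search results (previously `position`)
    source_id: number; // id of the source the row was read from
    content: string; // raw line content
}

// The assertions above project the grabbed rows into positions and contents:
function summarize(rows: GrabbedElementLike[]): { positions: number[]; contents: string[] } {
    return {
        positions: rows.map((row) => row.pos),
        contents: rows.map((row) => row.content),
    };
}

const sample: GrabbedElementLike[] = [
    { pos: 0, source_id: 0, content: '[0]:: some match A B line data' },
    { pos: 100, source_id: 0, content: '[100]:: some match A  line data' },
];
console.log(summarize(sample).positions); // [ 0, 100 ]
```
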
diff --git a/application/apps/rustcore/ts-bindings/spec/session.stream.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.stream.spec.ts
index b4a2c4ffe8..231faa0e57 100644
--- a/application/apps/rustcore/ts-bindings/spec/session.stream.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/session.stream.spec.ts
@@ -6,7 +6,7 @@
 import { initLogger } from './logger';
 initLogger();
 import { Factory } from '../src/api/session';
-import { IGrabbedElement } from 'platform/types/content';
+import { GrabbedElement } from 'platform/types/bindings';
 import { createSampleFile, finish } from './common';
 import { readConfigurationFile } from './config';
 import { utils } from 'platform/log';
@@ -58,7 +58,7 @@ if (process.platform === 'win32') {
                     }
                     comps.stream
                         .grab(500, 7)
-                        .then((result: IGrabbedElement[]) => {
+                        .then((result: GrabbedElement[]) => {
                             logger.debug('result of grab was: ' + JSON.stringify(result));
                             expect(result.map((i) => i.content)).toEqual([
                                 'some line data: 500',
@@ -224,7 +224,7 @@ if (process.platform === 'win32') {
                     }
                     comps.stream
                         .grab(500, 7)
-                        .then((result: IGrabbedElement[]) => {
+                        .then((result: GrabbedElement[]) => {
                             logger.debug('result of grab was: ' + JSON.stringify(result));
                             expect(result.map((i) => i.content)).toEqual([
                                 'some line data: 500',
@@ -451,14 +451,14 @@ if (process.platform === 'win32') {
                     }
                     comps.stream
                         .grab(0, 4)
-                        .then((result: IGrabbedElement[]) => {
+                        .then((result: GrabbedElement[]) => {
                             logger.debug('result of grab was: ' + JSON.stringify(result));
                             expect(
                                 result
                                     .map((i) => i.source_id)
                                     .reduce((partialSum, a) => partialSum + a, 0),
                             ).toBe(2);
-                            expect(result.map((i) => i.position)).toEqual([0, 1, 2, 3]);
+                            expect(result.map((i) => i.pos)).toEqual([0, 1, 2, 3]);
                             expect(result.filter((i) => i.content === TEST_LINES[0]).length).toBe(
                                 2,
                             );
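
The stream spec hunk above keeps its multi-source check intact while switching to `GrabbedElement` and `pos`: it still verifies which source each grabbed row came from by summing `source_id`. A standalone illustration of that style of check, again with a reduced local stand-in type:

```ts
// Local stand-in for GrabbedElement, reduced to the fields the assertion uses.
interface GrabbedRow {
    pos: number;
    source_id: number;
    content: string;
}

// Four rows grabbed from a two-source session: two rows came from source 1,
// so the source_id sum is 2 and the positions are contiguous, which is what
// the spec asserts after the rename to `pos`.
const grabbed: GrabbedRow[] = [
    { pos: 0, source_id: 0, content: 'some line data' },
    { pos: 1, source_id: 1, content: 'some line data' },
    { pos: 2, source_id: 0, content: 'some line data' },
    { pos: 3, source_id: 1, content: 'some line data' },
];

const sourceSum = grabbed.map((r) => r.source_id).reduce((acc, id) => acc + id, 0);
console.log(sourceSum === 2, grabbed.map((r) => r.pos)); // true [ 0, 1, 2, 3 ]
```
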
diff --git a/application/apps/rustcore/ts-bindings/spec/session.values.spec.ts b/application/apps/rustcore/ts-bindings/spec/session.values.spec.ts
index 14baed9e50..8a1e32616f 100644
--- a/application/apps/rustcore/ts-bindings/spec/session.values.spec.ts
+++ b/application/apps/rustcore/ts-bindings/spec/session.values.spec.ts
@@ -17,114 +17,124 @@ const MAX_DATASET_LEN = 65000;
 describe('Values', function () {
     it(config.regular.list[1], function () {
         return runners.withSession(config.regular, 1, async (logger, done, comps) => {
-                let sum = 0;
-                const tmpobj = createSampleFile(5000, logger, (i: number) => {
-                    if (i % 100 === 0 || i <= 5) {
-                        sum += i;
-                        return `[${i}]:: some data CPU=${i}% line data\n`;
-                    } else {
-                        return `[${i}]:: some line data\n`;
-                    }
-                });
-                comps.stream
-                    .observe(
-                        new Factory.File()
-                            .asText()
-                            .type(Factory.FileType.Text)
-                            .file(tmpobj.name)
-                            .get()
-                            .sterilized(),
-                    )
-                    .on('processing', () => {
-                        comps.search.values([`CPU=(\\d{1,})`]).catch(finish.bind(null, comps.session, done));
+            let sum = 0;
+            const tmpobj = createSampleFile(5000, logger, (i: number) => {
+                if (i % 100 === 0 || i <= 5) {
+                    sum += i;
+                    return `[${i}]:: some data CPU=${i}% line data\n`;
+                } else {
+                    return `[${i}]:: some line data\n`;
+                }
+            });
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .asText()
+                        .type(Factory.FileType.Text)
+                        .file(tmpobj.name)
+                        .get()
+                        .sterilized(),
+                )
+                .on('processing', () => {
+                    comps.search
+                        .values([`CPU=(\\d{1,})`])
+                        .catch(finish.bind(null, comps.session, done));
+                })
+                .catch(finish.bind(null, comps.session, done));
+            comps.events.SearchValuesUpdated.subscribe((map) => {
+                if (map === null) {
+                    // Before delivering results, rustcore should inform the FE that previous results have been dropped.
+                    return;
+                }
+                comps.search
+                    .getValues(MAX_DATASET_LEN)
+                    .then((data) => {
+                        let control = 0;
+                        data.forEach((points, _k) => {
+                            points.forEach((p) => {
+                                control += p.y_value;
+                            });
+                        });
+                        expect(control).toEqual(sum);
+                        finish(comps.session, done);
                     })
                     .catch(finish.bind(null, comps.session, done));
-                comps.events.SearchValuesUpdated.subscribe((map) => {
-                    if (map === null) {
-                        // Before get results rustcore should inform FE about dropping results.
-                        return;
-                    }
+            });
+        });
+    });
+    it(config.regular.list[2], function () {
+        return runners.withSession(config.regular, 2, async (logger, done, comps) => {
+            let sum = 0;
+            const tmpobj = createSampleFile(5000, logger, (i: number) => {
+                if (i % 100 === 0 || i <= 5) {
+                    sum += i;
+                    return `[${i}]:: some data CPU=${i}% line data\n`;
+                } else {
+                    return `[${i}]:: some line data\n`;
+                }
+            });
+            let iteration = 0;
+            comps.stream
+                .observe(
+                    new Factory.File()
+                        .asText()
+                        .type(Factory.FileType.Text)
+                        .file(tmpobj.name)
+                        .get()
+                        .sterilized(),
+                )
+                .on('processing', () => {
+                    comps.search
+                        .values([`CPU=(\\d{1,})`])
+                        .catch(finish.bind(null, comps.session, done));
+                })
+                .catch(finish.bind(null, comps.session, done));
+            comps.events.SearchValuesUpdated.subscribe((map) => {
+                if (map === null) {
+                    // Before delivering results, rustcore should inform the FE that previous results have been dropped.
+                    return;
+                }
+                if (iteration === 0) {
                     comps.search
                         .getValues(MAX_DATASET_LEN)
                         .then((data) => {
                             let control = 0;
-                            data[0].forEach((pair) => {
-                                control += pair[3];
+                            data.forEach((points, _k) => {
+                                points.forEach((p) => {
+                                    control += p.y_value;
+                                });
                             });
                             expect(control).toEqual(sum);
-                            finish(comps.session, done);
+                            const offset = 5000;
+                            appendToSampleFile(tmpobj, 5000, logger, (i: number) => {
+                                if (i % 100 === 0 || i <= 5) {
+                                    sum += i + offset;
+                                    return `[${i}]:: some data CPU=${i + offset}% line data\n`;
+                                } else {
+                                    return `[${i}]:: some line data\n`;
+                                }
+                            });
                         })
                         .catch(finish.bind(null, comps.session, done));
-                });
-        });
-    });
-    it(config.regular.list[2], function () {
-        return runners.withSession(config.regular, 2, async (logger, done, comps) => {
-                let sum = 0;
-                const tmpobj = createSampleFile(5000, logger, (i: number) => {
-                    if (i % 100 === 0 || i <= 5) {
-                        sum += i;
-                        return `[${i}]:: some data CPU=${i}% line data\n`;
-                    } else {
-                        return `[${i}]:: some line data\n`;
-                    }
-                });
-                let iteration = 0;
-                comps.stream
-                    .observe(
-                        new Factory.File()
-                            .asText()
-                            .type(Factory.FileType.Text)
-                            .file(tmpobj.name)
-                            .get()
-                            .sterilized(),
-                    )
-                    .on('processing', () => {
-                        comps.search.values([`CPU=(\\d{1,})`]).catch(finish.bind(null, comps.session, done));
-                    })
-                    .catch(finish.bind(null, comps.session, done));
-                comps.events.SearchValuesUpdated.subscribe((map) => {
-                    if (map === null) {
-                        // Before get results rustcore should inform FE about dropping results.
-                        return;
-                    }
-                    if (iteration === 0) {
-                        comps.search
-                            .getValues(MAX_DATASET_LEN)
-                            .then((data) => {
-                                let control = 0;
-                                data[0].forEach((pair) => {
-                                    control += pair[3];
-                                });
-                                expect(control).toEqual(sum);
-                                const offset = 5000;
-                                appendToSampleFile(tmpobj, 5000, logger, (i: number) => {
-                                    if (i % 100 === 0 || i <= 5) {
-                                        sum += i + offset;
-                                        return `[${i}]:: some data CPU=${i + offset}% line data\n`;
-                                    } else {
-                                        return `[${i}]:: some line data\n`;
-                                    }
-                                });
-                            })
-                            .catch(finish.bind(null, comps.session, done));
-                        iteration += 1;
-                    } else if (iteration === 1) {
-                        comps.search
-                            .getValues(MAX_DATASET_LEN)
-                            .then((data) => {
-                                let control = 0;
-                                data[0].forEach((pair) => {
-                                    control += pair[3];
+                    iteration += 1;
+                } else if (iteration === 1) {
+                    comps.search
+                        .getValues(MAX_DATASET_LEN)
+                        .then((data) => {
+                            let control = 0;
+                            data.forEach((points, _k) => {
+                                points.forEach((p) => {
+                                    control += p.y_value;
                                 });
-                                expect(control).toEqual(sum);
-                                finish(comps.session, done);
-                            })
-                            .catch(finish.bind(null, comps.session, done));
-                    } else {
-                        expect(iteration).toEqual(1);
-                    }
-                });
+                            });
+                            expect(control).toEqual(sum);
+                            finish(comps.session, done);
+                        })
+                        .catch(finish.bind(null, comps.session, done));
+                } else {
+                    expect(iteration).toEqual(1);
+                }
+            });
         });
     });
 });
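
The values spec above no longer indexes tuples (`pair[3]`); it iterates a map of extracted points and sums their `y_value`. A minimal sketch of that aggregation, assuming a `Map<number, { y_value: number }[]>` shape as suggested by the hunk; the real `ResultSearchValues` binding may differ in detail:

```ts
// Stand-in for the values result shape suggested by the hunk above:
// filter index -> array of extracted points; only `y_value` is modelled.
type ValuePoint = { y_value: number };
type ValuesResult = Map<number, ValuePoint[]>;

// Sum every extracted value across all filters, as the updated spec does.
function sumValues(data: ValuesResult): number {
    let control = 0;
    data.forEach((points) => {
        points.forEach((p) => {
            control += p.y_value;
        });
    });
    return control;
}

const data: ValuesResult = new Map([[0, [{ y_value: 100 }, { y_value: 200 }]]]);
console.log(sumValues(data)); // 300
```
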
diff --git a/application/apps/rustcore/ts-bindings/src/api/executors/session.sleep.executor.ts b/application/apps/rustcore/ts-bindings/src/api/executors/session.sleep.executor.ts
index 4c01d06a65..020793adee 100644
--- a/application/apps/rustcore/ts-bindings/src/api/executors/session.sleep.executor.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/executors/session.sleep.executor.ts
@@ -2,6 +2,8 @@ import { TExecutor, Logger, CancelablePromise, AsyncResultsExecutor } from './ex
 import { RustSession } from '../../native/native.session';
 import { EventProvider } from '../../api/session.provider';
 
+import * as protocol from 'protocol';
+
 export interface IExecuteSleepOptions {
     duration: number;
     ignoreCancellation: boolean;
@@ -28,9 +30,13 @@ export const executor: TExecutor<ISleepResults, IExecuteSleepOptions> = (
         ): Promise<void> {
             return session.sleep(operationUuid, options.duration, options.ignoreCancellation);
         },
-        function (data: any, resolve: (res: ISleepResults) => void, reject: (err: Error) => void) {
+        function (
+            data: Uint8Array,
+            resolve: (res: ISleepResults) => void,
+            reject: (err: Error) => void,
+        ) {
             try {
-                const result: ISleepResults = JSON.parse(data);
+                const result: ISleepResults = protocol.decodeResultSleep(data);
                 resolve(result);
             } catch (e) {
                 return reject(
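
The executor changes above all follow the same pattern: the callback now receives raw `Uint8Array` bytes from the native layer and turns them into a typed result with a `protocol.decode*` function instead of `JSON.parse`. A generic sketch of that pattern, with the decoder injected as a placeholder rather than a concrete protocol function:

```ts
// Sketch of the shared shape of the new executor callbacks: the native layer
// hands over raw bytes and a concrete protocol.decode* function (here an
// injected placeholder) turns them into a typed result before resolving.
function resolveDecoded<T>(
    data: Uint8Array,
    decodeFn: (buf: Uint8Array) => T,
    resolve: (res: T) => void,
    reject: (err: Error) => void,
): void {
    try {
        resolve(decodeFn(data));
    } catch (e) {
        reject(new Error(`decoding failed: ${e instanceof Error ? e.message : e}`));
    }
}
```
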
diff --git a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.export.executor.ts b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.export.executor.ts
index 6950ae5f54..06201f131c 100644
--- a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.export.executor.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.export.executor.ts
@@ -4,6 +4,8 @@ import { EventProvider } from '../../api/session.provider';
 import { IRange } from 'platform/types/range';
 import { TextExportOptions } from 'platform/types/exporting';
 
+import * as protocol from 'protocol';
+
 export interface Options {
     dest: string;
     ranges: IRange[];
@@ -24,16 +26,20 @@ export const executor: TExecutor<boolean, Options> = (
         function (session: RustSession, opt: Options, operationUuid: string): Promise<void> {
             return session.export(opt.dest, opt.ranges, opt.opt, operationUuid);
         },
-        function (data: any, resolve: (done: boolean) => void, reject: (err: Error) => void) {
-            data = data === 'true' ? true : data === 'false' ? false : data;
-            if (typeof data !== 'boolean') {
+        function (
+            data: Uint8Array,
+            resolve: (done: boolean) => void,
+            reject: (err: Error) => void,
+        ) {
+            const result: boolean = protocol.decodeResultBool(data);
+            if (typeof result !== 'boolean') {
                 return reject(
                     new Error(
                         `Fail to parse export results. Invalid format. Expecting valid { boolean }; gotten: ${typeof data}`,
                     ),
                 );
             }
-            resolve(data);
+            resolve(result);
         },
         'exporting',
     );
diff --git a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.export_raw.executor.ts b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.export_raw.executor.ts
index faf2d67b88..e2c3027f35 100644
--- a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.export_raw.executor.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.export_raw.executor.ts
@@ -3,6 +3,8 @@ import { RustSession } from '../../native/native.session';
 import { EventProvider } from '../session.provider';
 import { IRange } from 'platform/types/range';
 
+import * as protocol from 'protocol';
+
 export interface Options {
     dest: string;
     ranges: IRange[];
@@ -22,16 +24,20 @@ export const executor: TExecutor<boolean, Options> = (
         function (session: RustSession, opt: Options, operationUuid: string): Promise<void> {
             return session.exportRaw(opt.dest, opt.ranges, operationUuid);
         },
-        function (data: any, resolve: (done: boolean) => void, reject: (err: Error) => void) {
-            data = data === 'true' ? true : data === 'false' ? false : data;
-            if (typeof data !== 'boolean') {
+        function (
+            data: Uint8Array,
+            resolve: (done: boolean) => void,
+            reject: (err: Error) => void,
+        ) {
+            const result: boolean = protocol.decodeResultBool(data);
+            if (typeof result !== 'boolean') {
                 return reject(
                     new Error(
                         `Fail to parse export results. Invalid format. Expecting valid { boolean }; gotten: ${typeof data}`,
                     ),
                 );
             }
-            resolve(data);
+            resolve(result);
         },
         'exporting',
     );
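
Both export executors above decode a boolean outcome and keep a defensive `typeof` guard. A hypothetical shared helper showing how that duplicated logic could be factored out; `decodeBool` stands in for `protocol.decodeResultBool`, and this is a sketch rather than project code:

```ts
// Hypothetical shared validator for the two export executors above: decode the
// boolean outcome and keep the defensive type check in one place.
function decodeBoolResult(
    data: Uint8Array,
    decodeBool: (buf: Uint8Array) => boolean,
): boolean | Error {
    const result = decodeBool(data);
    return typeof result === 'boolean'
        ? result
        : new Error(`Fail to parse export results; expected boolean, got: ${typeof result}`);
}
```
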
diff --git a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.extract.executor.ts b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.extract.executor.ts
index 6d933034b0..4d2a5fcdbc 100644
--- a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.extract.executor.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.extract.executor.ts
@@ -10,6 +10,8 @@ import {
     IExtractedValue,
 } from 'platform/types/filter';
 
+import * as protocol from 'protocol';
+
 export const executor: TExecutor<TExtractedValues, IFilter[]> = (
     session: RustSession,
     provider: EventProvider,
@@ -25,12 +27,12 @@ export const executor: TExecutor<TExtractedValues, IFilter[]> = (
             return session.extractMatchesValues(filters, operationUuid);
         },
         function (
-            data: any,
+            data: Uint8Array,
             resolve: (res: TExtractedValues) => void,
             reject: (err: Error) => void,
         ) {
             try {
-                const src: TExtractedValuesSrc = JSON.parse(data);
+                const src: TExtractedValuesSrc = protocol.decodeResultExtractedMatchValues(data);
                 if (!(src instanceof Array)) {
                     return reject(
                         new Error(
diff --git a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.get_values.executor.ts b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.get_values.executor.ts
index 198b9ff473..b326764869 100644
--- a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.get_values.executor.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.get_values.executor.ts
@@ -1,8 +1,10 @@
 import { TExecutor, Logger, CancelablePromise, AsyncResultsExecutor } from './executor';
 import { RustSession } from '../../native/native.session';
 import { EventProvider } from '../session.provider';
-import { IValuesMap } from 'platform/types/filter';
 import { error } from 'platform/log/utils';
+import { ResultSearchValues } from 'platform/types/bindings';
+
+import * as protocol from 'protocol';
 
 export interface IOptions {
     datasetLength: number;
@@ -10,13 +12,13 @@ export interface IOptions {
     to?: number;
 }
 
-export const executor: TExecutor<IValuesMap, IOptions> = (
+export const executor: TExecutor<ResultSearchValues, IOptions> = (
     session: RustSession,
     provider: EventProvider,
     logger: Logger,
     options: IOptions,
-): CancelablePromise<IValuesMap> => {
-    return AsyncResultsExecutor<IValuesMap, IOptions>(
+): CancelablePromise<ResultSearchValues> => {
+    return AsyncResultsExecutor<ResultSearchValues, IOptions>(
         session,
         provider,
         logger,
@@ -43,19 +45,14 @@ export const executor: TExecutor<IValuesMap, IOptions> = (
                     .catch(reject);
             });
         },
-        function (data: any, resolve: (r: IValuesMap) => void, reject: (e: Error) => void) {
+        function (
+            data: Uint8Array,
+            resolve: (r: ResultSearchValues) => void,
+            reject: (e: Error) => void,
+        ) {
             try {
-                if (typeof data === 'string') {
-                    data = JSON.parse(data);
-                }
-                if (typeof data !== 'object') {
-                    return reject(
-                        new Error(
-                            `Fail to parse values object. Invalid format. Expecting IValuesMap.`,
-                        ),
-                    );
-                }
-                resolve(data as IValuesMap);
+                const map: ResultSearchValues = protocol.decodeResultSearchValues(data);
+                resolve(map);
             } catch (e) {
                 reject(new Error(error(e)));
             }
diff --git a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.map.executor.ts b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.map.executor.ts
index bfd2623c4a..c8841a83b0 100644
--- a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.map.executor.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.map.executor.ts
@@ -3,6 +3,8 @@ import { RustSession } from '../../native/native.session';
 import { EventProvider } from '../../api/session.provider';
 import { ISearchMap } from 'platform/types/filter';
 
+import * as protocol from 'protocol';
+
 export interface IOptions {
     datasetLength: number;
     from?: number;
@@ -42,9 +44,13 @@ export const executor: TExecutor<ISearchMap, IOptions> = (
                     .catch(reject);
             });
         },
-        function (data: any, resolve: (res: ISearchMap) => void, reject: (err: Error) => void) {
+        function (
+            data: Uint8Array,
+            resolve: (res: ISearchMap) => void,
+            reject: (err: Error) => void,
+        ) {
             try {
-                const result: ISearchMap = JSON.parse(data);
+                const result: ISearchMap = protocol.decodeResultScaledDistribution(data);
                 if (!(result instanceof Array)) {
                     return reject(
                         new Error(
diff --git a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.nearest.executor.ts b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.nearest.executor.ts
index 35bf56e490..51b4206364 100644
--- a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.nearest.executor.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.nearest.executor.ts
@@ -1,19 +1,21 @@
 import { TExecutor, Logger, CancelablePromise, AsyncResultsExecutor } from './executor';
 import { RustSession } from '../../native/native.session';
 import { EventProvider } from '../../api/session.provider';
-import { INearest } from 'platform/types/filter';
+import { NearestPosition, ResultNearestPosition } from 'platform/types/bindings';
+
+import * as protocol from 'protocol';
 
 export interface IExecuteNearestOptions {
     positionInStream: number;
 }
 
-export const executor: TExecutor<INearest | undefined, IExecuteNearestOptions> = (
+export const executor: TExecutor<NearestPosition | undefined, IExecuteNearestOptions> = (
     session: RustSession,
     provider: EventProvider,
     logger: Logger,
     options: IExecuteNearestOptions,
-): CancelablePromise<INearest | undefined> => {
-    return AsyncResultsExecutor<INearest | undefined, IExecuteNearestOptions>(
+): CancelablePromise<NearestPosition | undefined> => {
+    return AsyncResultsExecutor<NearestPosition | undefined, IExecuteNearestOptions>(
         session,
         provider,
         logger,
@@ -26,15 +28,12 @@ export const executor: TExecutor<INearest | undefined, IExecuteNearestOptions> =
             return session.getNearestTo(operationUuid, options.positionInStream);
         },
         function (
-            data: any,
-            resolve: (res: INearest | undefined) => void,
+            data: Uint8Array,
+            resolve: (res: NearestPosition | undefined) => void,
             reject: (err: Error) => void,
         ) {
-            if (typeof data === 'string' && data.trim().length === 0) {
-                return resolve(undefined);
-            }
             try {
-                const result: INearest | undefined | null = JSON.parse(data);
+                const result: ResultNearestPosition = protocol.decodeResultNearestPosition(data);
                 resolve(result === null ? undefined : result);
             } catch (e) {
                 return reject(
diff --git a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.search.executor.ts b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.search.executor.ts
index 878af841fb..2feadac343 100644
--- a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.search.executor.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.search.executor.ts
@@ -3,6 +3,8 @@ import { RustSession } from '../../native/native.session';
 import { EventProvider } from '../../api/session.provider';
 import { IFilter } from 'platform/types/filter';
 
+import * as protocol from 'protocol';
+
 export const executor: TExecutor<number, IFilter[]> = (
     session: RustSession,
     provider: EventProvider,
@@ -17,8 +19,12 @@ export const executor: TExecutor<number, IFilter[]> = (
         function (session: RustSession, filters: IFilter[], operationUuid: string): Promise<void> {
             return session.search(filters, operationUuid);
         },
-        function (data: any, resolve: (found: number) => void, reject: (err: Error) => void) {
-            const found = parseInt(data, 10);
+        function (
+            data: Uint8Array,
+            resolve: (found: number) => void,
+            reject: (err: Error) => void,
+        ) {
+            const found: number = protocol.decodeResultU64(data);
             if (typeof found !== 'number' || isNaN(found) || !isFinite(found)) {
                 return reject(
                     new Error(
diff --git a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.searchvalues.executor.ts b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.searchvalues.executor.ts
index 860c988067..e00766be98 100644
--- a/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.searchvalues.executor.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/executors/session.stream.searchvalues.executor.ts
@@ -16,7 +16,7 @@ export const executor: TExecutor<void, string[]> = (
         function (session: RustSession, filters: string[], operationUuid: string): Promise<void> {
             return session.searchValues(filters, operationUuid);
         },
-        function (_data: any, resolve: () => void, _reject: (err: Error) => void) {
+        function (_data: Uint8Array, resolve: () => void, _reject: (err: Error) => void) {
             resolve();
         },
         'search_values',
diff --git a/application/apps/rustcore/ts-bindings/src/api/jobs.ts b/application/apps/rustcore/ts-bindings/src/api/jobs.ts
index 02d8456685..485b5ece93 100644
--- a/application/apps/rustcore/ts-bindings/src/api/jobs.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/jobs.ts
@@ -1,10 +1,17 @@
 import { CancelablePromise } from 'platform/env/promise';
-import { Base } from '../native/native.jobs';
+import { Base, Cancelled, decode } from '../native/native.jobs';
 import { error } from 'platform/log/utils';
 import { IFilter } from 'platform/types/filter';
-import { ShellProfile } from 'platform/types/shells';
 import { SomeipStatistic } from 'platform/types/observe/parser/someip';
-import { StatisticInfo } from 'platform/types/observe/parser/dlt';
+import {
+    FoldersScanningResult,
+    DltStatisticInfo,
+    Profile,
+    ProfileList,
+    MapKeyValue,
+} from 'platform/types/bindings';
+
+import * as protocol from 'protocol';
 
 export class Jobs extends Base {
     public static async create(): Promise<Jobs> {
@@ -20,10 +27,8 @@ export class Jobs extends Base {
             // We should define validation callback. As argument it takes result of job,
             // which should be checked for type. In case it type is correct, callback
             // should return true
-            (res: number): number | Error => {
-                return typeof res === 'number'
-                    ? res
-                    : new Error(`jobCancelTest should return number type`);
+            (buf: Uint8Array): number | Error => {
+                return decode<number>(buf, protocol.decodeCommandOutcomeWithi64);
             },
             // As second argument of executor we should provide native function of job.
             this.native.jobCancelTest(sequence, num_a, num_b),
@@ -40,16 +45,19 @@ export class Jobs extends Base {
         max: number;
         paths: string[];
         include: { files: boolean; folders: boolean };
-    }): CancelablePromise<string> {
+    }): CancelablePromise<FoldersScanningResult> {
         const sequence = this.sequence();
-        const job: CancelablePromise<string> = this.execute(
-            (res: string): any | Error => {
-                if (typeof res !== 'string') {
-                    return new Error(
-                        `[jobs.listContent] Expecting string, but gotten: ${typeof res}`,
-                    );
+        const job: CancelablePromise<FoldersScanningResult> = this.execute(
+            (buf: Uint8Array): any | Error => {
+                const output = decode<FoldersScanningResult>(
+                    buf,
+                    protocol.decodeCommandOutcomeWithFoldersScanningResult,
+                );
+                if (output instanceof Error || output instanceof Cancelled) {
+                    return output;
+                } else {
+                    return output;
                 }
-                return res;
             },
             this.native.listFolderContent(
                 sequence,
@@ -65,23 +73,15 @@ export class Jobs extends Base {
         return job;
     }
 
-    public isFileBinary(options: {
-        filePath: string,
-    }): CancelablePromise<boolean> {
+    public isFileBinary(options: { filePath: string }): CancelablePromise<boolean> {
         const sequence = this.sequence();
         const job: CancelablePromise<boolean> = this.execute(
-            (res: boolean): any | Error => {
-                if (typeof res !== 'boolean') {
-                    return new Error(`[jobs.isFileBinary] Expecting boolean, but got: ${typeof res}`);
-                }
-                return res;
+            (buf: Uint8Array): boolean | Error => {
+                return decode<boolean>(buf, protocol.decodeCommandOutcomeWithbool);
             },
-            this.native.isFileBinary(
-                sequence,
-                options.filePath
-            ),
+            this.native.isFileBinary(sequence, options.filePath),
             sequence,
-            'isFileBinary'
+            'isFileBinary',
         );
         return job;
     }
@@ -89,7 +89,9 @@ export class Jobs extends Base {
     public spawnProcess(path: string, args: string[]): CancelablePromise<void> {
         const sequence = this.sequence();
         const job: CancelablePromise<void> = this.execute(
-            undefined,
+            (buf: Uint8Array): void | Error => {
+                return decode<void>(buf, protocol.decodeCommandOutcomeWithVoid);
+            },
             this.native.spawnProcess(sequence, path, args),
             sequence,
             'spawnProcess',
@@ -100,10 +102,8 @@ export class Jobs extends Base {
     public getFileChecksum(path: string): CancelablePromise<string> {
         const sequence = this.sequence();
         const job: CancelablePromise<string> = this.execute(
-            (res: string): any | Error => {
-                return typeof res === 'string'
-                    ? res
-                    : new Error(`getFileChecksum should return string type`);
+            (buf: Uint8Array): string | Error => {
+                return decode<string>(buf, protocol.decodeCommandOutcomeWithString);
             },
             this.native.getFileChecksum(sequence, path),
             sequence,
@@ -112,15 +112,18 @@ export class Jobs extends Base {
         return job;
     }
 
-    public getDltStats(paths: string[]): CancelablePromise<StatisticInfo> {
+    public getDltStats(paths: string[]): CancelablePromise<DltStatisticInfo> {
         const sequence = this.sequence();
-        const job: CancelablePromise<StatisticInfo> = this.execute(
-            (res: string): StatisticInfo | Error => {
-                try {
-                    return JSON.parse(res) as StatisticInfo;
-                } catch (e) {
-                    return new Error(error(e));
+        const job: CancelablePromise<DltStatisticInfo> = this.execute(
+            (buf: Uint8Array): any | Error => {
+                const decoded = decode<DltStatisticInfo>(
+                    buf,
+                    protocol.decodeCommandOutcomeWithDltStatisticInfo,
+                );
+                if (decoded instanceof Error) {
+                    return decoded;
                 }
+                return decoded;
             },
             this.native.getDltStats(sequence, paths),
             sequence,
@@ -132,9 +135,13 @@ export class Jobs extends Base {
     public getSomeipStatistic(paths: string[]): CancelablePromise<SomeipStatistic> {
         const sequence = this.sequence();
         const job: CancelablePromise<SomeipStatistic> = this.execute(
-            (res: string): SomeipStatistic | Error => {
+            (buf: Uint8Array): any | Error => {
+                const decoded = decode<string>(buf, protocol.decodeCommandOutcomeWithString);
+                if (decoded instanceof Error) {
+                    return decoded;
+                }
                 try {
-                    return JSON.parse(res) as SomeipStatistic;
+                    return JSON.parse(decoded) as SomeipStatistic;
                 } catch (e) {
                     return new Error(error(e));
                 }
@@ -146,23 +153,12 @@ export class Jobs extends Base {
         return job;
     }
 
-    public getShellProfiles(): CancelablePromise<ShellProfile[]> {
+    public getShellProfiles(): CancelablePromise<Profile[]> {
         const sequence = this.sequence();
-        const job: CancelablePromise<ShellProfile[]> = this.execute(
-            (res: string): ShellProfile[] | Error => {
-                try {
-                    const unparsed: unknown[] = JSON.parse(res);
-                    const profiles: ShellProfile[] = [];
-                    unparsed.forEach((unparsed: unknown) => {
-                        const profile = ShellProfile.fromObj(unparsed);
-                        if (!(profile instanceof Error)) {
-                            profiles.push(profile);
-                        }
-                    });
-                    return profiles;
-                } catch (e) {
-                    return new Error(error(e));
-                }
+        const job: CancelablePromise<Profile[]> = this.execute(
+            (buf: Uint8Array): any | Error => {
+                const decoded = decode<ProfileList>(buf, protocol.decodeCommandOutcomeWithString);
+                return decoded;
             },
             this.native.getShellProfiles(sequence),
             sequence,
@@ -174,24 +170,12 @@ export class Jobs extends Base {
     public getContextEnvvars(): CancelablePromise<Map<string, string>> {
         const sequence = this.sequence();
         const job: CancelablePromise<Map<string, string>> = this.execute(
-            (res: string): Map<string, string> | Error => {
-                try {
-                    const unparsed: { [key: string]: string } = JSON.parse(res);
-                    const envvars: Map<string, string> = new Map();
-                    if (
-                        unparsed === undefined ||
-                        unparsed === null ||
-                        typeof unparsed !== 'object'
-                    ) {
-                        return new Error(`Fail to parse envvars string: ${unparsed}`);
-                    }
-                    Object.keys(unparsed).forEach((key) => {
-                        envvars.set(key, unparsed[key]);
-                    });
-                    return envvars;
-                } catch (e) {
-                    return new Error(error(e));
-                }
+            (buf: Uint8Array): Map<string, string> | Error => {
+                const decoded = decode<MapKeyValue>(
+                    buf,
+                    protocol.decodeCommandOutcomeWithMapKeyValue,
+                );
+                return decoded;
             },
             this.native.getContextEnvvars(sequence),
             sequence,
@@ -203,10 +187,8 @@ export class Jobs extends Base {
     public getSerialPortsList(): CancelablePromise<string[]> {
         const sequence = this.sequence();
         const job: CancelablePromise<string[]> = this.execute(
-            (res: string[]): any | Error => {
-                return res instanceof Array
-                    ? res
-                    : new Error(`getSerialPortsList should return string[] type`);
+            (buf: Uint8Array): string[] | Error => {
+                return decode<string[]>(buf, protocol.decodeCommandOutcomeWithSerialPortsList);
             },
             this.native.getSerialPortsList(sequence),
             sequence,
@@ -218,9 +200,15 @@ export class Jobs extends Base {
     public getRegexError(filter: IFilter): CancelablePromise<string | undefined> {
         const sequence = this.sequence();
         const job: CancelablePromise<string | undefined> = this.execute(
-            (res: string): any | Error => {
-                if (typeof res === 'string' && res.trim() !== '') {
-                    return res;
+            (buf: Uint8Array): any | Error => {
+                const decoded = decode<string | undefined>(
+                    buf,
+                    protocol.decodeCommandOutcomeWithOptionString,
+                );
+                if (decoded instanceof Error) {
+                    return decoded;
+                } else if (typeof decoded === 'string' && decoded.trim() !== '') {
+                    return decoded;
                 } else {
                     return undefined;
                 }
@@ -240,8 +228,8 @@ export class Jobs extends Base {
     public sleep(ms: number): CancelablePromise<undefined> {
         const sequence = this.sequence();
         const job: CancelablePromise<undefined> = this.execute(
-            (_res: undefined): any | Error => {
-                return undefined;
+            (buf: Uint8Array): any | Error => {
+                return decode<void>(buf, protocol.decodeCommandOutcomeWithVoid);
             },
             this.native.sleep(sequence, ms),
             sequence,
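
Throughout `jobs.ts` above, the validation callbacks now decode a `CommandOutcome` payload via `decode<T>(buf, protocol.decodeCommandOutcomeWith*)`, which can yield a value, a `Cancelled` marker, or an `Error`. A self-contained sketch of that pattern; the outcome layout below is an assumption inferred from the hunks, and the stand-ins replace the real `decode`/`Cancelled` from `native.jobs`:

```ts
// Stand-ins for the Cancelled marker and outcome layout; assumptions only,
// not the actual protocol schema.
class CancelledStandIn {}

type OutcomeStandIn<T> = { Finished: T } | 'Cancelled';

function decodeOutcome<T>(
    buf: Uint8Array,
    decoder: (buf: Uint8Array) => OutcomeStandIn<T>,
): T | CancelledStandIn | Error {
    try {
        const outcome = decoder(buf);
        return outcome === 'Cancelled' ? new CancelledStandIn() : outcome.Finished;
    } catch (e) {
        return new Error(e instanceof Error ? e.message : String(e));
    }
}
```
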
diff --git a/application/apps/rustcore/ts-bindings/src/api/session.provider.ts b/application/apps/rustcore/ts-bindings/src/api/session.provider.ts
index 9a0938e240..e561b8cc98 100644
--- a/application/apps/rustcore/ts-bindings/src/api/session.provider.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/session.provider.ts
@@ -2,8 +2,10 @@ import { Subject } from 'platform/env/subscription';
 import { ISearchUpdated } from 'platform/types/filter';
 import { Computation } from '../provider/provider';
 import { EErrorKind, EErrorSeverity } from '../provider/provider.errors';
-import { IMapEntity, IMatchEntity, IValuesMinMaxMap } from 'platform/types/filter';
-import { IAttachment } from 'platform/types/content';
+import { IMapEntity, IMatchEntity, FilterMatch } from 'platform/types/filter';
+import { AttachmentInfo } from 'platform/types/bindings';
+
+import * as protocol from 'protocol';
 
 export interface IProgressState {
     total: number;
@@ -45,15 +47,15 @@ export interface IEventMatchesUpdated {
 
 export interface IAttachmentsUpdatedUpdated {
     len: number;
-    attachment: IAttachment;
+    attachment: AttachmentInfo;
 }
 
 export interface ISessionEvents {
     StreamUpdated: Subject<number>;
     FileRead: Subject<void>;
     SearchUpdated: Subject<ISearchUpdated>;
-    SearchValuesUpdated: Subject<IValuesMinMaxMap | null>;
-    SearchMapUpdated: Subject<string>;
+    SearchValuesUpdated: Subject<Map<number, [number, number]> | null>;
+    SearchMapUpdated: Subject<FilterMatch[]>;
     MapUpdated: Subject<IEventMapUpdated>;
     IndexedMapUpdated: Subject<IEventIndexedMapUpdated>;
     MatchesUpdated: Subject<IEventMatchesUpdated>;
@@ -110,9 +112,9 @@ const SessionEventsSignatures: ISessionEventsSignatures = {
 interface ISessionEventsInterfaces {
     StreamUpdated: { self: 'number' };
     FileRead: { self: null };
-    SearchUpdated: { self: 'object'; found: 'number'; stat: typeof Object };
+    SearchUpdated: { self: 'object'; found: 'number'; stat: typeof Map };
     SearchValuesUpdated: { self: ['object', null] };
-    SearchMapUpdated: { self: ['string', null] };
+    SearchMapUpdated: { self: [typeof Array, null] };
     MapUpdated: { self: 'object'; map: typeof Array };
     IndexedMapUpdated: { self: 'object'; len: 'number' };
     MatchesUpdated: { self: 'object'; matches: typeof Array };
@@ -140,9 +142,9 @@ interface ISessionEventsInterfaces {
 const SessionEventsInterfaces: ISessionEventsInterfaces = {
     StreamUpdated: { self: 'number' },
     FileRead: { self: null },
-    SearchUpdated: { self: 'object', found: 'number', stat: Object },
+    SearchUpdated: { self: 'object', found: 'number', stat: Map },
     SearchValuesUpdated: { self: ['object', null] },
-    SearchMapUpdated: { self: ['string', null] },
+    SearchMapUpdated: { self: [Array, null] },
     MapUpdated: { self: 'object', map: Array },
     IndexedMapUpdated: { self: 'object', len: 'number' },
     MatchesUpdated: { self: 'object', matches: Array },
@@ -176,8 +178,8 @@ export class EventProvider extends Computation<
         StreamUpdated: new Subject<number>(),
         FileRead: new Subject<void>(),
         SearchUpdated: new Subject<ISearchUpdated>(),
-        SearchValuesUpdated: new Subject<IValuesMinMaxMap | null>(),
-        SearchMapUpdated: new Subject<string>(),
+        SearchValuesUpdated: new Subject<Map<number, [number, number]> | null>(),
+        SearchMapUpdated: new Subject<FilterMatch[]>(),
         MapUpdated: new Subject<IEventMapUpdated>(),
         IndexedMapUpdated: new Subject<IEventIndexedMapUpdated>(),
         MatchesUpdated: new Subject<IEventMatchesUpdated>(), // dummy
@@ -194,7 +196,7 @@ export class EventProvider extends Computation<
     private readonly _convertors: ISessionEventsConvertors = {};
 
     constructor(uuid: string) {
-        super(uuid);
+        super(uuid, protocol.decodeCallbackEvent);
     }
 
     public getName(): string {
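
A small sketch of consuming the retyped event payloads: SearchMapUpdated now delivers FilterMatch[] and SearchValuesUpdated a Map<number, [number, number]> instead of JSON strings. The FilterMatch shape below ({ index, filters }) is assumed from the generated bindings and is illustrative only.

```ts
// Assumed FilterMatch shape from platform/types/bindings (illustrative).
type FilterMatch = { index: number; filters: number[] };

function onSearchMapUpdated(matches: FilterMatch[]): void {
    // The payload arrives already decoded via protocol.decodeCallbackEvent; no JSON.parse step.
    for (const match of matches) {
        console.log(`row ${match.index} matched filters: ${match.filters.join(', ')}`);
    }
}

function onSearchValuesUpdated(values: Map<number, [number, number]> | null): void {
    if (values === null) {
        return; // values were reset/dropped
    }
    values.forEach(([min, max], filterIndex) => {
        console.log(`filter #${filterIndex}: min=${min}, max=${max}`);
    });
}

onSearchMapUpdated([{ index: 12, filters: [0, 2] }]);
onSearchValuesUpdated(new Map([[0, [0.5, 42.0] as [number, number]]]));
```
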
diff --git a/application/apps/rustcore/ts-bindings/src/api/session.search.ts b/application/apps/rustcore/ts-bindings/src/api/session.search.ts
index 7d900a7d51..4844b9d168 100644
--- a/application/apps/rustcore/ts-bindings/src/api/session.search.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/session.search.ts
@@ -3,8 +3,9 @@ import { scope } from 'platform/env/scope';
 import { RustSession } from '../native/native.session';
 import { ICancelablePromise } from 'platform/env/promise';
 import { EventProvider } from '../api/session.provider';
-import { IGrabbedElement } from 'platform/types/content';
-import { IFilter, ISearchMap, TExtractedValues, INearest, IValuesMap } from 'platform/types/filter';
+import { GrabbedElement } from 'platform/types/bindings/miscellaneous';
+import { IFilter, ISearchMap, TExtractedValues } from 'platform/types/filter';
+import { ResultSearchValues, NearestPosition } from 'platform/types/bindings';
 import { Executors } from './executors/session.stream.executors';
 import { SearchTaskManager } from './executors/single.task.search';
 import { ValuesTaskManager } from './executors/single.task.values';
@@ -48,7 +49,7 @@ export class SessionSearch {
      * @param start { number } - first row number in search result
      * @param len { number } - count of rows that should be included into the chunk starting from @param start
      */
-    public grab(start: number, len: number): Promise<IGrabbedElement[]> {
+    public grab(start: number, len: number): Promise<GrabbedElement[]> {
         return this.session.grabSearchChunk(start, len);
     }
 
@@ -97,7 +98,7 @@ export class SessionSearch {
         datasetLength: number,
         from?: number,
         to?: number,
-    ): ICancelablePromise<IValuesMap> {
+    ): ICancelablePromise<ResultSearchValues> {
         return Executors.values_getter(this.session, this.provider, this.logger, {
             datasetLength,
             from,
@@ -105,7 +106,7 @@ export class SessionSearch {
         });
     }
 
-    public getNearest(positionInStream: number): ICancelablePromise<INearest | undefined> {
+    public getNearest(positionInStream: number): ICancelablePromise<NearestPosition | undefined> {
         return Executors.nearest(this.session, this.provider, this.logger, {
             positionInStream,
         });
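
A short usage sketch for the retyped getNearest(): NearestPosition is assumed to carry the { index, position } pair that getNearestTo() returns in native.session.ts further below; the type alias here is illustrative.

```ts
// Assumed NearestPosition shape (illustrative), matching the
// { index, position } result of getNearestTo() in native.session.ts.
type NearestPosition = { index: number; position: number };

function describeNearest(nearest: NearestPosition | undefined, requested: number): string {
    if (nearest === undefined) {
        return `no search matches around stream position ${requested}`;
    }
    return `nearest match #${nearest.index} at stream position ${nearest.position}`;
}

console.log(describeNearest({ index: 3, position: 1201 }, 1187));
console.log(describeNearest(undefined, 42));
```
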
diff --git a/application/apps/rustcore/ts-bindings/src/api/session.stream.ts b/application/apps/rustcore/ts-bindings/src/api/session.stream.ts
index 408486e638..08b71a601f 100644
--- a/application/apps/rustcore/ts-bindings/src/api/session.stream.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/session.stream.ts
@@ -6,7 +6,7 @@ import { SdeRequest, SdeResponse } from 'platform/types/sde';
 import { EventProvider } from '../api/session.provider';
 import { Executors } from './executors/session.stream.executors';
 import { EFileOptionsRequirements } from './executors/session.stream.observe.executor';
-import { IGrabbedElement } from 'platform/types/content';
+import { GrabbedElement } from 'platform/types/bindings/miscellaneous';
 import { IRange } from 'platform/types/range';
 import { ISourceLink } from 'platform/types/observe/types';
 import { Attachment, IndexingMode } from 'platform/types/content';
@@ -30,11 +30,11 @@ export class SessionStream {
         return Promise.resolve(undefined);
     }
 
-    public grab(start: number, len: number): Promise<IGrabbedElement[]> {
+    public grab(start: number, len: number): Promise<GrabbedElement[]> {
         return this._session.grabStreamChunk(start, len);
     }
 
-    public grabIndexed(start: number, len: number): Promise<IGrabbedElement[]> {
+    public grabIndexed(start: number, len: number): Promise<GrabbedElement[]> {
         return this._session.grabIndexed(start, len);
     }
 
@@ -69,7 +69,7 @@ export class SessionStream {
         return this._session.expandBreadcrumbs(seporator, offset, above);
     }
 
-    public grabRanges(ranges: IRange[]): Promise<IGrabbedElement[]> {
+    public grabRanges(ranges: IRange[]): Promise<GrabbedElement[]> {
         return this._session.grabStreamRanges(ranges);
     }
 
@@ -86,9 +86,9 @@ export class SessionStream {
     }
 
     public sde(operation: string, request: SdeRequest): Promise<SdeResponse> {
-        return this._session.sendIntoSde(operation, JSON.stringify(request)).then((result) => {
+        return this._session.sendIntoSde(operation, request).then((response) => {
             try {
-                return JSON.parse(result) as SdeResponse;
+                return response;
             } catch (e) {
                 return Promise.reject(new Error(`Fail to parse response`));
             }
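
A minimal sketch of the simplified SDE round trip above: the request object is handed to the session as-is (it is encoded with protocol.encodeSdeRequest at the native boundary) and the response arrives already decoded, so no JSON serialization happens in this layer. SdeRequest/SdeResponse below are simplified stand-ins, not the real platform/types/sde definitions.

```ts
// Simplified stand-ins for the SDE types (illustrative only).
type SdeRequest = { WriteText: string };
type SdeResponse = { bytes: number };

// Stand-in for this._session.sendIntoSde(): accepts the request object directly.
async function sendIntoSde(operation: string, request: SdeRequest): Promise<SdeResponse> {
    console.log(`SDE request for operation ${operation}:`, request);
    return { bytes: request.WriteText.length };
}

async function sde(operation: string, request: SdeRequest): Promise<SdeResponse> {
    // The response is already a structured object; the former JSON.parse step is gone.
    return sendIntoSde(operation, request);
}

sde('op-uuid', { WriteText: 'help\n' }).then((res) => console.log(`accepted ${res.bytes} bytes`));
```
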
diff --git a/application/apps/rustcore/ts-bindings/src/api/tracker.provider.ts b/application/apps/rustcore/ts-bindings/src/api/tracker.provider.ts
index c35f8b6d77..84def0887f 100644
--- a/application/apps/rustcore/ts-bindings/src/api/tracker.provider.ts
+++ b/application/apps/rustcore/ts-bindings/src/api/tracker.provider.ts
@@ -1,6 +1,8 @@
 import { Subject } from 'platform/env/subscription';
 import { Computation } from '../provider/provider';
 
+import * as protocol from 'protocol';
+
 export interface Job {
     alias: string;
     uuid: string;
@@ -74,7 +76,7 @@ export class EventProvider extends Computation<
     private readonly _convertors = {};
 
     constructor(uuid: string) {
-        super(uuid);
+        super(uuid, protocol.decodeLifecycleTransition);
     }
 
     public getName(): string {
diff --git a/application/apps/rustcore/ts-bindings/src/interfaces/errors.ts b/application/apps/rustcore/ts-bindings/src/interfaces/errors.ts
index 25a52d609b..f0ae439c0a 100644
--- a/application/apps/rustcore/ts-bindings/src/interfaces/errors.ts
+++ b/application/apps/rustcore/ts-bindings/src/interfaces/errors.ts
@@ -1,6 +1,9 @@
 import { Logger } from 'platform/log';
 import { scope } from 'platform/env/scope';
 
+import * as utils from 'platform/log/utils';
+import * as protocol from 'protocol';
+
 export enum Type {
     NotImplemented = 'NotImplemented',
     InvalidInput = 'InvalidInput',
@@ -11,6 +14,24 @@ export enum Type {
     ParsingSearchChunk = 'ParsingSearchChunk',
     CancelationError = 'CancelationError',
     ContentManipulation = 'ContentManipulation',
+    Communication = 'Communication',
+    DestinationPath = 'DestinationPath',
+    Grabbing = 'Grabbing',
+    InvalidArgs = 'InvalidArgs',
+    InvalidData = 'InvalidData',
+    IoOperation = 'IoOperation',
+    MultipleInitCall = 'MultipleInitCall',
+    NativeError = 'NativeError',
+    OperationNotSupported = 'OperationNotSupported',
+    Process = 'Process',
+    Protocol = 'Protocol',
+    Sde = 'Sde',
+    SearchError = 'SearchError',
+    SessionCreatingFail = 'SessionCreatingFail',
+    SessionUnavailable = 'SessionUnavailable',
+    Unrecognized = 'Unrecognized',
+    Decoding = 'Decoding',
+    Encoding = 'Encoding',
     Other = 'Other',
 }
 
@@ -32,6 +53,7 @@ export enum Source {
     SetDebug = 'SetDebug',
     SendIntoSde = 'SendIntoSde',
     GetAttachments = 'GetAttachments',
+    GetIndexedRanges = 'GetIndexedRanges',
     Concat = 'Concat',
     Merge = 'Merge',
     Extract = 'Extract',
@@ -50,6 +72,7 @@ export enum Source {
     SetIndexingMode = 'SetIndexingMode',
     GetIndexedLen = 'GetIndexedLen',
     getAroundIndexes = 'getAroundIndexes',
+    Native = 'Native',
     Other = 'Other',
 }
 
@@ -59,6 +82,121 @@ export class NativeError extends Error {
     private readonly _logger: Logger = scope.getLogger(`NativeError`);
 
     public static from(smth: any): Error {
+        if (smth instanceof Error) {
+            return smth;
+        }
+        if (typeof smth === 'string') {
+            return new Error(smth);
+        }
+        if (smth instanceof Buffer || smth instanceof Uint8Array) {
+            try {
+                const err = protocol.decodeComputationError(smth);
+                if (err === null) {
+                    return new NativeError(
+                        new Error(`Fail decode error`),
+                        Type.InvalidData,
+                        Source.Native,
+                    );
+                }
+                if (typeof err === 'string') {
+                    if ('DestinationPath' === err) {
+                        return new NativeError(
+                            new Error(`Destination path error`),
+                            Type.Communication,
+                            Source.Native,
+                        );
+                    } else if ('SessionCreatingFail' === err) {
+                        return new NativeError(
+                            new Error(`Fail to create a session`),
+                            Type.SessionCreatingFail,
+                            Source.Native,
+                        );
+                    } else if ('InvalidData' === err) {
+                        return new NativeError(
+                            new Error(`Invalid data`),
+                            Type.InvalidData,
+                            Source.Native,
+                        );
+                    } else if ('MultipleInitCall' === err) {
+                        return new NativeError(
+                            new Error(`Multiple init call`),
+                            Type.MultipleInitCall,
+                            Source.Native,
+                        );
+                    } else if ('SessionUnavailable' === err) {
+                        return new NativeError(
+                            new Error(`Session is unavailable`),
+                            Type.SessionUnavailable,
+                            Source.Native,
+                        );
+                    }
+                } else if ('Communication' in err) {
+                    return new NativeError(
+                        new Error(err.Communication),
+                        Type.Communication,
+                        Source.Native,
+                    );
+                } else if ('OperationNotSupported' in err) {
+                    return new NativeError(
+                        new Error(err.OperationNotSupported),
+                        Type.OperationNotSupported,
+                        Source.Native,
+                    );
+                } else if ('IoOperation' in err) {
+                    return new NativeError(
+                        new Error(err.IoOperation),
+                        Type.IoOperation,
+                        Source.Native,
+                    );
+                } else if ('InvalidArgs' in err) {
+                    return new NativeError(
+                        new Error(err.InvalidArgs),
+                        Type.InvalidArgs,
+                        Source.Native,
+                    );
+                } else if ('Process' in err) {
+                    return new NativeError(new Error(err.Process), Type.Process, Source.Native);
+                } else if ('Protocol' in err) {
+                    return new NativeError(new Error(err.Protocol), Type.Protocol, Source.Native);
+                } else if ('SearchError' in err) {
+                    return new NativeError(
+                        new Error(`Search error: ${err.SearchError}`),
+                        Type.SearchError,
+                        Source.Native,
+                    );
+                } else if ('NativeError' in err) {
+                    return new NativeError(
+                        new Error(err.NativeError?.message),
+                        Type.NativeError,
+                        Source.Native,
+                    );
+                } else if ('Grabbing' in err) {
+                    return new NativeError(
+                        new Error(`Grabbing error: ${err.Grabbing}`),
+                        Type.SearchError,
+                        Source.Native,
+                    );
+                } else if ('Sde' in err) {
+                    return new NativeError(new Error(err.Sde), Type.Sde, Source.Native);
+                } else if ('Decoding' in err) {
+                    return new NativeError(new Error(err.Decoding), Type.Decoding, Source.Native);
+                } else if ('Encoding' in err) {
+                    return new NativeError(new Error(err.Encoding), Type.Encoding, Source.Native);
+                } else {
+                    return new NativeError(
+                        new Error(`Fail to recognize error: ${JSON.stringify(err)}`),
+                        Type.Unrecognized,
+                        Source.Native,
+                    );
+                }
+            } catch (err) {
+                return new NativeError(
+                    new Error(`Fail to decode error: ${utils.error(err)}`),
+                    Type.Other,
+                    Source.Other,
+                );
+            }
+        }
         return smth instanceof Error
             ? smth
             : new Error(`${typeof smth !== 'string' ? JSON.stringify(smth) : smth}`);
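
A compact sketch of the variant mapping NativeError.from() performs on a decoded ComputationError: unit variants decode to plain strings, data-carrying variants to single-key objects. The union below lists only a few variants and is an illustrative reduction of the decoded type.

```ts
// Reduced ComputationError union (illustrative subset of the decoded variants).
type ComputationError =
    | 'InvalidData'
    | 'MultipleInitCall'
    | 'SessionUnavailable'
    | { Communication: string }
    | { IoOperation: string };

function describe(err: ComputationError): string {
    if (typeof err === 'string') {
        // Unit variants carry no payload; only the tag is mapped to a Type.
        return `unit variant: ${err}`;
    }
    if ('Communication' in err) {
        return `Communication error: ${err.Communication}`;
    }
    return `IO error: ${err.IoOperation}`;
}

console.log(describe('SessionUnavailable'));
console.log(describe({ Communication: 'channel closed' }));
```
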
diff --git a/application/apps/rustcore/ts-bindings/src/native/native.jobs.ts b/application/apps/rustcore/ts-bindings/src/native/native.jobs.ts
index a5e6942693..0904a6b71f 100644
--- a/application/apps/rustcore/ts-bindings/src/native/native.jobs.ts
+++ b/application/apps/rustcore/ts-bindings/src/native/native.jobs.ts
@@ -3,6 +3,7 @@ import { scope } from 'platform/env/scope';
 import { CancelablePromise } from 'platform/env/promise';
 import { error } from 'platform/log/utils';
 import { getNativeModule } from '../native/native';
+import { NativeError } from '../interfaces/errors';
 
 export abstract class JobsNative {
     public abstract abort(sequence: number): Promise<void>;
@@ -11,9 +12,13 @@ export abstract class JobsNative {
 
     public abstract destroy(): Promise<void>;
 
-    public abstract isFileBinary(sequence: number, filePath: string): Promise<boolean>;
+    public abstract isFileBinary(sequence: number, filePath: string): Promise<Uint8Array>;
 
-    public abstract jobCancelTest(sequence: number, num_a: number, num_b: number): Promise<string>;
+    public abstract jobCancelTest(
+        sequence: number,
+        num_a: number,
+        num_b: number,
+    ): Promise<Uint8Array>;
 
     public abstract listFolderContent(
         sequence: number,
@@ -22,16 +27,20 @@ export abstract class JobsNative {
         paths: string[],
         includeFiles: boolean,
         includeFolders: boolean,
-    ): Promise<string>;
-
-    public abstract spawnProcess(sequence: number, path: string, args: string[]): Promise<void>;
-    public abstract getFileChecksum(sequence: number, path: string): Promise<string>;
-    public abstract getDltStats(sequence: number, files: string[]): Promise<string>;
-    public abstract getSomeipStatistic(sequence: number, files: string[]): Promise<string>;
-    public abstract getShellProfiles(sequence: number): Promise<string>;
-    public abstract getContextEnvvars(sequence: number): Promise<string>;
-    public abstract getSerialPortsList(sequence: number): Promise<string[]>;
-    public abstract sleep(sequence: number, ms: number): Promise<undefined>;
+    ): Promise<Uint8Array>;
+
+    public abstract spawnProcess(
+        sequence: number,
+        path: string,
+        args: string[],
+    ): Promise<Uint8Array>;
+    public abstract getFileChecksum(sequence: number, path: string): Promise<Uint8Array>;
+    public abstract getDltStats(sequence: number, files: string[]): Promise<Uint8Array>;
+    public abstract getSomeipStatistic(sequence: number, files: string[]): Promise<Uint8Array>;
+    public abstract getShellProfiles(sequence: number): Promise<Uint8Array>;
+    public abstract getContextEnvvars(sequence: number): Promise<Uint8Array>;
+    public abstract getSerialPortsList(sequence: number): Promise<Uint8Array>;
+    public abstract sleep(sequence: number, ms: number): Promise<Uint8Array>;
     public abstract getRegexError(
         sequence: number,
         filter: {
@@ -40,7 +49,7 @@ export abstract class JobsNative {
             ignore_case: boolean;
             is_word: boolean;
         },
-    ): Promise<string | undefined | null>;
+    ): Promise<Uint8Array>;
 }
 
 interface Job {
@@ -70,7 +79,7 @@ export class Queue {
 
 export type JobResult<T> = { Finished: T } | 'Cancelled';
 
-export type ConvertCallback<Input, Output> = (input: Input) => Output | Error;
+export type ConvertCallback<Output> = (input: Uint8Array) => Output | Error | Cancelled;
 
 enum State {
     destroyed,
@@ -79,6 +88,26 @@ enum State {
     created,
 }
 
+export class Cancelled extends Error {}
+
+export function decode<Output>(
+    buf: Uint8Array,
+    decoder: (buf: Uint8Array) => any,
+): Output | Error | Cancelled {
+    try {
+        const output = decoder(buf);
+        if (output === 'Cancelled') {
+            return new Cancelled(`Job has been cancelled`);
+        } else if ('Finished' in output) {
+            return output.Finished as Output;
+        } else {
+            return new Error(`Fail to detect job status.`);
+        }
+    } catch (err) {
+        return new Error(`Fail to decode job's results: ${error(err)}`);
+    }
+}
+
 const DESTROY_TIMEOUT = 5000;
 
 export class Base {
@@ -169,9 +198,9 @@ export class Base {
         return this.queue.sequence();
     }
 
-    protected execute<Input, Output>(
-        convert: undefined | ConvertCallback<Input, Output>,
-        task: Promise<any>,
+    protected execute<Output>(
+        convert: ConvertCallback<Output>,
+        task: Promise<Uint8Array>,
         sequence: number,
         alias: string,
     ): CancelablePromise<Output> {
@@ -189,31 +218,20 @@ export class Base {
                     this.logger.error(`Fail to cancel ${error(err)}`);
                 });
             });
-            task.then((nativeOutput: string) => {
-                try {
-                    const result: JobResult<Input> = JSON.parse(nativeOutput);
-                    if (result === 'Cancelled' || self.isCanceling()) {
-                        if (result !== 'Cancelled' && self.isCanceling()) {
-                            this.logger.warn('Job result dropped due canceling');
-                        }
-                        cancel();
-                    } else if (convert === undefined) {
-                        resolve(result.Finished as unknown as Output);
-                    } else {
-                        const converted: Output | Error = convert(result.Finished);
-                        if (converted instanceof Error) {
-                            reject(converted);
-                        } else {
-                            resolve(converted);
-                        }
-                    }
-                } catch (e) {
-                    reject(new Error(`Fail to parse results (${nativeOutput}): ${error(e)}`));
+            task.then((buf: Uint8Array) => {
+                const decoded = convert(buf);
+                if (decoded instanceof Cancelled || self.isCanceling()) {
+                    cancel();
+                } else if (decoded instanceof Error) {
+                    reject(decoded);
+                } else {
+                    resolve(decoded);
                 }
             })
-                .catch((err: Error) => {
-                    this.logger.error(`Fail to do "${alias}" operation due error: ${error(err)}`);
-                    reject(new Error(error(err)));
+                .catch((err: Error | Uint8Array) => {
+                    const nerr = NativeError.from(err);
+                    this.logger.error(`Fail to do "${alias}" operation due error: ${error(nerr)}`);
+                    reject(nerr);
                 })
                 .finally(() => {
                     this.queue.remove(sequence);
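
A self-contained usage sketch for the new decode<Output>() helper and Cancelled marker defined above, restated here so the example compiles on its own. The fake decoder stands in for one of the generated protocol.decodeCommandOutcomeWith* functions; the branching mirrors how execute() resolves, rejects, or cancels.

```ts
// Local restatement of the helper for a self-contained example.
type JobOutcome<T> = { Finished: T } | 'Cancelled';

class Cancelled extends Error {}

function decode<T>(buf: Uint8Array, decoder: (buf: Uint8Array) => JobOutcome<T>): T | Error | Cancelled {
    try {
        const outcome = decoder(buf);
        if (outcome === 'Cancelled') {
            return new Cancelled(`Job has been cancelled`);
        }
        return 'Finished' in outcome ? outcome.Finished : new Error(`Fail to detect job status.`);
    } catch (err) {
        return new Error(`Fail to decode job's results: ${err instanceof Error ? err.message : String(err)}`);
    }
}

// Hypothetical decoder standing in for protocol.decodeCommandOutcomeWithSerialPortsList.
const fakePortsDecoder = (_buf: Uint8Array): JobOutcome<string[]> => ({ Finished: ['/dev/ttyUSB0'] });
const result = decode<string[]>(new Uint8Array(), fakePortsDecoder);

if (result instanceof Cancelled) {
    console.log('job was cancelled before completion');
} else if (result instanceof Error) {
    console.error(result.message);
} else {
    console.log('ports:', result);
}
```
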
diff --git a/application/apps/rustcore/ts-bindings/src/native/native.session.ts b/application/apps/rustcore/ts-bindings/src/native/native.session.ts
index 23446eed73..8b3223d494 100644
--- a/application/apps/rustcore/ts-bindings/src/native/native.session.ts
+++ b/application/apps/rustcore/ts-bindings/src/native/native.session.ts
@@ -3,7 +3,7 @@ import { RustSessionRequiered } from '../native/native.session.required';
 import { TEventEmitter } from '../provider/provider.general';
 import { Computation } from '../provider/provider';
 import { IFilter } from 'platform/types/filter';
-import { IGrabbedElement } from 'platform/types/content';
+import { GrabbedElement } from 'platform/types/bindings/miscellaneous';
 import { getNativeModule } from '../native/native';
 import { EFileOptionsRequirements } from '../api/executors/session.stream.observe.executor';
 import { Type, Source, NativeError } from '../interfaces/errors';
@@ -14,9 +14,12 @@ import { ISourceLink } from 'platform/types/observe/types';
 import { IndexingMode, Attachment } from 'platform/types/content';
 import { Logger, utils } from 'platform/log';
 import { scope } from 'platform/env/scope';
-import { IObserve, Observe } from 'platform/types/observe';
+import { IObserve } from 'platform/types/observe';
 import { TextExportOptions } from 'platform/types/exporting';
 
+import * as protocol from 'protocol';
+import * as types from 'platform/types';
+
 export type RustSessionConstructorImpl<T> = new (
     uuid: string,
     provider: Computation<any, any, any>,
@@ -40,11 +43,11 @@ export abstract class RustSession extends RustSessionRequiered {
      *
      * @error In case of incorrect range should return { NativeError }
      */
-    public abstract grabStreamChunk(start: number, len: number): Promise<IGrabbedElement[]>;
+    public abstract grabStreamChunk(start: number, len: number): Promise<GrabbedElement[]>;
 
-    public abstract grabStreamRanges(ranges: IRange[]): Promise<IGrabbedElement[]>;
+    public abstract grabStreamRanges(ranges: IRange[]): Promise<GrabbedElement[]>;
 
-    public abstract grabIndexed(start: number, len: number): Promise<IGrabbedElement[]>;
+    public abstract grabIndexed(start: number, len: number): Promise<GrabbedElement[]>;
 
     public abstract setIndexingMode(mode: IndexingMode): Promise<void>;
 
@@ -73,7 +76,7 @@ export abstract class RustSession extends RustSessionRequiered {
      * @returns { string }
      * @error In case of incorrect range should return { NativeError }
      */
-    public abstract grabSearchChunk(start: number, len: number): Promise<IGrabbedElement[]>;
+    public abstract grabSearchChunk(start: number, len: number): Promise<GrabbedElement[]>;
 
     /**
      * TODO: @return needs interface. It should not be a string
@@ -158,21 +161,24 @@ export abstract class RustSession extends RustSessionRequiered {
         datasetLength: number,
         from?: number,
         to?: number,
-    ): Promise<string>;
+    ): Promise<void>;
 
     public abstract getValues(
         operationUuid: string,
         datasetLength: number,
         from?: number,
         to?: number,
-    ): Promise<string>;
+    ): Promise<void>;
 
     public abstract getNearestTo(
         operationUuid: string,
         positionInStream: number,
     ): Promise<{ index: number; position: number } | undefined>;
 
-    public abstract sendIntoSde(targetOperationUuid: string, jsonStrMsg: string): Promise<string>;
+    public abstract sendIntoSde(
+        targetOperationUuid: string,
+        request: types.sde.SdeRequest,
+    ): Promise<types.sde.SdeResponse>;
 
     public abstract getAttachments(): Promise<Attachment[]>;
     public abstract getIndexedRanges(): Promise<IRange[]>;
@@ -198,6 +204,12 @@ export abstract class RustSession extends RustSessionRequiered {
 
     // Used only for testing and debug
     public abstract triggerTrackerError(): Promise<void>;
+
+    // Used only for testing and debug
+    public abstract testGrabElsAsJson(): GrabbedElement[] | NativeError;
+
+    // Used only for testing and debug
+    public abstract testGrabElsAsBin(): GrabbedElement[] | NativeError;
 }
 
 export abstract class RustSessionNative {
@@ -209,15 +221,15 @@ export abstract class RustSessionNative {
 
     public abstract getSessionFile(): Promise<string>;
 
-    public abstract observe(source: string, operationUuid: string): Promise<void>;
+    public abstract observe(source: Uint8Array, operationUuid: string): Promise<void>;
 
     public abstract getStreamLen(): Promise<number>;
 
-    public abstract getSourcesDefinitions(): Promise<ISourceLink[]>;
+    public abstract getSourcesDefinitions(): Promise<Uint8Array>;
 
-    public abstract grab(start: number, len: number): Promise<string>;
+    public abstract grab(start: number, len: number): Promise<Uint8Array>;
 
-    public abstract grabIndexed(start: number, len: number): Promise<string>;
+    public abstract grabIndexed(start: number, len: number): Promise<Uint8Array>;
 
     public abstract setIndexingMode(mode: number): Promise<void>;
 
@@ -237,9 +249,9 @@ export abstract class RustSessionNative {
 
     public abstract setBookmarks(rows: number[]): Promise<void>;
 
-    public abstract grabRanges(ranges: number[][]): Promise<string>;
+    public abstract grabRanges(ranges: number[][]): Promise<Uint8Array>;
 
-    public abstract grabSearch(start: number, len: number): Promise<string>;
+    public abstract grabSearch(start: number, len: number): Promise<Uint8Array>;
 
     public abstract getSearchLen(): Promise<number>;
 
@@ -292,23 +304,26 @@ export abstract class RustSessionNative {
         datasetLength: number,
         from?: number,
         to?: number,
-    ): Promise<string>;
+    ): Promise<void>;
 
     public abstract getValues(
         operationUuid: string,
         datasetLength: number,
         from?: number,
         to?: number,
-    ): Promise<string>;
+    ): Promise<void>;
 
     public abstract getNearestTo(
         operationUuid: string,
         positionInStream: number,
     ): Promise<number[] | null>;
 
-    public abstract sendIntoSde(targetOperationUuid: string, jsonStrMsg: string): Promise<string>;
-    public abstract getAttachments(): Promise<string>;
-    public abstract getIndexedRanges(): Promise<string>;
+    public abstract sendIntoSde(
+        targetOperationUuid: string,
+        request: Uint8Array,
+    ): Promise<Uint8Array>;
+    public abstract getAttachments(): Promise<Uint8Array>;
+    public abstract getIndexedRanges(): Promise<Uint8Array>;
 
     public abstract abort(
         selfOperationUuid: string,
@@ -331,6 +346,12 @@ export abstract class RustSessionNative {
 
     // Used only for testing and debug
     public abstract triggerTrackerError(): Promise<void>;
+
+    // Used only for testing and debug
+    public abstract testGrabElsAsJson(): string;
+
+    // Used only for testing and debug
+    public abstract testGrabElsAsBin(): number[];
 }
 
 export function rustSessionFactory(
@@ -430,139 +451,81 @@ export class RustSessionWrapper extends RustSession {
             this._provider.debug().emit.operation('getSourcesDefinitions');
             this._native
                 .getSourcesDefinitions()
-                .then((sources: ISourceLink[]) => {
-                    resolve(sources);
+                .then((buf: Uint8Array) => {
+                    try {
+                        resolve(protocol.decodeSources(buf));
+                    } catch (err) {
+                        reject(
+                            new NativeError(
+                                new Error(
+                                    this._logger.error(
+                                        `Fail to decode message: ${utils.error(err)}`,
+                                    ),
+                                ),
+                                Type.InvalidOutput,
+                                Source.GetSourcesDefinitions,
+                            ),
+                        );
+                    }
                 })
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.GrabbingContent,
-                            Source.GetSourcesDefinitions,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
 
-    public grabStreamChunk(start: number, len: number): Promise<IGrabbedElement[]> {
+    public grabStreamChunk(start: number, len: number): Promise<GrabbedElement[]> {
         return new Promise((resolve, reject) => {
             this._provider.debug().emit.operation('grab');
             this._native
                 .grab(start, len)
-                .then((grabbed: string) => {
+                .then((buf: Uint8Array) => {
                     try {
-                        const result: Array<{
-                            c: string;
-                            id: number;
-                            p: number;
-                            n: number;
-                        }> = JSON.parse(grabbed);
-                        resolve(
-                            result.map(
-                                (
-                                    item: {
-                                        c: string;
-                                        id: number;
-                                        p: number;
-                                        n: number;
-                                    },
-                                    i: number,
-                                ) => {
-                                    return {
-                                        content: item.c,
-                                        source_id: item.id,
-                                        position: getValidNum(item.p),
-                                        nature: item.n,
-                                    };
-                                },
-                            ),
-                        );
+                        resolve(protocol.decodeGrabbedElementList(buf));
                     } catch (err) {
                         reject(
                             new NativeError(
                                 new Error(
                                     this._logger.error(
-                                        `Fail to call grab(${start}, ${len}) due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
+                                        `Fail to decode message: ${utils.error(err)}`,
                                     ),
                                 ),
-                                Type.ParsingContentChunk,
+                                Type.InvalidOutput,
                                 Source.GrabStreamChunk,
                             ),
                         );
                     }
                 })
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.GrabbingContent,
-                            Source.GrabStreamChunk,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
 
-    public grabIndexed(start: number, len: number): Promise<IGrabbedElement[]> {
+    public grabIndexed(start: number, len: number): Promise<GrabbedElement[]> {
         return new Promise((resolve, reject) => {
             this._provider.debug().emit.operation('grabIndexed');
             this._native
                 .grabIndexed(start, len)
-                .then((grabbed: string) => {
+                .then((buf: Uint8Array) => {
                     try {
-                        const result: Array<{
-                            c: string;
-                            id: number;
-                            p: unknown;
-                            n: number;
-                        }> = JSON.parse(grabbed);
-                        resolve(
-                            result.map(
-                                (
-                                    item: {
-                                        c: string;
-                                        id: number;
-                                        p: unknown;
-                                        n: number;
-                                    },
-                                    i: number,
-                                ) => {
-                                    return {
-                                        content: item.c,
-                                        source_id: item.id,
-                                        position: getValidNum(item.p),
-                                        nature: item.n,
-                                    };
-                                },
-                            ),
-                        );
+                        resolve(protocol.decodeGrabbedElementList(buf));
                     } catch (err) {
                         reject(
                             new NativeError(
                                 new Error(
                                     this._logger.error(
-                                        `Fail to call grabIndexed(${start}, ${len}) due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
+                                        `Fail to decode message: ${utils.error(err)}`,
                                     ),
                                 ),
-                                Type.ParsingContentChunk,
+                                Type.InvalidOutput,
                                 Source.GrabStreamChunk,
                             ),
                         );
                     }
                 })
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.GrabbingContent,
-                            Source.GrabStreamChunk,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
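
A sketch of the shared grab-decoding pattern used by grabStreamChunk, grabIndexed, grabStreamRanges and grabSearchChunk: decode the binary payload and surface a decoding failure as an error, instead of parsing and remapping JSON row objects. The GrabbedElement shape and the decoder parameter are illustrative stand-ins for the generated bindings type and protocol.decodeGrabbedElementList.

```ts
// Illustrative GrabbedElement shape (stand-in for the generated bindings type).
type GrabbedElement = { source_id: number; content: string; pos: number; nature: number };

function decodeGrabbed(
    buf: Uint8Array,
    decodeGrabbedElementList: (buf: Uint8Array) => GrabbedElement[],
): GrabbedElement[] | Error {
    try {
        return decodeGrabbedElementList(buf);
    } catch (err) {
        return new Error(`Fail to decode message: ${err instanceof Error ? err.message : String(err)}`);
    }
}

const rows = decodeGrabbed(new Uint8Array(), () => [
    { source_id: 0, content: 'first line', pos: 0, nature: 0 },
]);
console.log(rows instanceof Error ? rows.message : `${rows.length} row(s) grabbed`);
```
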
@@ -574,13 +537,7 @@ export class RustSessionWrapper extends RustSession {
                 .setIndexingMode(mode)
                 .then(resolve)
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.ContentManipulation,
-                            Source.SetIndexingMode,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -592,13 +549,7 @@ export class RustSessionWrapper extends RustSession {
                 .getIndexedLen()
                 .then(resolve)
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.ContentManipulation,
-                            Source.GetIndexedLen,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -617,13 +568,7 @@ export class RustSessionWrapper extends RustSession {
                     });
                 })
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.ContentManipulation,
-                            Source.getAroundIndexes,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -635,13 +580,7 @@ export class RustSessionWrapper extends RustSession {
                 .expandBreadcrumbs(seporator, offset, above)
                 .then(resolve)
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.ContentManipulation,
-                            Source.ExpandBreadcrumbs,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -653,13 +592,7 @@ export class RustSessionWrapper extends RustSession {
                 .removeBookmark(row)
                 .then(resolve)
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.ContentManipulation,
-                            Source.RemoveBookmark,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -671,13 +604,7 @@ export class RustSessionWrapper extends RustSession {
                 .addBookmark(row)
                 .then(resolve)
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.ContentManipulation,
-                            Source.AddBookmark,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -689,75 +616,36 @@ export class RustSessionWrapper extends RustSession {
                 .setBookmarks(rows)
                 .then(resolve)
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.ContentManipulation,
-                            Source.SetBookmarks,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
 
-    public grabStreamRanges(ranges: IRange[]): Promise<IGrabbedElement[]> {
+    public grabStreamRanges(ranges: IRange[]): Promise<GrabbedElement[]> {
         return new Promise((resolve, reject) => {
             try {
                 this._provider.debug().emit.operation('grabRanges');
                 this._native
-                    .grabRanges(ranges.map((r) => [r.from, r.to]))
-                    .then((grabbed: string) => {
+                    .grabRanges(ranges.map((r) => [r.start, r.end]))
+                    .then((buf: Uint8Array) => {
                         try {
-                            const result: Array<{
-                                c: string;
-                                id: number;
-                                p: number;
-                                n: number;
-                            }> = JSON.parse(grabbed);
-                            resolve(
-                                result.map(
-                                    (
-                                        item: {
-                                            c: string;
-                                            id: number;
-                                            p: number;
-                                            n: number;
-                                        },
-                                        i: number,
-                                    ) => {
-                                        return {
-                                            content: item.c,
-                                            source_id: item.id,
-                                            position: getValidNum(item.p),
-                                            nature: item.n,
-                                        };
-                                    },
-                                ),
-                            );
+                            resolve(protocol.decodeGrabbedElementList(buf));
                         } catch (err) {
                             reject(
                                 new NativeError(
                                     new Error(
                                         this._logger.error(
-                                            `Fail to call grab ranges due error: ${
-                                                err instanceof Error ? err.message : err
-                                            }`,
+                                            `Fail to decode message: ${utils.error(err)}`,
                                         ),
                                     ),
-                                    Type.ParsingContentChunk,
+                                    Type.InvalidOutput,
                                     Source.GrabStreamChunk,
                                 ),
                             );
                         }
                     })
                     .catch((err: Error) => {
-                        reject(
-                            new NativeError(
-                                NativeError.from(err),
-                                Type.Other,
-                                Source.GrabStreamChunk,
-                            ),
-                        );
+                        reject(NativeError.from(err));
                     });
             } catch (err) {
                 return reject(
@@ -767,63 +655,30 @@ export class RustSessionWrapper extends RustSession {
         });
     }
 
-    public grabSearchChunk(start: number, len: number): Promise<IGrabbedElement[]> {
+    public grabSearchChunk(start: number, len: number): Promise<GrabbedElement[]> {
         return new Promise((resolve, reject) => {
             this._provider.debug().emit.operation('grabSearch');
             this._native
                 .grabSearch(start, len)
-                .then((grabbed: string) => {
+                .then((buf: Uint8Array) => {
                     try {
-                        const result: Array<{
-                            c: string;
-                            id: number;
-                            p: number;
-                            n: number;
-                        }> = JSON.parse(grabbed);
-                        resolve(
-                            result.map(
-                                (
-                                    item: {
-                                        c: string;
-                                        id: number;
-                                        p: unknown;
-                                        n: number;
-                                    },
-                                    i: number,
-                                ) => {
-                                    return {
-                                        content: item.c,
-                                        source_id: item.id,
-                                        position: getValidNum(item.p),
-                                        nature: item.n,
-                                    };
-                                },
-                            ),
-                        );
+                        resolve(protocol.decodeGrabbedElementList(buf));
                     } catch (err) {
                         reject(
                             new NativeError(
                                 new Error(
                                     this._logger.error(
-                                        `Fail to call grab(${start}, ${len}) due error: ${
-                                            err instanceof Error ? err.message : err
-                                        }`,
+                                        `Fail to decode message: ${utils.error(err)}`,
                                     ),
                                 ),
-                                Type.ParsingSearchChunk,
+                                Type.InvalidOutput,
                                 Source.GrabSearchChunk,
                             ),
                         );
                     }
                 })
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.GrabbingSearch,
-                            Source.GrabSearchChunk,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -843,7 +698,7 @@ export class RustSessionWrapper extends RustSession {
                 .getStreamLen()
                 .then(resolve)
                 .catch((err) => {
-                    reject(new NativeError(NativeError.from(err), Type.Other, Source.GetStreamLen));
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -855,7 +710,7 @@ export class RustSessionWrapper extends RustSession {
                 .getSearchLen()
                 .then(resolve)
                 .catch((err) => {
-                    reject(new NativeError(NativeError.from(err), Type.Other, Source.GetSearchLen));
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -874,10 +729,10 @@ export class RustSessionWrapper extends RustSession {
             try {
                 this._provider.debug().emit.operation('observe', operationUuid);
                 this._native
-                    .observe(new Observe(source).json().to(), operationUuid)
+                    .observe(protocol.encodeObserveOptions(source), operationUuid)
                     .then(resolve)
                     .catch((err: Error) => {
-                        reject(new NativeError(NativeError.from(err), Type.Other, Source.Assign));
+                        reject(NativeError.from(err));
                     });
             } catch (err) {
                 return reject(new NativeError(NativeError.from(err), Type.Other, Source.Assign));
@@ -897,7 +752,7 @@ export class RustSessionWrapper extends RustSession {
                 this._native
                     .export(
                         dest,
-                        ranges.map((r) => [r.from, r.to]),
+                        ranges.map((r) => [r.start, r.end]),
                         opt.columns,
                         opt.spliter === undefined ? '' : opt.spliter,
                         opt.delimiter === undefined ? '' : opt.delimiter,
@@ -905,7 +760,7 @@ export class RustSessionWrapper extends RustSession {
                     )
                     .then(resolve)
                     .catch((err: Error) => {
-                        reject(new NativeError(NativeError.from(err), Type.Other, Source.Assign));
+                        reject(NativeError.from(err));
                     });
             } catch (err) {
                 return reject(new NativeError(NativeError.from(err), Type.Other, Source.Assign));
@@ -920,12 +775,12 @@ export class RustSessionWrapper extends RustSession {
                 this._native
                     .exportRaw(
                         dest,
-                        ranges.map((r) => [r.from, r.to]),
+                        ranges.map((r) => [r.start, r.end]),
                         operationUuid,
                     )
                     .then(resolve)
                     .catch((err: Error) => {
-                        reject(new NativeError(NativeError.from(err), Type.Other, Source.Assign));
+                        reject(NativeError.from(err));
                     });
             } catch (err) {
                 return reject(new NativeError(NativeError.from(err), Type.Other, Source.Assign));
@@ -940,7 +795,7 @@ export class RustSessionWrapper extends RustSession {
                 .isRawExportAvailable()
                 .then(resolve)
                 .catch((err) => {
-                    reject(new NativeError(NativeError.from(err), Type.Other, Source.GetSearchLen));
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -963,7 +818,7 @@ export class RustSessionWrapper extends RustSession {
                     )
                     .then(resolve)
                     .catch((err: Error) => {
-                        reject(new NativeError(NativeError.from(err), Type.Other, Source.Search));
+                        reject(NativeError.from(err));
                     });
             } catch (err) {
                 return reject(new NativeError(NativeError.from(err), Type.Other, Source.Search));
@@ -979,9 +834,7 @@ export class RustSessionWrapper extends RustSession {
                     .applySearchValuesFilters(filters, operationUuid)
                     .then(resolve)
                     .catch((err: Error) => {
-                        reject(
-                            new NativeError(NativeError.from(err), Type.Other, Source.SearchValues),
-                        );
+                        reject(NativeError.from(err));
                     });
             } catch (err) {
                 return reject(
@@ -1013,13 +866,7 @@ export class RustSessionWrapper extends RustSession {
                     )
                     .then(resolve)
                     .catch((err: Error) => {
-                        reject(
-                            new NativeError(
-                                NativeError.from(err),
-                                Type.Other,
-                                Source.ExtractMatchesValues,
-                            ),
-                        );
+                        reject(NativeError.from(err));
                     });
             } catch (err) {
                 return reject(
@@ -1034,7 +881,7 @@ export class RustSessionWrapper extends RustSession {
         datasetLength: number,
         from?: number,
         to?: number,
-    ): Promise<string> {
+    ): Promise<void> {
         return new Promise((resolve, reject) => {
             this._provider.debug().emit.operation('getMap', operationUuid);
             (() => {
@@ -1046,7 +893,7 @@ export class RustSessionWrapper extends RustSession {
             })()
                 .then(resolve)
                 .catch((err) => {
-                    reject(new NativeError(NativeError.from(err), Type.Other, Source.GetMap));
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -1056,7 +903,7 @@ export class RustSessionWrapper extends RustSession {
         datasetLength: number,
         from?: number,
         to?: number,
-    ): Promise<string> {
+    ): Promise<void> {
         return new Promise((resolve, reject) => {
             this._provider.debug().emit.operation('getValues', operationUuid);
             (() => {
@@ -1068,7 +915,7 @@ export class RustSessionWrapper extends RustSession {
             })()
                 .then(resolve)
                 .catch((err) => {
-                    reject(new NativeError(NativeError.from(err), Type.Other, Source.GetMap));
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -1099,18 +946,37 @@ export class RustSessionWrapper extends RustSession {
                     }
                 })
                 .catch((err) => {
-                    reject(new NativeError(NativeError.from(err), Type.Other, Source.GetNearestTo));
+                    reject(NativeError.from(err));
                 });
         });
     }
 
-    public sendIntoSde(targetOperationUuid: string, jsonStrMsg: string): Promise<string> {
+    public sendIntoSde(
+        targetOperationUuid: string,
+        request: types.sde.SdeRequest,
+    ): Promise<types.sde.SdeResponse> {
         return new Promise((resolve, reject) => {
             this._native
-                .sendIntoSde(targetOperationUuid, jsonStrMsg)
-                .then(resolve)
+                .sendIntoSde(targetOperationUuid, protocol.encodeSdeRequest(request))
+                .then((buf: Uint8Array) => {
+                    try {
+                        resolve(protocol.decodeSdeResponse(buf));
+                    } catch (err) {
+                        reject(
+                            new NativeError(
+                                new Error(
+                                    this._logger.error(
+                                        `Fail to decode message: ${utils.error(err)}`,
+                                    ),
+                                ),
+                                Type.InvalidOutput,
+                                Source.SendIntoSde,
+                            ),
+                        );
+                    }
+                })
                 .catch((err) => {
-                    reject(new NativeError(NativeError.from(err), Type.Other, Source.SendIntoSde));
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -1119,26 +985,25 @@ export class RustSessionWrapper extends RustSession {
         return new Promise((resolve, reject) => {
             this._native
                 .getAttachments()
-                .then((str: string) => {
+                .then((buf: Uint8Array) => {
                     try {
-                        const attachments: Attachment[] = [];
-                        for (const unchecked of JSON.parse(str) as unknown[]) {
-                            const attachment = Attachment.from(unchecked);
-                            if (attachment instanceof Error) {
-                                reject(attachment);
-                                return;
-                            }
-                            attachments.push(attachment);
-                        }
-                        resolve(attachments);
-                    } catch (e) {
-                        reject(new Error(utils.error(e)));
+                        resolve(protocol.decodeAttachmentList(buf));
+                    } catch (err) {
+                        reject(
+                            new NativeError(
+                                new Error(
+                                    this._logger.error(
+                                        `Fail to decode message: ${utils.error(err)}`,
+                                    ),
+                                ),
+                                Type.InvalidOutput,
+                                Source.GetAttachments,
+                            ),
+                        );
                     }
                 })
                 .catch((err) => {
-                    reject(
-                        new NativeError(NativeError.from(err), Type.Other, Source.GetAttachments),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -1147,26 +1012,25 @@ export class RustSessionWrapper extends RustSession {
         return new Promise((resolve, reject) => {
             this._native
                 .getIndexedRanges()
-                .then((str: string) => {
+                .then((buf: Uint8Array) => {
                     try {
-                        const ranges: IRange[] = [];
-                        for (const unchecked of JSON.parse(str) as unknown[]) {
-                            const range = fromTuple(unchecked);
-                            if (range instanceof Error) {
-                                reject(range);
-                                return;
-                            }
-                            ranges.push(range);
-                        }
-                        resolve(ranges);
-                    } catch (e) {
-                        reject(new Error(utils.error(e)));
+                        resolve(protocol.decodeRanges(buf));
+                    } catch (err) {
+                        reject(
+                            new NativeError(
+                                new Error(
+                                    this._logger.error(
+                                        `Fail to decode message: ${utils.error(err)}`,
+                                    ),
+                                ),
+                                Type.InvalidOutput,
+                                Source.GetIndexedRanges,
+                            ),
+                        );
                     }
                 })
                 .catch((err) => {
-                    reject(
-                        new NativeError(NativeError.from(err), Type.Other, Source.GetAttachments),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -1186,7 +1050,7 @@ export class RustSessionWrapper extends RustSession {
                 .setDebug(debug)
                 .then(resolve)
                 .catch((err) => {
-                    reject(new NativeError(NativeError.from(err), Type.Other, Source.SetDebug));
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -1197,13 +1061,7 @@ export class RustSessionWrapper extends RustSession {
                 .getOperationsStat()
                 .then(resolve)
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.Other,
-                            Source.GetOperationsStat,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -1220,7 +1078,7 @@ export class RustSessionWrapper extends RustSession {
                 .sleep(operationUuid, duration, ignoreCancellation)
                 .then(resolve)
                 .catch((err) => {
-                    reject(new NativeError(NativeError.from(err), Type.Other, Source.Sleep));
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -1233,13 +1091,7 @@ export class RustSessionWrapper extends RustSession {
                 .triggerStateError()
                 .then(resolve)
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.Other,
-                            Source.TriggerStateError,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
@@ -1252,16 +1104,52 @@ export class RustSessionWrapper extends RustSession {
                 .triggerTrackerError()
                 .then(resolve)
                 .catch((err) => {
-                    reject(
-                        new NativeError(
-                            NativeError.from(err),
-                            Type.Other,
-                            Source.TriggerTrackerError,
-                        ),
-                    );
+                    reject(NativeError.from(err));
                 });
         });
     }
+
+    public testGrabElsAsJson(): GrabbedElement[] | NativeError {
+        try {
+            const lines: Array<{
+                content: string;
+                source_id: number;
+                pos: number;
+                nature: number;
+            }> = JSON.parse(this._native.testGrabElsAsJson());
+            const elements = lines.map(
+                (
+                    item: {
+                        content: string;
+                        source_id: number;
+                        pos: number;
+                        nature: number;
+                    },
+                    i: number,
+                ) => {
+                    return {
+                        content: item.content,
+                        source_id: item.source_id,
+                        pos: getValidNum(item.pos),
+                        nature: item.nature,
+                    };
+                },
+            );
+            return elements;
+        } catch (err) {
+            return new NativeError(new Error(utils.error(err)), Type.Other, Source.Other);
+        }
+    }
+
+    public testGrabElsAsBin(): GrabbedElement[] | NativeError {
+        try {
+            const received = this._native.testGrabElsAsBin();
+            const elements = protocol.decodeGrabbedElementList(Uint8Array.from(received));
+            return elements;
+        } catch (err) {
+            return new NativeError(new Error(utils.error(err)), Type.Other, Source.Other);
+        }
+    }
 }
 
 export const RustSessionWrapperConstructor: RustSessionConstructorImpl<RustSessionWrapper> =
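Every wrapper method above now follows the same shape: the native call resolves with a `Uint8Array`, decoding happens inside a try/catch that maps failures to `Type.InvalidOutput` for the relevant `Source`, and rejections coming from the native layer are forwarded with `NativeError.from(err)` instead of being re-wrapped as `Type.Other`. A minimal, self-contained sketch of that shape; `WrappedError` and `DecodeKind` are local stand-ins for `NativeError`/`Type`, which are defined elsewhere in the ts-bindings:

```typescript
// Local stand-ins so the sketch compiles on its own; the real wrapper uses
// NativeError, Type and Source from the ts-bindings sources.
type DecodeKind = 'InvalidOutput' | 'Other';

class WrappedError extends Error {
    constructor(message: string, public readonly kind: DecodeKind) {
        super(message);
    }
}

// The pattern repeated by getAttachments, getIndexedRanges, sendIntoSde, etc.:
// decode errors become InvalidOutput, native rejections are passed through.
function callAndDecode<T>(
    nativeCall: () => Promise<Uint8Array>,
    decode: (buf: Uint8Array) => T,
): Promise<T> {
    return new Promise((resolve, reject) => {
        nativeCall()
            .then((buf: Uint8Array) => {
                try {
                    resolve(decode(buf));
                } catch (err) {
                    reject(new WrappedError(`Fail to decode message: ${err}`, 'InvalidOutput'));
                }
            })
            .catch((err) => {
                // In the wrapper above this is reject(NativeError.from(err))
                reject(err);
            });
    });
}
```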
diff --git a/application/apps/rustcore/ts-bindings/src/provider/provider.general.ts b/application/apps/rustcore/ts-bindings/src/provider/provider.general.ts
index 3a3e808e87..40e689e492 100644
--- a/application/apps/rustcore/ts-bindings/src/provider/provider.general.ts
+++ b/application/apps/rustcore/ts-bindings/src/provider/provider.general.ts
@@ -1,7 +1 @@
-export interface IEventData {
-    [key: string]: any;
-}
-
-export type TEventData = string | Required<IEventData>;
-
-export type TEventEmitter = (event: TEventData) => void;
+export type TEventEmitter = (event: Uint8Array) => void;
diff --git a/application/apps/rustcore/ts-bindings/src/provider/provider.ts b/application/apps/rustcore/ts-bindings/src/provider/provider.ts
index 5d42573cb9..7afd6d5ac9 100644
--- a/application/apps/rustcore/ts-bindings/src/provider/provider.ts
+++ b/application/apps/rustcore/ts-bindings/src/provider/provider.ts
@@ -7,7 +7,7 @@ import { Subject, validateEventDesc } from 'platform/env/subscription';
 import { error } from 'platform/log/utils';
 import { Logger } from 'platform/log';
 import { scope } from 'platform/env/scope';
-import { TEventData, TEventEmitter, IEventData } from '../provider/provider.general';
+import { TEventEmitter } from '../provider/provider.general';
 
 export interface IOrderStat {
     type: 'E' | 'O';
@@ -18,8 +18,8 @@ export interface IOrderStat {
 }
 export abstract class Computation<TEvents, IEventsSignatures, IEventsInterfaces> {
     private _destroyed: boolean = false;
-    private readonly _uuid: string;
-    private readonly _tracking: {
+    protected readonly uuid: string;
+    protected readonly tracking: {
         subjects: {
             unsupported: Subject<string>;
             error: Subject<string>;
@@ -52,17 +52,19 @@ export abstract class Computation<TEvents, IEventsSignatures, IEventsInterfaces>
         store: false,
         count: false,
     };
+    protected readonly decoder: (buf: Uint8Array) => any;
     public readonly logger: Logger;
 
-    constructor(uuid: string) {
-        this._uuid = uuid;
+    constructor(uuid: string, decoder: (buf: Uint8Array) => any) {
+        this.uuid = uuid;
+        this.decoder = decoder;
         this._emitter = this._emitter.bind(this);
         this.logger = scope.getLogger(`${this.getName()}: ${uuid}`);
     }
 
     public destroy(): Promise<void> {
         if (this._destroyed) {
-            this.logger.warn(`Computation (${this._uuid}) is already destroying or destroyed`);
+            this.logger.warn(`Computation (${this.uuid}) is already destroying or destroyed`);
         } else {
             this._destroy();
         }
@@ -109,79 +111,79 @@ export abstract class Computation<TEvents, IEventsSignatures, IEventsInterfaces>
         return {
             getEvents() {
                 return {
-                    unsupported: self._tracking.subjects.unsupported,
-                    error: self._tracking.subjects.error,
+                    unsupported: self.tracking.subjects.unsupported,
+                    error: self.tracking.subjects.error,
                 };
             },
             isTracking(): boolean {
-                return self._tracking.track;
+                return self.tracking.track;
             },
             isStored(): boolean {
-                return self._tracking.store;
+                return self.tracking.store;
             },
             setTracking(value: boolean): void {
-                self._tracking.track = value;
+                self.tracking.track = value;
             },
             setStoring(value: boolean): void {
-                self._tracking.store = value;
+                self.tracking.store = value;
             },
             setCount(value: boolean): void {
-                self._tracking.count = value;
+                self.tracking.count = value;
             },
             setAlias(value: string): void {
-                self._tracking.stat.alias = value;
+                self.tracking.stat.alias = value;
             },
             getAlias(): string | undefined {
-                return self._tracking.stat.alias;
+                return self.tracking.stat.alias;
             },
             stat: {
                 unsupported(): string[] {
-                    return self._tracking.stat.unsupported;
+                    return self.tracking.stat.unsupported;
                 },
                 error(): string[] {
-                    return self._tracking.stat.error;
+                    return self.tracking.stat.error;
                 },
                 counter(): { [key: string]: number } {
-                    return self._tracking.stat.counter;
+                    return self.tracking.stat.counter;
                 },
                 order(): IOrderStat[] {
-                    return self._tracking.stat.order;
+                    return self.tracking.stat.order;
                 },
                 operations(): { [key: string]: number } {
-                    return self._tracking.stat.operations;
+                    return self.tracking.stat.operations;
                 },
             },
             emit: {
                 unsupported(msg: string): void {
-                    if (self._tracking.track) {
-                        self._tracking.subjects.unsupported.emit(msg);
+                    if (self.tracking.track) {
+                        self.tracking.subjects.unsupported.emit(msg);
                     }
-                    if (self._tracking.store) {
-                        self._tracking.stat.unsupported.push(msg);
+                    if (self.tracking.store) {
+                        self.tracking.stat.unsupported.push(msg);
                     }
                 },
                 error(msg: string): void {
-                    if (self._tracking.track) {
-                        self._tracking.subjects.error.emit(msg);
+                    if (self.tracking.track) {
+                        self.tracking.subjects.error.emit(msg);
                     }
-                    if (self._tracking.store) {
-                        self._tracking.stat.error.push(msg);
+                    if (self.tracking.store) {
+                        self.tracking.stat.error.push(msg);
                     }
                 },
                 event(event: string, id?: string): void {
-                    if (!self._tracking.count) {
+                    if (!self.tracking.count) {
                         return;
                     }
-                    if (self._tracking.stat.counter[event] === undefined) {
-                        self._tracking.stat.counter[event] = 0;
+                    if (self.tracking.stat.counter[event] === undefined) {
+                        self.tracking.stat.counter[event] = 0;
                     }
-                    self._tracking.stat.counter[event] += 1;
+                    self.tracking.stat.counter[event] += 1;
                     const operation =
                         id === undefined
                             ? undefined
-                            : self._tracking.stat.order.find((s) => s.id === id);
+                            : self.tracking.stat.order.find((s) => s.id === id);
                     if (operation === undefined) {
-                        self._tracking.stat.order.push({
+                        self.tracking.stat.order.push({
                             type: 'E',
                             name: event,
                             id,
@@ -190,7 +192,7 @@ export abstract class Computation<TEvents, IEventsSignatures, IEventsInterfaces>
                         });
                     } else {
                         const emitted = Date.now();
-                        self._tracking.stat.order.push({
+                        self.tracking.stat.order.push({
                             type: 'E',
                             name: event,
                             id,
@@ -201,14 +203,14 @@ export abstract class Computation<TEvents, IEventsSignatures, IEventsInterfaces>
                     }
                 },
                 operation(operation: string, id?: string): void {
-                    if (!self._tracking.count) {
+                    if (!self.tracking.count) {
                         return;
                     }
-                    if (self._tracking.stat.operations[operation] === undefined) {
-                        self._tracking.stat.operations[operation] = 0;
+                    if (self.tracking.stat.operations[operation] === undefined) {
+                        self.tracking.stat.operations[operation] = 0;
                     }
-                    self._tracking.stat.operations[operation] += 1;
-                    self._tracking.stat.order.push({
+                    self.tracking.stat.operations[operation] += 1;
+                    self.tracking.stat.order.push({
                         type: 'O',
                         name: operation,
                         id,
@@ -225,54 +227,29 @@ export abstract class Computation<TEvents, IEventsSignatures, IEventsInterfaces>
      * { [type: string]: string | undefined }
      * @param data {string}
      */
-    private _emitter(data: TEventData) {
-        function dataAsStr(data: TEventData): { debug: string; verb?: string } {
-            let message = '';
-            if (typeof data === 'string') {
-                message = `(defined as string): ${data}`;
-            } else {
-                message = `(defined as object): keys: ${Object.keys(data).join(
-                    ', ',
-                )} / values: ${Object.keys(data)
-                    .map((k) => JSON.stringify(data[k]))
-                    .join(', ')}`;
-            }
-            return {
-                debug: `${message.substring(0, 250)}${message.length > 250 ? '...' : ''}`,
-                verb: message.length > 250 ? message : undefined,
-            };
-        }
-        const logs = dataAsStr(data);
-        this.logger.verbose(`Event from rust:\n\t${logs.debug}`);
-        logs.verb !== undefined && this.logger.verbose(`Event from rust:\n\t${logs.verb}`);
-        let event: Required<IEventData>;
-        if (typeof data === 'string') {
-            try {
-                event = JSON.parse(data);
-            } catch (e) {
-                const msg: string = `Failed to parse rust event data due error: ${e}.\nExpecting type (JSON string): { [type: string]: string | undefined }, got: ${data}`;
-                this.debug().emit.error(msg);
-                this.logger.error(msg);
-                return;
-            }
-        } else if (typeof data === 'object' && data !== null) {
-            event = data;
-        } else {
-            const msg: string = `Unsupported format of event data: ${typeof data} / ${data}.\nExpecting type (JSON string): { [type: string]: string | undefined }`;
-            this.debug().emit.error(msg);
-            this.logger.error(msg);
+    private _emitter(buf: Uint8Array) {
+        let event: any;
+        try {
+            event = this.decoder(buf);
+        } catch (err) {
+            this.debug().emit.error(
+                this.logger.error(`Fail to decode CallbackEvent: ${error(err)}`),
+            );
             return;
         }
         if (typeof event === 'string') {
             this._emit(event, null);
         } else if (typeof event !== 'object' || event === null || Object.keys(event).length !== 1) {
-            const msg: string = `Has been gotten incorrect event data: ${data}. No any props field found.\nExpecting type (JSON string): { [type: string]: string | undefined }`;
-            this.debug().emit.error(msg);
-            this.logger.error(msg);
+            this.debug().emit.error(
+                this.logger.error(
+                    `Has been gotten incorrect event data: ${JSON.stringify(
+                        event,
+                    )}. No any props field found.\nExpecting type: { [type: string]: string | undefined }`,
+                ),
+            );
         } else {
             const type: string = Object.keys(event)[0];
             const body: any = event[type];
-
             this._emit(type, body);
         }
     }
@@ -283,8 +260,8 @@ export abstract class Computation<TEvents, IEventsSignatures, IEventsInterfaces>
         Object.keys(this.getEvents() as unknown as object).forEach((key: string) => {
             (this.getEvents() as any)[key].destroy();
         });
-        Object.keys(this._tracking.subjects).forEach((key: string) => {
-            (this._tracking.subjects as any)[key].destroy();
+        Object.keys(this.tracking.subjects).forEach((key: string) => {
+            (this.tracking.subjects as any)[key].destroy();
         });
         this.logger.debug(`Provider has been destroyed.`);
     }
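The `Computation` base class now receives its binary decoder from the subclass, and `_emitter` accepts a `Uint8Array` directly: the decoded event is expected to be either a bare string (no payload) or a single-key object `{ [type]: body }`. A self-contained sketch of that dispatch contract; the `decoder` parameter stands in for whichever `protocol.decode*` function a concrete provider passes to `super()`:

```typescript
// Shape of a decoded callback event, as _emitter above expects it.
type CallbackEvent = string | { [type: string]: unknown };

function dispatch(
    buf: Uint8Array,
    decoder: (buf: Uint8Array) => CallbackEvent,
    emit: (type: string, body: unknown) => void,
    onError: (msg: string) => void,
): void {
    let event: CallbackEvent;
    try {
        event = decoder(buf);
    } catch (err) {
        onError(`Fail to decode CallbackEvent: ${err}`);
        return;
    }
    if (typeof event === 'string') {
        // Events without a payload arrive as a bare string
        emit(event, null);
    } else if (typeof event === 'object' && event !== null && Object.keys(event).length === 1) {
        // Events with a payload arrive as a single-key object: { [type]: body }
        const type = Object.keys(event)[0];
        emit(type, event[type]);
    } else {
        onError(`Incorrect event data: ${JSON.stringify(event)}`);
    }
}
```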
diff --git a/application/apps/rustcore/ts-bindings/yarn.lock b/application/apps/rustcore/ts-bindings/yarn.lock
index 560dfa4b3b..f77228446e 100644
--- a/application/apps/rustcore/ts-bindings/yarn.lock
+++ b/application/apps/rustcore/ts-bindings/yarn.lock
@@ -3385,6 +3385,12 @@ __metadata:
   languageName: node
   linkType: hard
 
+"protocol@link:../../protocol/pkg::locator=rustcore%40workspace%3A.":
+  version: 0.0.0-use.local
+  resolution: "protocol@link:../../protocol/pkg::locator=rustcore%40workspace%3A."
+  languageName: node
+  linkType: soft
+
 "pump@npm:^3.0.0":
   version: 3.0.0
   resolution: "pump@npm:3.0.0"
@@ -3581,6 +3587,7 @@ __metadata:
     jasmine: "npm:^5.1.0"
     loglevel: "npm:^1.6.6"
     platform: "link:../../../platform"
+    protocol: "link:../../protocol/pkg"
     tmp: "npm:^0.2.3"
     ts-node: "npm:^10.4.0"
     tslib: "npm:^2.6.2"
diff --git a/application/client/src/app/schema/content/row.ts b/application/client/src/app/schema/content/row.ts
index a975b82f4d..ec52e00f3e 100644
--- a/application/client/src/app/schema/content/row.ts
+++ b/application/client/src/app/schema/content/row.ts
@@ -1,7 +1,8 @@
 import { Session } from '@service/session/session';
 import { Subject, Subscriber } from '@platform/env/subscription';
-import { IGrabbedElement, Nature } from '@platform/types/content';
+import { Nature } from '@platform/types/content';
 import { EAlias } from '@service/session/dependencies/search/highlights/modifier';
+import { GrabbedElement } from '@platform/types/bindings/miscellaneous';
 
 export enum Owner {
     Output = 'Output',
@@ -97,12 +98,12 @@ export class Row extends Subscriber {
     }
 
     public as(): {
-        grabbed(): IGrabbedElement;
+        grabbed(): GrabbedElement;
     } {
         return {
-            grabbed: (): IGrabbedElement => {
+            grabbed: (): GrabbedElement => {
                 return {
-                    position: this.position,
+                    pos: this.position,
                     source_id: this.source,
                     content: this.content,
                     nature: 0,
diff --git a/application/client/src/app/service/bridge.ts b/application/client/src/app/service/bridge.ts
index f289a73724..5b4fb1ada2 100644
--- a/application/client/src/app/service/bridge.ts
+++ b/application/client/src/app/service/bridge.ts
@@ -1,9 +1,9 @@
 import { SetupService, Interface, Implementation, register } from '@platform/entity/service';
 import { services } from '@register/services';
 import { File, Entity } from '@platform/types/files';
+import { FolderEntity } from '@platform/types/bindings';
 import { FileType } from '@platform/types/observe/types/file';
-import { ShellProfile } from '@platform/types/shells';
-import { StatisticInfo } from '@platform/types/observe/parser/dlt';
+import { DltStatisticInfo, Profile } from '@platform/types/bindings';
 import { Entry } from '@platform/types/storage/entry';
 import { error } from '@platform/log/utils';
 
@@ -12,7 +12,7 @@ import * as Requests from '@platform/ipc/request/index';
 @SetupService(services['bridge'])
 export class Service extends Implementation {
     protected cache: {
-        shells: ShellProfile[] | undefined;
+        shells: Profile[] | undefined;
         files: Map<string, File>;
         checksums: Map<string, string>;
     } = {
@@ -22,7 +22,7 @@ export class Service extends Implementation {
     };
     protected queue: {
         shells: Array<{
-            resolve: (profiles: ShellProfile[]) => void;
+            resolve: (profiles: Profile[]) => void;
             reject: (err: Error) => void;
         }>;
     } = {
@@ -49,7 +49,7 @@ export class Service extends Implementation {
             depth: number;
             max: number;
             include?: { files: boolean; folders: boolean };
-        }): Promise<{ entities: Entity[]; max: boolean }>;
+        }): Promise<{ entities: FolderEntity[]; max: boolean }>;
         stat(path: string): Promise<Entity>;
         checksum(filename: string): Promise<string>;
         isBinary(file: string): Promise<boolean>;
@@ -137,7 +137,7 @@ export class Service extends Implementation {
                 depth: number;
                 max: number;
                 include?: { files: boolean; folders: boolean };
-            }): Promise<{ entities: Entity[]; max: boolean }> {
+            }): Promise<{ entities: FolderEntity[]; max: boolean }> {
                 return Requests.IpcRequest.send(
                     Requests.Os.List.Response,
                     new Requests.Os.List.Request(
@@ -399,10 +399,10 @@ export class Service extends Implementation {
     }
 
     public dlt(): {
-        stat(files: string[]): Promise<StatisticInfo>;
+        stat(files: string[]): Promise<DltStatisticInfo>;
     } {
         return {
-            stat: (files: string[]): Promise<StatisticInfo> => {
+            stat: (files: string[]): Promise<DltStatisticInfo> => {
                 return new Promise((resolve, reject) => {
                     Requests.IpcRequest.send(
                         Requests.Dlt.Stat.Response,
@@ -536,7 +536,7 @@ export class Service extends Implementation {
 
     public os(): {
         homedir(): Promise<string>;
-        shells(): Promise<ShellProfile[]>;
+        shells(): Promise<Profile[]>;
         envvars(): Promise<Map<string, string>>;
     } {
         return {
@@ -552,7 +552,7 @@ export class Service extends Implementation {
                         .catch(reject);
                 });
             },
-            shells: (): Promise<ShellProfile[]> => {
+            shells: (): Promise<Profile[]> => {
                 return new Promise((resolve, reject) => {
                     if (this.cache.shells !== undefined) {
                         resolve(this.cache.shells);
@@ -564,12 +564,10 @@ export class Service extends Implementation {
                                 new Requests.Os.Shells.Request(),
                             )
                                 .then((response) => {
-                                    this.cache.shells = response.profiles
-                                        .map((p) => ShellProfile.fromObj(p))
-                                        .filter((p) => p instanceof ShellProfile) as ShellProfile[];
+                                    this.cache.shells = response.profiles;
                                     this.queue.shells
                                         .map((h) => h.resolve)
-                                        .forEach((r) => r(this.cache.shells as ShellProfile[]));
+                                        .forEach((r) => r(this.cache.shells as Profile[]));
                                 })
                                 .catch((err: Error) => {
                                     this.queue.shells.map((h) => h.reject).forEach((r) => r(err));
diff --git a/application/client/src/app/service/favorites.ts b/application/client/src/app/service/favorites.ts
index 179d70bf2a..e14ca56228 100644
--- a/application/client/src/app/service/favorites.ts
+++ b/application/client/src/app/service/favorites.ts
@@ -98,7 +98,7 @@ export class Service extends Implementation {
                 const accepted: string[] = [];
                 for (const path of paths) {
                     const stat = await bridge.files().stat(path);
-                    if (stat.type === EntityType.Directory) {
+                    if (stat.kind === EntityType.Directory) {
                         accepted.push(path);
                     }
                 }
diff --git a/application/client/src/app/service/session/dependencies/charts/cursor.ts b/application/client/src/app/service/session/dependencies/charts/cursor.ts
index 7814b12c46..e31129d90a 100644
--- a/application/client/src/app/service/session/dependencies/charts/cursor.ts
+++ b/application/client/src/app/service/session/dependencies/charts/cursor.ts
@@ -39,11 +39,11 @@ export class Cursor {
     }
 
     public setFrame(frame: IRange) {
-        if (frame.from < 0 || frame.to < 0 || frame.from > frame.to) {
+        if (frame.start < 0 || frame.end < 0 || frame.start > frame.end) {
             throw new Error(`Invalid cursor`);
         }
-        this.from = frame.from;
-        this.to = frame.to;
+        this.from = frame.start;
+        this.to = frame.end;
         this.subjects.get().position.emit();
     }
 
@@ -65,7 +65,7 @@ export class Cursor {
         if (this.from < 0 || this.to < 0 || this.from > this.to) {
             return undefined;
         }
-        return { from: this.from, to: this.to };
+        return { start: this.from, end: this.to };
     }
 
     public getWidth(): number {
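This hunk, and most of the client-side hunks that follow, are the mechanical half of the `IRange` rename: `{ from, to }` becomes `{ start, end }` while the validation logic stays the same. A minimal stand-in for the renamed shape and the check `setFrame` keeps applying (the real `IRange` lives in `@platform/types/range`):

```typescript
// Stand-in for the renamed range interface.
interface IRange {
    start: number;
    end: number;
}

// Mirrors the guard in Cursor.setFrame: non-negative bounds, start <= end.
function assertValidFrame(frame: IRange): void {
    if (frame.start < 0 || frame.end < 0 || frame.start > frame.end) {
        throw new Error(`Invalid cursor`);
    }
}

// Usage: assertValidFrame({ start: 0, end: 99 });
```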
diff --git a/application/client/src/app/service/session/dependencies/charts/index.ts b/application/client/src/app/service/session/dependencies/charts/index.ts
index deb8b670f9..683791ef46 100644
--- a/application/client/src/app/service/session/dependencies/charts/index.ts
+++ b/application/client/src/app/service/session/dependencies/charts/index.ts
@@ -1,7 +1,7 @@
 import { SetupLogger, LoggerInterface } from '@platform/entity/logger';
 import { Subject, Subjects, Subscriber } from '@platform/env/subscription';
 import { isDevMode } from '@angular/core';
-import { IValuesMap, IValuesMinMaxMap, ISearchMap } from '@platform/types/filter';
+import { ISearchMap } from '@platform/types/filter';
 import { cutUuid } from '@log/index';
 import { IRange } from '@platform/types/range';
 import { Cursor } from './cursor';
@@ -9,13 +9,14 @@ import { Stream } from '../stream';
 import { Search } from '../search';
 import { FilterRequest } from '../search/filters/request';
 import { ChartRequest } from '../search/charts/request';
+import { ResultSearchValues } from '@platform/types/bindings';
 
 import * as Requests from '@platform/ipc/request';
 import * as Events from '@platform/ipc/event';
 
 export interface Output {
-    peaks: IValuesMinMaxMap;
-    values: IValuesMap;
+    peaks: Map<number, [number, number]>;
+    values: ResultSearchValues;
     map: ISearchMap;
     frame: IRange;
     filters: FilterRequest[];
@@ -30,11 +31,11 @@ export interface Output {
 export class Charts extends Subscriber {
     public cursor: Cursor = new Cursor();
     public subjects: Subjects<{
-        peaks: Subject<IValuesMinMaxMap>;
+        peaks: Subject<Map<number, [number, number]>>;
         output: Subject<Output>;
         summary: Subject<Output>;
     }> = new Subjects({
-        peaks: new Subject<IValuesMinMaxMap>(),
+        peaks: new Subject<Map<number, [number, number]>>(),
         output: new Subject<Output>(),
         summary: new Subject<Output>(),
     });
@@ -42,7 +43,7 @@ export class Charts extends Subscriber {
     protected stream!: Stream;
     protected search!: Search;
     protected uuid!: string;
-    protected peaks: IValuesMinMaxMap = {};
+    protected peaks: Map<number, [number, number]> = new Map();
     protected lengths: {
         stream: number;
         search: number;
@@ -94,7 +95,7 @@ export class Charts extends Subscriber {
                     })
                     .catch((err: Error) => {
                         this.log().error(
-                            `Fail load output frame ${frame.from}-${frame.to}: ${err.message}`,
+                            `Fail load output frame ${frame.start}-${frame.end}: ${err.message}`,
                         );
                     })
                     .finally(() => {
@@ -120,7 +121,7 @@ export class Charts extends Subscriber {
                 if (this.progress.summary !== undefined) {
                     return;
                 }
-                const frame = { from: 0, to: this.lengths.stream - 1 };
+                const frame = { start: 0, end: this.lengths.stream - 1 };
                 this.progress.summary = hash();
                 this.reload()
                     .load(frame)
@@ -130,7 +131,7 @@ export class Charts extends Subscriber {
                     })
                     .catch((err: Error) => {
                         this.log().error(
-                            `Fail load summary frame ${frame.from}-${frame.to}: ${err.message}`,
+                            `Fail load summary frame ${frame.start}-${frame.end}: ${err.message}`,
                         );
                     })
                     .finally(() => {
@@ -167,7 +168,7 @@ export class Charts extends Subscriber {
                         this.scaled(width, frame).values(),
                         this.scaled(Math.floor(width / 2), frame).matches(),
                     ])
-                        .then((results: [IValuesMap, ISearchMap]) => {
+                        .then((results: [ResultSearchValues, ISearchMap]) => {
                             resolve(
                                 this.reload().defs({
                                     peaks: this.peaks,
@@ -193,19 +194,19 @@ export class Charts extends Subscriber {
                 return isDevMode() ? this.reload().validation(output) : output;
             },
             validation: (output: Output): Output => {
-                let invalid: [number, number, number, number][] = [];
-                Object.keys(output.values).forEach((k: string) => {
-                    invalid = invalid.concat(
-                        output.values[parseInt(k, 10)].filter((d) => typeof d[3] !== 'number'),
-                    );
-                });
-                if (invalid.length !== 0) {
-                    this.log().error(
-                        `Invalid data for charts; found NONE number values on (rows): ${invalid
-                            .map((d) => d[0])
-                            .join(', ')}`,
-                    );
-                }
+                // let invalid: [number, number, number, number][] = [];
+                // Object.keys(output.values).forEach((k: string) => {
+                //     invalid = invalid.concat(
+                //         output.values[parseInt(k, 10)].filter((d) => typeof d[3] !== 'number'),
+                //     );
+                // });
+                // if (invalid.length !== 0) {
+                //     this.log().error(
+                //         `Invalid data for charts; found NONE number values on (rows): ${invalid
+                //             .map((d) => d[0])
+                //             .join(', ')}`,
+                //     );
+                // }
                 return output;
             },
             requests: (): { filters: FilterRequest[]; charts: ChartRequest[]; active: boolean } => {
@@ -234,18 +235,18 @@ export class Charts extends Subscriber {
         datasetLength: number,
         range?: IRange,
     ): {
-        values(): Promise<IValuesMap>;
+        values(): Promise<ResultSearchValues>;
         matches(): Promise<ISearchMap>;
     } {
         return {
-            values: (): Promise<IValuesMap> => {
+            values: (): Promise<ResultSearchValues> => {
                 return Requests.IpcRequest.send(
                     Requests.Values.Frame.Response,
                     new Requests.Values.Frame.Request({
                         session: this.uuid,
                         width: datasetLength,
-                        from: range !== undefined ? range.from : undefined,
-                        to: range !== undefined ? range.to : undefined,
+                        from: range !== undefined ? range.start : undefined,
+                        to: range !== undefined ? range.end : undefined,
                     }),
                 ).then((response) => {
                     if (typeof response.error === 'string' && response.error.trim().length > 0) {
@@ -262,8 +263,8 @@ export class Charts extends Subscriber {
                     new Requests.Search.Map.Request({
                         session: this.uuid,
                         len: datasetLength,
-                        from: range ? range.from : undefined,
-                        to: range ? range.to : undefined,
+                        from: range ? range.start : undefined,
+                        to: range ? range.end : undefined,
                     }),
                 ).then((response) => {
                     return response.map;
@@ -282,7 +283,7 @@ export class Charts extends Subscriber {
                 if (event.session !== this.uuid) {
                     return;
                 }
-                this.peaks = event.map === null ? {} : event.map;
+                this.peaks = event.map === null ? new Map() : event.map;
                 this.subjects.get().peaks.emit(this.peaks);
                 this.reload().both();
             }),
@@ -328,7 +329,7 @@ export class Charts extends Subscriber {
         this.unsubscribe();
     }
 
-    public getPeaks(): IValuesMinMaxMap {
+    public getPeaks(): Map<number, [number, number]> {
         return this.peaks;
     }
 
diff --git a/application/client/src/app/service/session/dependencies/comments/index.ts b/application/client/src/app/service/session/dependencies/comments/index.ts
index 7b6a8aa6eb..957134c862 100644
--- a/application/client/src/app/service/session/dependencies/comments/index.ts
+++ b/application/client/src/app/service/session/dependencies/comments/index.ts
@@ -213,8 +213,8 @@ export class Comments extends Subscriber {
         const stored = remember();
         const origin = (
             await this.session.stream.grab([
-                { from: selection.rows.start, to: selection.rows.start },
-                { from: selection.rows.end, to: selection.rows.end },
+                { start: selection.rows.start, end: selection.rows.start },
+                { start: selection.rows.end, end: selection.rows.end },
             ])
         ).map((el) => {
             el.content = safeEscapeAnsi(el.content);
diff --git a/application/client/src/app/service/session/dependencies/indexed.ts b/application/client/src/app/service/session/dependencies/indexed.ts
index 11f38568e8..883e45101d 100644
--- a/application/client/src/app/service/session/dependencies/indexed.ts
+++ b/application/client/src/app/service/session/dependencies/indexed.ts
@@ -1,8 +1,9 @@
 import { SetupLogger, LoggerInterface } from '@platform/entity/logger';
 import { Subscriber, Subjects, Subject } from '@platform/env/subscription';
 import { cutUuid } from '@log/index';
-import { IGrabbedElement, IndexingMode } from '@platform/types/content';
+import { IndexingMode } from '@platform/types/content';
 import { Range, IRange } from '@platform/types/range';
+import { GrabbedElement } from '@platform/types/bindings/miscellaneous';
 
 import * as Requests from '@platform/ipc/request';
 import * as Events from '@platform/ipc/event';
@@ -84,7 +85,7 @@ export class Indexed extends Subscriber {
         };
     }
 
-    public grab(range: Range | IRange): Promise<IGrabbedElement[]> {
+    public grab(range: Range | IRange): Promise<GrabbedElement[]> {
         if (this._len === 0) {
             // TODO: Grabber is crash session in this case... should be prevented on grabber level
             return Promise.resolve([]);
@@ -93,15 +94,15 @@ export class Indexed extends Subscriber {
             Requests.Stream.Indexed.Response,
             new Requests.Stream.Indexed.Request({
                 session: this._uuid,
-                from: range.from,
-                to: range.to,
+                from: range.start,
+                to: range.end,
             }),
         )
             .then((response: Requests.Stream.Indexed.Response) => {
                 return response.rows;
             })
             .catch((error: Error) => {
-                if (range.to >= this.len()) {
+                if (range.end >= this.len()) {
                     // It might be, during request search map has been changed already
                     // For example we requested range, but right after it, a new search
                     // was created and length of stream became 0
diff --git a/application/client/src/app/service/session/dependencies/search.ts b/application/client/src/app/service/session/dependencies/search.ts
index b00a880381..cfed010be8 100644
--- a/application/client/src/app/service/session/dependencies/search.ts
+++ b/application/client/src/app/service/session/dependencies/search.ts
@@ -1,6 +1,6 @@
 import { SetupLogger, LoggerInterface } from '@platform/entity/logger';
 import { Subscriber, Subjects, Subject } from '@platform/env/subscription';
-import { ISearchMap, INearest } from '@platform/types/filter';
+import { ISearchMap } from '@platform/types/filter';
 import { cutUuid } from '@log/index';
 import { IFilter, ISearchUpdated } from '@platform/types/filter';
 import { IRange } from '@platform/types/range';
@@ -8,6 +8,7 @@ import { FilterRequest, FiltersStore } from './search/filters/store';
 import { DisableStore } from './search/disabled/store';
 import { ChartsStore } from './search/charts/store';
 import { State } from './search/state';
+import { NearestPosition } from '@platform/types/bindings';
 
 import * as Requests from '@platform/ipc/request';
 import * as Events from '@platform/ipc/event';
@@ -136,8 +137,8 @@ export class Search extends Subscriber {
                 new Requests.Search.Map.Request({
                     session: this._uuid,
                     len,
-                    from: range ? range.from : undefined,
-                    to: range ? range.to : undefined,
+                    from: range ? range.start : undefined,
+                    to: range ? range.end : undefined,
                 }),
             )
                 .then((response) => {
@@ -162,7 +163,7 @@ export class Search extends Subscriber {
         });
     }
 
-    public nearest(stream: number): Promise<INearest | undefined> {
+    public nearest(stream: number): Promise<NearestPosition | undefined> {
         return new Promise((resolve) => {
             Requests.IpcRequest.send(
                 Requests.Search.Nearest.Response,
diff --git a/application/client/src/app/service/session/dependencies/stream.ts b/application/client/src/app/service/session/dependencies/stream.ts
index 22ab3f4846..0408bb0eae 100644
--- a/application/client/src/app/service/session/dependencies/stream.ts
+++ b/application/client/src/app/service/session/dependencies/stream.ts
@@ -3,7 +3,7 @@ import { Subscriber, Subjects, Subject } from '@platform/env/subscription';
 import { Range, IRange } from '@platform/types/range';
 import { cutUuid } from '@log/index';
 import { Rank } from './rank';
-import { IGrabbedElement } from '@platform/types/content';
+import { GrabbedElement } from '@platform/types/bindings/miscellaneous';
 import { Observe } from '@platform/types/observe';
 import { ObserveOperation } from './observing/operation';
 import { ObserveSource } from './observing/source';
@@ -289,7 +289,7 @@ export class Stream extends Subscriber {
         };
     }
 
-    public chunk(range: Range): Promise<IGrabbedElement[]> {
+    public chunk(range: Range): Promise<GrabbedElement[]> {
         if (this._len === 0) {
             // TODO: Grabber is crash session in this case... should be prevented on grabber level
             return Promise.resolve([]);
@@ -299,8 +299,8 @@ export class Stream extends Subscriber {
                 Requests.Stream.Chunk.Response,
                 new Requests.Stream.Chunk.Request({
                     session: this._uuid,
-                    from: range.from,
-                    to: range.to,
+                    from: range.start,
+                    to: range.end,
                 }),
             )
                 .then((response: Requests.Stream.Chunk.Response) => {
@@ -312,7 +312,7 @@ export class Stream extends Subscriber {
         });
     }
 
-    public grab(ranges: IRange[]): Promise<IGrabbedElement[]> {
+    public grab(ranges: IRange[]): Promise<GrabbedElement[]> {
         if (this._len === 0) {
             // TODO: Grabber is crash session in this case... should be prevented on grabber level
             return Promise.resolve([]);
diff --git a/application/client/src/app/ui/elements/navigator/providers/provider.files.ts b/application/client/src/app/ui/elements/navigator/providers/provider.files.ts
index c4220c8bfc..494d7b8f9e 100644
--- a/application/client/src/app/ui/elements/navigator/providers/provider.files.ts
+++ b/application/client/src/app/ui/elements/navigator/providers/provider.files.ts
@@ -2,8 +2,9 @@ import { IFileDescription } from './entity';
 import { Provider as Base, INoContentActions, IStatistics } from './provider';
 import { favorites } from '@service/favorites';
 import { bridge } from '@service/bridge';
-import { EntityType, getFileName } from '@platform/types/files';
+import { getFileName } from '@platform/types/files';
 import { notifications, Notification } from '@ui/service/notifications';
+import { FolderEntityType } from '@platform/types/bindings';
 
 import * as Factory from '@platform/types/observe/factory';
 import { IMenuItem } from '@ui/service/contextmenu';
@@ -46,7 +47,7 @@ export class Provider extends Base<IFileDescription> {
             if (this.isAborted()) {
                 return;
             }
-            if (entity.type === EntityType.File && entity.details !== undefined) {
+            if (entity.kind === FolderEntityType.File && entity.details) {
                 items.push({
                     filename: entity.fullname,
                     name: entity.name,
@@ -138,11 +139,13 @@ export class Provider extends Base<IFileDescription> {
                     });
             },
             auto: (): void => {
-                bridge.files().getByPath([item.filename])
+                bridge
+                    .files()
+                    .getByPath([item.filename])
                     .then((files) => {
                         if (files.length > 1) {
-                            this.ilc.log().info("More than one file detected");
-                            return
+                            this.ilc.log().info('More than one file detected');
+                            return;
                         }
                         const filetype = files[0].type;
                         if (filetype === Factory.FileType.Text) {
@@ -150,10 +153,16 @@ export class Provider extends Base<IFileDescription> {
                                 .ilc()
                                 .services.system.session.initialize()
                                 .observe(
-                                    new Factory.File().asText().type(filetype).file(item.filename).get(),
+                                    new Factory.File()
+                                        .asText()
+                                        .type(filetype)
+                                        .file(item.filename)
+                                        .get(),
                                 )
                                 .catch((err: Error) => {
-                                    this.ilc.log().error(`Fail to open text file; error: ${err.message}`);
+                                    this.ilc
+                                        .log()
+                                        .error(`Fail to open text file; error: ${err.message}`);
                                 });
                         } else {
                             this.ilc
@@ -167,14 +176,15 @@ export class Provider extends Base<IFileDescription> {
                                         .get(),
                                 )
                                 .catch((err: Error) => {
-                                    this.ilc.log().error(`Fail to open text file; error: ${err.message}`);
+                                    this.ilc
+                                        .log()
+                                        .error(`Fail to open text file; error: ${err.message}`);
                                 });
                         }
                     })
                     .catch((error) => {
                         this.ilc.log().error(error);
                     });
-
             },
         };
     }
diff --git a/application/client/src/app/ui/elements/scrollarea/component.ts b/application/client/src/app/ui/elements/scrollarea/component.ts
index 3dee38653e..4dd06fa934 100644
--- a/application/client/src/app/ui/elements/scrollarea/component.ts
+++ b/application/client/src/app/ui/elements/scrollarea/component.ts
@@ -178,7 +178,7 @@ export class ScrollAreaComponent extends ChangesDetector implements OnDestroy, A
     }
 
     public getFrameStart(): number {
-        return this.frame.get().from;
+        return this.frame.get().start;
     }
 
     public isSourceSwitched(i: number): boolean {
diff --git a/application/client/src/app/ui/elements/scrollarea/controllers/range.ts b/application/client/src/app/ui/elements/scrollarea/controllers/range.ts
index 7fdaf60bbf..f85926e92f 100644
--- a/application/client/src/app/ui/elements/scrollarea/controllers/range.ts
+++ b/application/client/src/app/ui/elements/scrollarea/controllers/range.ts
@@ -19,9 +19,9 @@ export class Range {
                 .max(true)
                 .$(defaults.len)
                 .len()
-                .$(defaults.range.from)
+                .$(defaults.range.start)
                 .from()
-                .$(defaults.range.to)
+                .$(defaults.range.end)
                 .to();
         }
     }
@@ -90,7 +90,7 @@ export class Range {
     }
 
     public equal(range: IRange): boolean {
-        return this.range.from === range.from && this.range.to === range.to;
+        return this.range.from === range.start && this.range.to === range.end;
     }
 
     public onChange(handler: (ci: ChangesInitiator) => void): Subscription {
@@ -98,6 +98,6 @@ export class Range {
     }
 
     public hash(): string {
-        return `${this.getTotal()}:${this.get().from}-${this.get().to}`;
+        return `${this.getTotal()}:${this.get().start}-${this.get().end}`;
     }
 }
diff --git a/application/client/src/app/ui/elements/scrollarea/controllers/selection.ts b/application/client/src/app/ui/elements/scrollarea/controllers/selection.ts
index afaf63e47e..653c7750bb 100644
--- a/application/client/src/app/ui/elements/scrollarea/controllers/selection.ts
+++ b/application/client/src/app/ui/elements/scrollarea/controllers/selection.ts
@@ -127,8 +127,8 @@ export class Selecting {
             return;
         }
         if (
-            (focus.row < frame.from && anchor.row < frame.from) ||
-            (focus.row > frame.to && anchor.row > frame.to)
+            (focus.row < frame.start && anchor.row < frame.start) ||
+            (focus.row > frame.end && anchor.row > frame.end)
         ) {
             if (selection !== null) {
                 selection.removeAllRanges();
@@ -146,19 +146,19 @@ export class Selecting {
             focusPath = focus.path;
         } else if (focus.row > anchor.row) {
             // Direction: down
-            anchorOffset = anchor.row < frame.from ? 0 : anchor.offset;
-            focusOffset = focus.row > frame.to ? Infinity : focus.offset;
+            anchorOffset = anchor.row < frame.start ? 0 : anchor.offset;
+            focusOffset = focus.row > frame.end ? Infinity : focus.offset;
             anchorPath =
-                anchor.row < frame.from ? `li[${ROW_INDEX_ATTR}="${frame.from}"]` : anchor.path;
-            focusPath = focus.row > frame.to ? `li[${ROW_INDEX_ATTR}="${frame.to}"]` : focus.path;
+                anchor.row < frame.start ? `li[${ROW_INDEX_ATTR}="${frame.start}"]` : anchor.path;
+            focusPath = focus.row > frame.end ? `li[${ROW_INDEX_ATTR}="${frame.end}"]` : focus.path;
         } else if (focus.row < anchor.row) {
             // Direction: up
-            anchorOffset = anchor.row > frame.to ? Infinity : anchor.offset;
-            focusOffset = focus.row < frame.from ? 0 : focus.offset;
+            anchorOffset = anchor.row > frame.end ? Infinity : anchor.offset;
+            focusOffset = focus.row < frame.start ? 0 : focus.offset;
             anchorPath =
-                anchor.row > frame.to ? `li[${ROW_INDEX_ATTR}="${frame.to}"]` : anchor.path;
+                anchor.row > frame.end ? `li[${ROW_INDEX_ATTR}="${frame.end}"]` : anchor.path;
             focusPath =
-                focus.row < frame.from ? `li[${ROW_INDEX_ATTR}="${frame.from}"]` : focus.path;
+                focus.row < frame.start ? `li[${ROW_INDEX_ATTR}="${frame.start}"]` : focus.path;
         }
         if (selection === null) {
             return;
@@ -237,11 +237,11 @@ export class Selecting {
                 switch (this._directed.direction) {
                     case SelectionDirection.Top:
                         this._frame.offsetToByRows(-1, ChangesInitiator.Selecting);
-                        this._selection.focus.setToRow(this._frame.get().from);
+                        this._selection.focus.setToRow(this._frame.get().start);
                         break;
                     case SelectionDirection.Bottom:
                         this._frame.offsetToByRows(1, ChangesInitiator.Selecting);
-                        this._selection.focus.setToRow(this._frame.get().to);
+                        this._selection.focus.setToRow(this._frame.get().end);
                         break;
                 }
                 this._holder.focus();
@@ -346,7 +346,7 @@ export class Selecting {
             return Promise.resolve();
         }
         const rows = (
-            await this._service.getRows({ from: selection.rows.start, to: selection.rows.end })
+            await this._service.getRows({ start: selection.rows.start, end: selection.rows.end })
         ).rows.map((r) => {
             if (this._delimiter === undefined) {
                 const escaped = escapeAnsi(r.content);
diff --git a/application/client/src/app/ui/elements/scrollarea/controllers/service.ts b/application/client/src/app/ui/elements/scrollarea/controllers/service.ts
index bb474b0013..cb454b41eb 100644
--- a/application/client/src/app/ui/elements/scrollarea/controllers/service.ts
+++ b/application/client/src/app/ui/elements/scrollarea/controllers/service.ts
@@ -49,7 +49,7 @@ export class Service implements Destroy {
     }) {
         this.setFrame = (range: SafeRange) => {
             api.setFrame(range);
-            this._cursor = range.from;
+            this._cursor = range.start;
         };
         this.getLen = api.getLen;
         this.getItemHeight = api.getItemHeight;
diff --git a/application/client/src/app/ui/elements/scrollarea/vertical/component.ts b/application/client/src/app/ui/elements/scrollarea/vertical/component.ts
index 0189cde372..804a33d93b 100644
--- a/application/client/src/app/ui/elements/scrollarea/vertical/component.ts
+++ b/application/client/src/app/ui/elements/scrollarea/vertical/component.ts
@@ -99,7 +99,7 @@ export class ScrollAreaVerticalComponent
                     return;
                 }
                 this.detectChanges();
-                const position = event.range.from / this._count;
+                const position = event.range.start / this._count;
                 this.elRef.nativeElement.scrollTop =
                     this.elRef.nativeElement.scrollHeight * position;
             }),
diff --git a/application/client/src/app/ui/elements/tree/entity.ts b/application/client/src/app/ui/elements/tree/entity.ts
index dc0d56739c..9499f44cc8 100644
--- a/application/client/src/app/ui/elements/tree/entity.ts
+++ b/application/client/src/app/ui/elements/tree/entity.ts
@@ -1,14 +1,14 @@
-import { Entity as IEntity, EntityType } from '@platform/types/files';
 import { Filter } from '@elements/filter/filter';
 import { getDomSanitizer } from '@ui/env/globals';
 import { SafeHtml } from '@angular/platform-browser';
 import { fromStr, serialize } from '@platform/env/regex';
 import { getFileExtention } from '@platform/types/files';
+import { FolderEntity, FolderEntityType } from '@platform/types/bindings';
 
 const EXTENTION_PATTERN = /^\*\.|^\./gi;
 
 export class Entity {
-    public readonly entity: IEntity;
+    public readonly entity: FolderEntity;
     public readonly parent: string;
     public ext: string | undefined;
     public selected: boolean = false;
@@ -19,7 +19,7 @@ export class Entity {
     protected readonly filter: Filter;
 
     constructor(
-        entity: IEntity,
+        entity: FolderEntity,
         parent: string,
         favourite: boolean,
         exists: boolean,
@@ -30,7 +30,7 @@ export class Entity {
         this.favourite = favourite;
         this.exists = exists;
         this.filter = filter;
-        if (entity.details !== undefined) {
+        if (entity.details) {
             this.ext = entity.details.ext.toUpperCase().replace('.', '');
         }
     }
@@ -40,7 +40,7 @@ export class Entity {
     }
 
     public isFolder(): boolean {
-        return this.entity.type === EntityType.Directory;
+        return this.entity.kind === FolderEntityType.Directory;
     }
 
     public getName(): string {
diff --git a/application/client/src/app/ui/elements/tree/scheme.ts b/application/client/src/app/ui/elements/tree/scheme.ts
index 31867bc63c..53a59b8675 100644
--- a/application/client/src/app/ui/elements/tree/scheme.ts
+++ b/application/client/src/app/ui/elements/tree/scheme.ts
@@ -3,12 +3,12 @@ import { FlatTreeControl } from '@angular/cdk/tree';
 import { BehaviorSubject, merge, Observable } from 'rxjs';
 import { map } from 'rxjs/operators';
 import { Entity } from './entity';
-import { EntityType } from '@platform/types/files';
 import { Services } from '@service/ilc/services';
 import { Filter } from '@elements/filter/filter';
 import { FavoritePlace } from '@service/favorites';
 import { IlcInterface } from '@service/ilc';
 import { ChangesDetector } from '@ui/env/extentions/changes';
+import { FolderEntityType } from '@platform/types/bindings';
 
 export { Entity };
 
@@ -74,8 +74,8 @@ export class DynamicDatabase {
                         {
                             name: root.path,
                             fullname: root.path,
-                            type: EntityType.Directory,
-                            details: undefined,
+                            kind: FolderEntityType.Directory,
+                            details: null,
                         },
                         '',
                         true,
diff --git a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/bases/process/component.ts b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/bases/process/component.ts
index ac8ba32e4f..7360e39cc8 100644
--- a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/bases/process/component.ts
+++ b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/bases/process/component.ts
@@ -16,7 +16,7 @@ import {
 import { Ilc, IlcInterface } from '@env/decorators/component';
 import { State } from '../../states/process';
 import { components } from '@env/decorators/initial';
-import { ShellProfile } from '@platform/types/shells';
+import { Profile } from '@platform/types/bindings';
 import { Action } from '@ui/tabs/observe/action';
 import { Session } from '@service/session';
 
@@ -196,7 +196,7 @@ export class SetupBase
         });
     }
 
-    public importEnvVars(profile: ShellProfile | undefined) {
+    public importEnvVars(profile: Profile | undefined) {
         this.state.importEnvvarsFromShell(profile).catch((err: Error) => {
             this.log().error(`Fail to save selected profile: ${err.message}`);
         });
diff --git a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/complete/process/component.ts b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/complete/process/component.ts
index eed0bde450..d35ed8ea55 100644
--- a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/complete/process/component.ts
+++ b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/complete/process/component.ts
@@ -13,6 +13,7 @@ import { Options as FoldersOptions } from '@elements/folderinput/component';
 import { Subject } from '@platform/env/subscription';
 import { CmdErrorState } from '../../bases/process/error';
 import { SetupBase } from '../../bases/process/component';
+import { Profile } from '@platform/types/bindings';
 
 @Component({
     selector: 'app-transport-process',
@@ -60,5 +61,9 @@ export class Setup extends SetupBase implements AfterContentInit, AfterViewInit,
             );
         super.ngAfterContentInit();
     }
+
+    public getEnvvarsCount(profile: Profile) {
+        return profile.envvars ? profile.envvars.size : 0;
+    }
 }
 export interface Setup extends IlcInterface {}
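Since `Profile.envvars` on the generated binding is a plain `Map` rather than a method on the old `ShellProfile` class, the template now counts variables via `Map.size` through the new `getEnvvarsCount()` helper. A small sketch under that assumption (the `Profile` interface below is a reduced stand-in for the generated one):

```ts
// Reduced stand-in for the generated Profile binding (assumed shape).
interface Profile {
    name: string;
    path: string;
    envvars: Map<string, string> | undefined;
}

// Mirrors the component helper: undefined envvars count as zero.
function getEnvvarsCount(profile: Profile): number {
    return profile.envvars ? profile.envvars.size : 0;
}

const bash: Profile = {
    name: 'bash',
    path: '/bin/bash',
    envvars: new Map([
        ['PATH', '/usr/bin'],
        ['HOME', '/home/user'],
    ]),
};
console.log(`${bash.name} (has ${getEnvvarsCount(bash)} vars)`); // bash (has 2 vars)
```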
diff --git a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/complete/process/template.html b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/complete/process/template.html
index f284601251..1c2757cde8 100644
--- a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/complete/process/template.html
+++ b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/complete/process/template.html
@@ -1,8 +1,11 @@
 <div class="command">
-    <app-autocomplete-input #cmd [options]="inputs.cmd"
+    <app-autocomplete-input
+        #cmd
+        [options]="inputs.cmd"
         (edit)="edit('cmd', $event)"
         (enter)="enter('cmd')"
-        (panel)="panel()"></app-autocomplete-input>
+        (panel)="panel()"
+    ></app-autocomplete-input>
     <button tabindex="-1" class="flat-codicon-button" [matMenuTriggerFor]="menu">
         <ng-container *ngIf="state.isProfilesLoaded()">
             <span class="codicon codicon-terminal"></span>
@@ -14,19 +17,29 @@
         </ng-container>
     </button>
 </div>
-<app-folderinput-input #cwd [options]="inputs.cwd"
+<app-folderinput-input
+    #cwd
+    [options]="inputs.cwd"
     (edit)="edit('cwd', $event)"
     (enter)="enter('cwd')"
-    (panel)="panel()"></app-folderinput-input>
+    (panel)="panel()"
+></app-folderinput-input>
 <mat-menu #menu="matMenu" class="app-transport-process-menu">
     <button mat-menu-item (click)="importEnvVars(undefined)">Use default environment</button>
     <mat-divider></mat-divider>
     <ng-container *ngIf="state.isProfilesLoaded()">
         <p class="material-menu-label">Import variables from:</p>
-        <button *ngFor="let profile of state.profiles.valid" [attr.data-selected]="state.isShellSelected(profile)" mat-menu-item (click)="importEnvVars(profile)">
+        <button
+            *ngFor="let profile of state.profiles.valid"
+            [attr.data-selected]="state.isShellSelected(profile)"
+            mat-menu-item
+            (click)="importEnvVars(profile)"
+        >
             <div class="shell-profile">
                 <span class="shell-name">{{profile.name}}</span>
-                <span class="envvars-count">{{(' (has ' + profile.getEnvvarsCount() + ' vars)')}}</span>
+                <span class="envvars-count"
+                    >{{(' (has ' + getEnvvarsCount(profile) + ' vars)')}}</span
+                >
                 <span class="shell-path">{{profile.path}}</span>
             </div>
         </button>
@@ -37,4 +50,4 @@
     </ng-container>
     <mat-divider></mat-divider>
     <button mat-menu-item (click)="showEnvVars()">Show current variables</button>
-</mat-menu>
\ No newline at end of file
+</mat-menu>
diff --git a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/quick/process/component.ts b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/quick/process/component.ts
index 3570507b63..386a892af2 100644
--- a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/quick/process/component.ts
+++ b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/quick/process/component.ts
@@ -11,6 +11,7 @@ import { Options as FoldersOptions } from '@elements/folderinput/component';
 import { Subject } from '@platform/env/subscription';
 import { CmdErrorState } from '../../bases/process/error';
 import { SetupBase } from '../../bases/process/component';
+import { Profile } from '@platform/types/bindings';
 
 @Component({
     selector: 'app-process-quicksetup',
@@ -46,6 +47,10 @@ export class QuickSetup extends SetupBase implements AfterContentInit, OnDestroy
         this.setInputs(this.inputs);
     }
 
+    public getEnvvarsCount(profile: Profile) {
+        return profile.envvars ? profile.envvars.size : 0;
+    }
+
     public destroy(): Promise<void> {
         return Promise.resolve();
     }
diff --git a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/quick/process/template.html b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/quick/process/template.html
index f284601251..1c2757cde8 100644
--- a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/quick/process/template.html
+++ b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/quick/process/template.html
@@ -1,8 +1,11 @@
 <div class="command">
-    <app-autocomplete-input #cmd [options]="inputs.cmd"
+    <app-autocomplete-input
+        #cmd
+        [options]="inputs.cmd"
         (edit)="edit('cmd', $event)"
         (enter)="enter('cmd')"
-        (panel)="panel()"></app-autocomplete-input>
+        (panel)="panel()"
+    ></app-autocomplete-input>
     <button tabindex="-1" class="flat-codicon-button" [matMenuTriggerFor]="menu">
         <ng-container *ngIf="state.isProfilesLoaded()">
             <span class="codicon codicon-terminal"></span>
@@ -14,19 +17,29 @@
         </ng-container>
     </button>
 </div>
-<app-folderinput-input #cwd [options]="inputs.cwd"
+<app-folderinput-input
+    #cwd
+    [options]="inputs.cwd"
     (edit)="edit('cwd', $event)"
     (enter)="enter('cwd')"
-    (panel)="panel()"></app-folderinput-input>
+    (panel)="panel()"
+></app-folderinput-input>
 <mat-menu #menu="matMenu" class="app-transport-process-menu">
     <button mat-menu-item (click)="importEnvVars(undefined)">Use default environment</button>
     <mat-divider></mat-divider>
     <ng-container *ngIf="state.isProfilesLoaded()">
         <p class="material-menu-label">Import variables from:</p>
-        <button *ngFor="let profile of state.profiles.valid" [attr.data-selected]="state.isShellSelected(profile)" mat-menu-item (click)="importEnvVars(profile)">
+        <button
+            *ngFor="let profile of state.profiles.valid"
+            [attr.data-selected]="state.isShellSelected(profile)"
+            mat-menu-item
+            (click)="importEnvVars(profile)"
+        >
             <div class="shell-profile">
                 <span class="shell-name">{{profile.name}}</span>
-                <span class="envvars-count">{{(' (has ' + profile.getEnvvarsCount() + ' vars)')}}</span>
+                <span class="envvars-count"
+                    >{{(' (has ' + getEnvvarsCount(profile) + ' vars)')}}</span
+                >
                 <span class="shell-path">{{profile.path}}</span>
             </div>
         </button>
@@ -37,4 +50,4 @@
     </ng-container>
     <mat-divider></mat-divider>
     <button mat-menu-item (click)="showEnvVars()">Show current variables</button>
-</mat-menu>
\ No newline at end of file
+</mat-menu>
diff --git a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/states/process.ts b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/states/process.ts
index 16c9ca27b8..c7380b1d99 100644
--- a/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/states/process.ts
+++ b/application/client/src/app/ui/tabs/observe/origin/stream/transport/setup/states/process.ts
@@ -1,4 +1,4 @@
-import { ShellProfile } from '@platform/types/shells';
+import { Profile } from '@platform/types/bindings';
 import { bridge } from '@service/bridge';
 import { Destroy } from '@platform/types/env/types';
 import { Action } from '../../../../../action';
@@ -11,15 +11,15 @@ const ENTRY_KEY = 'selected_profile_path';
 
 export class State implements Destroy {
     public profiles: {
-        all: ShellProfile[] | undefined;
-        valid: ShellProfile[] | undefined;
+        all: Profile[] | undefined;
+        valid: Profile[] | undefined;
     } = {
         all: undefined,
         valid: undefined,
     };
     // No context envvars
     public envvars: Map<string, string> = new Map();
-    public current: ShellProfile | undefined;
+    public current: Profile | undefined;
 
     constructor(
         public readonly action: Action,
@@ -30,8 +30,8 @@ export class State implements Destroy {
         // Having method "destroy()" is requirement of session's storage
     }
 
-    public setProfiles(profiles: ShellProfile[]): Promise<void> {
-        const valid: ShellProfile[] = [];
+    public setProfiles(profiles: Profile[]): Promise<void> {
+        const valid: Profile[] = [];
         profiles.forEach((profile) => {
             valid.find((p) => p.path === profile.path) === undefined &&
                 profile.envvars !== undefined &&
@@ -53,7 +53,7 @@ export class State implements Destroy {
         return this.profiles.all !== undefined;
     }
 
-    public importEnvvarsFromShell(profile: ShellProfile | undefined): Promise<void> {
+    public importEnvvarsFromShell(profile: Profile | undefined): Promise<void> {
         if (profile === undefined) {
             this.current = undefined;
             this.configuration.configuration.envs = obj.mapToObj(this.envvars);
@@ -72,7 +72,7 @@ export class State implements Destroy {
         return obj.objToStringMap(this.configuration.configuration.envs);
     }
 
-    public isShellSelected(profile: ShellProfile): boolean {
+    public isShellSelected(profile: Profile): boolean {
         return this.current ? profile.path === this.current.path : false;
     }
 
diff --git a/application/client/src/app/ui/tabs/observe/parsers/extra/dlt/state.ts b/application/client/src/app/ui/tabs/observe/parsers/extra/dlt/state.ts
index c5f467566a..142485a7e0 100644
--- a/application/client/src/app/ui/tabs/observe/parsers/extra/dlt/state.ts
+++ b/application/client/src/app/ui/tabs/observe/parsers/extra/dlt/state.ts
@@ -3,6 +3,7 @@ import { Section } from './structure/section';
 import { Summary } from './summary';
 import { StatEntity } from './structure/statentity';
 import { getTypedProp } from '@platform/env/obj';
+import { DltStatisticInfo, DltLevelDistribution } from '@platform/types/bindings';
 
 import * as Dlt from '@platform/types/observe/parser/dlt';
 
@@ -19,7 +20,7 @@ export const NAMES: { [key: string]: string } = {
 };
 export class State extends Base {
     public structure: Section[] = [];
-    public stat: Dlt.StatisticInfo | undefined;
+    public stat: DltStatisticInfo | undefined;
     public summary: {
         total: Summary;
         selected: Summary;
@@ -101,9 +102,9 @@ export class State extends Base {
                 const stat = this.stat;
                 const structure: Section[] = [];
                 ['app_ids', 'context_ids', 'ecu_ids'].forEach((key: string) => {
-                    const content: Array<[string, Dlt.LevelDistribution]> = getTypedProp<
-                        Dlt.StatisticInfo,
-                        Array<[string, Dlt.LevelDistribution]>
+                    const content: Array<[string, DltLevelDistribution]> = getTypedProp<
+                        DltStatisticInfo,
+                        Array<[string, DltLevelDistribution]>
                     >(stat, key);
                     const entities: StatEntity[] = content.map((record) => {
                         const entity = new StatEntity(record[0], key, record[1], this.matcher);
diff --git a/application/client/src/app/ui/tabs/observe/parsers/extra/dlt/structure/statentity.ts b/application/client/src/app/ui/tabs/observe/parsers/extra/dlt/structure/statentity.ts
index f7a375ef0c..fb2961ffc2 100644
--- a/application/client/src/app/ui/tabs/observe/parsers/extra/dlt/structure/statentity.ts
+++ b/application/client/src/app/ui/tabs/observe/parsers/extra/dlt/structure/statentity.ts
@@ -1,4 +1,4 @@
-import { LevelDistribution } from '@platform/types/observe/parser/dlt';
+import { DltLevelDistribution } from '@platform/types/bindings';
 import { Matchee } from '@module/matcher';
 
 import * as wasm from '@loader/wasm';
@@ -16,7 +16,7 @@ export class StatEntity extends Matchee {
     public log_verbose: number;
     public log_invalid: number;
 
-    constructor(id: string, parent: string, from: LevelDistribution, matcher: wasm.Matcher) {
+    constructor(id: string, parent: string, from: DltLevelDistribution, matcher: wasm.Matcher) {
         super(matcher, { id: id });
         this.id = id;
         this.parent = parent;
diff --git a/application/client/src/app/ui/views/sidebar/search/filters/provider.ts b/application/client/src/app/ui/views/sidebar/search/filters/provider.ts
index 26fbb5a0ec..265a72caec 100644
--- a/application/client/src/app/ui/views/sidebar/search/filters/provider.ts
+++ b/application/client/src/app/ui/views/sidebar/search/filters/provider.ts
@@ -27,10 +27,11 @@ export class ProviderFilters extends Provider<FilterRequest> {
             this.session.search.subjects.get().updated.subscribe((event) => {
                 this._entities.forEach((entity) => {
                     const alias = entity.extract().alias();
+                    const stat = event.stat.get(alias);
                     entity
                         .extract()
                         .set()
-                        .found(event.stat[alias] === undefined ? 0 : event.stat[alias]);
+                        .found(stat === undefined ? 0 : stat);
                 });
             }),
         );
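The search `updated` event's `stat` payload is now a `Map<string, number>` keyed by filter alias, so a missing alias shows up as `undefined` from `get()` instead of a missing object key. A minimal sketch of that lookup:

```ts
// Map-based lookup replacing the old keyed-object access (event.stat[alias]).
function foundFor(stat: Map<string, number>, alias: string): number {
    const count = stat.get(alias);
    return count === undefined ? 0 : count;
}

const stat = new Map<string, number>([['filter-a', 12]]);
console.log(foundFor(stat, 'filter-a')); // 12
console.log(foundFor(stat, 'filter-b')); // 0 (alias not present)
```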
diff --git a/application/client/src/app/ui/views/toolbar/chart/cursor.ts b/application/client/src/app/ui/views/toolbar/chart/cursor.ts
index 8622d0d19f..1f00a7917d 100644
--- a/application/client/src/app/ui/views/toolbar/chart/cursor.ts
+++ b/application/client/src/app/ui/views/toolbar/chart/cursor.ts
@@ -32,7 +32,7 @@ export class Cursor {
         if (!this.visible) {
             return undefined;
         }
-        return { from: this.from, to: this.to };
+        return { start: this.from, end: this.to };
     }
 
     public setStreamLen(len: number): void {
@@ -128,11 +128,11 @@ export class Cursor {
     public rowsRangeByX(x: number): IRange {
         const frame = this.to - this.from;
         const rate = this.width / frame;
-        const from = Math.floor(x / rate) + this.from;
+        const start = Math.floor(x / rate) + this.from;
         if (rate < 1) {
-            return { from, to: from + Math.floor(frame / this.width) };
+            return { start, end: start + Math.floor(frame / this.width) };
         } else {
-            return { from, to: from };
+            return { start, end: start };
         }
     }
 
diff --git a/application/client/src/app/ui/views/toolbar/chart/output/component.ts b/application/client/src/app/ui/views/toolbar/chart/output/component.ts
index f7560644ec..30d6057eb8 100644
--- a/application/client/src/app/ui/views/toolbar/chart/output/component.ts
+++ b/application/client/src/app/ui/views/toolbar/chart/output/component.ts
@@ -47,7 +47,7 @@ export class ViewChartOutput extends OutputBase implements AfterViewInit {
         const labels: Label[] = this.renders.charts.coors.get(event.offsetX);
         if (labels.length === 0) {
             this.session.cursor.select(
-                this.state.cursor.rowsRangeByX(event.offsetX).from,
+                this.state.cursor.rowsRangeByX(event.offsetX).start,
                 Owner.Chart,
                 undefined,
                 undefined,
diff --git a/application/client/src/app/ui/views/toolbar/chart/output/template.html b/application/client/src/app/ui/views/toolbar/chart/output/template.html
index c8ee248bac..405056281b 100644
--- a/application/client/src/app/ui/views/toolbar/chart/output/template.html
+++ b/application/client/src/app/ui/views/toolbar/chart/output/template.html
@@ -1,10 +1,10 @@
 <canvas #canvas [attr.width]="rect.width" [attr.height]="rect.height"></canvas>
 <span class="pointer" *ngIf="x !== undefined" [ngStyle]="{'left': x}">
-    <span class="label range" *ngIf="((range !== undefined) && (range.from !== range.to))"
-        >{{range.from}} - {{range.to}}</span
+    <span class="label range" *ngIf="((range !== undefined) && (range.start !== range.end))"
+        >{{range.start}} - {{range.end}}</span
     >
-    <span class="label range" *ngIf="((range !== undefined) && (range.from === range.to))"
-        >{{range.from}}</span
+    <span class="label range" *ngIf="((range !== undefined) && (range.start === range.end))"
+        >{{range.start}}</span
     >
     <span class="label" [ngStyle]="{'background': label.color}" *ngFor="let label of labels">
         <span class="wrapped" *ngIf="!label.candle">
diff --git a/application/client/src/app/ui/views/toolbar/chart/render/chart.ts b/application/client/src/app/ui/views/toolbar/chart/render/chart.ts
index 8df9f2f942..c468b565bc 100644
--- a/application/client/src/app/ui/views/toolbar/chart/render/chart.ts
+++ b/application/client/src/app/ui/views/toolbar/chart/render/chart.ts
@@ -1,4 +1,4 @@
-import { IValuesMap, IValuesMinMaxMap } from '@platform/types/filter';
+import { ResultSearchValues, Point } from '@platform/types/bindings';
 import { scheme_color_0, scheme_color_5_75, shadeColor } from '@styles/colors';
 import { Base } from './render';
 import { ChartRequest, ChartType } from '@service/session/dependencies/search/charts/request';
@@ -8,8 +8,8 @@ import { IRange } from '@platform/types/range';
 const GRID_LINES_COUNT = 5;
 
 export class Render extends Base {
-    protected values: IValuesMap = {};
-    protected peaks: IValuesMinMaxMap = {};
+    protected values: ResultSearchValues = new Map<number, Point[]>();
+    protected peaks: Map<number, [number, number]> = new Map();
     protected charts: ChartRequest[] = [];
     protected points: boolean = true;
     protected selected: number | undefined;
@@ -18,8 +18,8 @@ export class Render extends Base {
         if (this.selected === undefined) {
             return;
         }
-        const selected = this.values[this.selected];
-        const peaks = this.peaks[this.selected];
+        const selected = this.values.get(this.selected);
+        const peaks = this.peaks.get(this.selected);
         if (selected === undefined || peaks === undefined) {
             return;
         }
@@ -74,12 +74,12 @@ export class Render extends Base {
         return this;
     }
 
-    public setValues(values: IValuesMap): Render {
+    public setValues(values: ResultSearchValues): Render {
         this.values = values;
         return this;
     }
 
-    public setPeaks(peaks: IValuesMinMaxMap): Render {
+    public setPeaks(peaks: Map<number, [number, number]>): Render {
         this.peaks = peaks;
         return this;
     }
@@ -94,20 +94,20 @@ export class Render extends Base {
         if (frame === undefined) {
             return;
         }
-        if (frame.to - frame.from <= 0) {
+        if (frame.end - frame.start <= 0) {
             return;
         }
         this.coors.drop();
         const size = this.size();
-        (Object.keys(this.values) as unknown as number[]).forEach((k: number) => {
-            const peaks = this.peaks[k];
+        this.values.forEach((points: Point[], k: number) => {
+            const peaks = this.peaks.get(k);
             if (peaks === undefined) {
                 console.error(`No peaks for chart #${k}`);
                 return;
             }
             const chart = this.charts[k];
             const type = chart === undefined ? ChartType.Linear : chart.definition.type;
-            const render = this.modes(frame, peaks, this.values[k], size, chart);
+            const render = this.modes(frame, peaks, points, size, chart);
             switch (type) {
                 case ChartType.Linear:
                     render.linear();
@@ -126,7 +126,7 @@ export class Render extends Base {
     protected modes(
         frame: IRange,
         peaks: [number, number],
-        values: [number, number, number, number][],
+        values: Point[],
         size: {
             width: number;
             height: number;
@@ -138,17 +138,17 @@ export class Render extends Base {
         temperature(): void;
     } {
         const rate = {
-            byX: size.width / (frame.to - frame.from),
+            byX: size.width / (frame.end - frame.start),
             byY: size.height / (peaks[1] - peaks[0]),
         };
         return {
             linear: (): void => {
                 this.context.beginPath();
                 const coors: [number, number][] = [];
-                values.forEach((pair: [number, number, number, number], i: number) => {
-                    const position = pair[0];
-                    const value = pair[3];
-                    const x = Math.round((position - frame.from) * rate.byX);
+                values.forEach((point: Point, i: number) => {
+                    const position = point.row;
+                    const value = point.y_value;
+                    const x = Math.round((position - frame.start) * rate.byX);
                     const y = size.height - Math.round((value - peaks[0]) * rate.byY);
                     if (i === 0) {
                         this.context.moveTo(x, y);
@@ -156,7 +156,7 @@ export class Render extends Base {
                         this.context.lineTo(x, y);
                     }
                     coors.push([x, y]);
-                    this.coors.add(x, value, position, pair[1], pair[2], chart);
+                    this.coors.add(x, value, position, point.min, point.max, chart);
                 });
                 const color = chart === undefined ? scheme_color_0 : chart.definition.color;
                 const lineWidth =
@@ -186,10 +186,10 @@ export class Render extends Base {
                 this.context.beginPath();
                 const coors: [number, number][] = [];
                 let prevY = 0;
-                values.forEach((pair: [number, number, number, number], i: number) => {
-                    const position = pair[0];
-                    const value = pair[3];
-                    const x = Math.round((position - frame.from) * rate.byX);
+                values.forEach((point: Point, i: number) => {
+                    const position = point.row;
+                    const value = point.y_value;
+                    const x = Math.round((position - frame.start) * rate.byX);
                     const y = size.height - Math.round((value - peaks[0]) * rate.byY);
                     if (i === 0) {
                         this.context.moveTo(x, y);
@@ -199,7 +199,7 @@ export class Render extends Base {
                     }
                     prevY = y;
                     coors.push([x, y]);
-                    this.coors.add(x, value, position, pair[1], pair[2], chart);
+                    this.coors.add(x, value, position, point.min, point.max, chart);
                 });
                 const color = chart === undefined ? scheme_color_0 : chart.definition.color;
                 const lineWidth =
@@ -230,10 +230,10 @@ export class Render extends Base {
                 const coors: [number, number][] = [];
                 const start = { x: 0, y: 0 };
                 const end = { x: 0, y: 0 };
-                values.forEach((pair: [number, number, number, number], i: number) => {
-                    const position = pair[0];
-                    const value = pair[3];
-                    const x = Math.round((position - frame.from) * rate.byX);
+                values.forEach((point: Point, i: number) => {
+                    const position = point.row;
+                    const value = point.y_value;
+                    const x = Math.round((position - frame.start) * rate.byX);
                     const y = size.height - Math.round((value - peaks[0]) * rate.byY);
                     if (i === 0) {
                         this.context.moveTo(x, y);
@@ -247,7 +247,7 @@ export class Render extends Base {
                         end.y = y;
                     }
                     coors.push([x, y]);
-                    this.coors.add(x, value, position, pair[1], pair[2], chart);
+                    this.coors.add(x, value, position, point.min, point.max, chart);
                 });
                 const color = chart === undefined ? scheme_color_0 : chart.definition.color;
                 const lineWidth =
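The chart render now walks `Map<number, Point[]>` series with named `row`/`y_value`/`min`/`max` fields instead of positional tuples, and the frame range uses `start`/`end`. A self-contained sketch of the coordinate math, under assumed minimal shapes for `Point` and the frame:

```ts
// Assumed minimal shape of the generated Point binding.
interface Point {
    row: number;
    min: number;
    max: number;
    y_value: number;
}

interface Frame {
    start: number;
    end: number;
}

// Converts each chart's points into canvas coordinates, mirroring the
// rate-by-X / rate-by-Y math in the render.
function toCanvasCoords(
    values: Map<number, Point[]>,
    peaks: Map<number, [number, number]>,
    frame: Frame,
    size: { width: number; height: number },
): Map<number, [number, number][]> {
    const out = new Map<number, [number, number][]>();
    const rateX = size.width / (frame.end - frame.start);
    values.forEach((points: Point[], chartId: number) => {
        const peak = peaks.get(chartId);
        if (peak === undefined) {
            // Same guard as the render: a series without peaks is skipped.
            return;
        }
        const rateY = size.height / (peak[1] - peak[0]);
        out.set(
            chartId,
            points.map((p): [number, number] => [
                Math.round((p.row - frame.start) * rateX),
                size.height - Math.round((p.y_value - peak[0]) * rateY),
            ]),
        );
    });
    return out;
}

const coords = toCanvasCoords(
    new Map([[0, [{ row: 10, min: 1, max: 5, y_value: 3 }]]]),
    new Map([[0, [0, 5]]]),
    { start: 0, end: 100 },
    { width: 500, height: 100 },
);
console.log(coords.get(0)); // [[50, 40]]
```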
diff --git a/application/client/src/app/ui/views/toolbar/chart/render/filters.ts b/application/client/src/app/ui/views/toolbar/chart/render/filters.ts
index 456a686bb6..02c36fda79 100644
--- a/application/client/src/app/ui/views/toolbar/chart/render/filters.ts
+++ b/application/client/src/app/ui/views/toolbar/chart/render/filters.ts
@@ -30,7 +30,7 @@ export class Render extends Base {
         if (frame === undefined) {
             return;
         }
-        const frameLength = frame.to - frame.from;
+        const frameLength = frame.end - frame.start;
         if (frameLength <= 0) {
             return;
         }
diff --git a/application/client/src/app/ui/views/toolbar/search/results/backing.ts b/application/client/src/app/ui/views/toolbar/search/results/backing.ts
index 7f51f39489..58c2a53f73 100644
--- a/application/client/src/app/ui/views/toolbar/search/results/backing.ts
+++ b/application/client/src/app/ui/views/toolbar/search/results/backing.ts
@@ -2,18 +2,18 @@ import { Session } from '@service/session';
 import { IRowsPacket, Service } from '@elements/scrollarea/controllers/service';
 import { Range, IRange } from '@platform/types/range';
 import { Row, Owner } from '@schema/content/row';
-import { IGrabbedElement } from '@platform/types/content';
+import { GrabbedElement } from '@platform/types/bindings/miscellaneous';
 
 const SCROLLAREA_SERVICE = 'search_scroll_area_service';
 
 async function getRowFrom(
     session: Session,
-    element: IGrabbedElement,
-    elements: IGrabbedElement[],
+    element: GrabbedElement,
+    elements: GrabbedElement[],
     index: number,
 ): Promise<Row> {
     const row = new Row({
-        position: element.position,
+        position: element.pos,
         content: element.content,
         session: session,
         owner: Owner.Search,
@@ -27,18 +27,18 @@ async function getRowFrom(
         return row;
     }
     if (index > 0 && index < elements.length - 1) {
-        row.nature.hidden = elements[index + 1].position - elements[index - 1].position;
+        row.nature.hidden = elements[index + 1].pos - elements[index - 1].pos;
         return row;
     }
-    const around = await session.indexed.getIndexesAround(element.position);
+    const around = await session.indexed.getIndexesAround(element.pos);
     if (around.before !== undefined && around.after !== undefined) {
         row.nature.hidden = around.after - around.before;
     } else {
         row.nature.hidden =
             around.before !== undefined
-                ? element.position - around.before
+                ? element.pos - around.before
                 : around.after !== undefined
-                ? around.after - element.position
+                ? around.after - element.pos
                 : 0;
     }
     return row;
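With `GrabbedElement` exposing `pos` instead of `position`, the hidden-row count for an interior search match is still just the gap between its neighbours. A tiny sketch of that arithmetic (edge elements keep using the async `getIndexesAround()` path, which is omitted here; the interface is a reduced stand-in):

```ts
// Assumed minimal shape of the generated GrabbedElement binding.
interface GrabbedElement {
    pos: number;
    content: string;
}

// For an element with neighbours on both sides, the number of stream rows
// hidden between the surrounding matches is the gap between their positions.
function hiddenBetweenNeighbours(elements: GrabbedElement[], index: number): number | undefined {
    if (index <= 0 || index >= elements.length - 1) {
        return undefined; // edges need the async getIndexesAround() lookup instead
    }
    return elements[index + 1].pos - elements[index - 1].pos;
}

const rows: GrabbedElement[] = [
    { pos: 10, content: 'a' },
    { pos: 14, content: 'b' },
    { pos: 21, content: 'c' },
];
console.log(hiddenBetweenNeighbours(rows, 1)); // 11
```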
diff --git a/application/client/src/app/ui/views/workspace/backing.ts b/application/client/src/app/ui/views/workspace/backing.ts
index f2a4bb945e..bfd157faf7 100644
--- a/application/client/src/app/ui/views/workspace/backing.ts
+++ b/application/client/src/app/ui/views/workspace/backing.ts
@@ -12,7 +12,7 @@ function getRows(session: Session, range: Range): Promise<IRowsPacket> {
             .then((rows) => {
                 const converted = rows.map((row) => {
                     return new Row({
-                        position: row.position,
+                        position: row.pos,
                         content: row.content,
                         session,
                         owner: Owner.Output,
diff --git a/application/holder/src/service/bridge/os/entity.ts b/application/holder/src/service/bridge/os/entity.ts
index d863605fab..58a85849ce 100644
--- a/application/holder/src/service/bridge/os/entity.ts
+++ b/application/holder/src/service/bridge/os/entity.ts
@@ -21,7 +21,7 @@ export const handler = Requests.InjectLogger<
                     const entity: Entity = {
                         name: request.path,
                         fullname: path.normalize(request.path),
-                        type: (() => {
+                        kind: (() => {
                             if (stats.isBlockDevice()) {
                                 return EntityType.BlockDevice;
                             } else if (stats.isCharacterDevice()) {
diff --git a/application/holder/src/service/electron/dialogs.ts b/application/holder/src/service/electron/dialogs.ts
index 9b7cbfdcd2..b19b05aeb4 100644
--- a/application/holder/src/service/electron/dialogs.ts
+++ b/application/holder/src/service/electron/dialogs.ts
@@ -27,10 +27,6 @@ export class Dialogs extends Implementation {
 
     public saveFile(ext?: string, defaultFileName?: string): Promise<string | undefined> {
         this.fixFocusAndMouse();
-        console.log(`>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n\n`);
-        console.log(`EXT: ${ext}`);
-        console.log(`defaultFileName: ${defaultFileName}`);
-        console.log(`>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n\n`);
         return dialog
             .showSaveDialog(this._window, {
                 title: 'Select file to save',
diff --git a/application/holder/src/service/sessions/holder.ts b/application/holder/src/service/sessions/holder.ts
index 877c4767b7..d3b951828e 100644
--- a/application/holder/src/service/sessions/holder.ts
+++ b/application/holder/src/service/sessions/holder.ts
@@ -99,7 +99,7 @@ export class Holder {
                 return new Promise((resolve, reject) => {
                     const observer = this.session
                         .getStream()
-                        .observe(observe.configuration)
+                        .observe(cfg)
                         .on('confirmed', () => {
                             Events.IpcEvent.emit(
                                 new Events.Observe.Started.Event({
diff --git a/application/holder/src/service/sessions/requests/session/create.ts b/application/holder/src/service/sessions/requests/session/create.ts
index 737fc2795b..3d600b23c9 100644
--- a/application/holder/src/service/sessions/requests/session/create.ts
+++ b/application/holder/src/service/sessions/requests/session/create.ts
@@ -1,5 +1,5 @@
 import { CancelablePromise } from 'platform/env/promise';
-import { ISearchUpdated } from 'platform/types/filter';
+import { FilterMatch, ISearchUpdated } from 'platform/types/filter';
 import { Session, IEventIndexedMapUpdated } from 'rustcore';
 import { sessions } from '@service/sessions';
 import { Subscriber } from 'platform/env/subscription';
@@ -64,14 +64,14 @@ export const handler = Requests.InjectLogger<
                         }),
                     );
                     subscriber.register(
-                        session.getEvents().SearchMapUpdated.subscribe((map: string | null) => {
+                        session.getEvents().SearchMapUpdated.subscribe((_map: FilterMatch[]) => {
                             if (!sessions.exists(uuid)) {
                                 return;
                             }
                             Events.IpcEvent.emit(
                                 new Events.Search.MapUpdated.Event({
                                     session: uuid,
-                                    map,
+                                    map: null,
                                 }),
                             );
                         }),
diff --git a/application/holder/src/service/sessions/requests/values/frame.ts b/application/holder/src/service/sessions/requests/values/frame.ts
index 8c85659217..1563bead0a 100644
--- a/application/holder/src/service/sessions/requests/values/frame.ts
+++ b/application/holder/src/service/sessions/requests/values/frame.ts
@@ -40,7 +40,7 @@ export const handler = Requests.InjectLogger<
                             new Requests.Values.Frame.Response({
                                 session: session_uuid,
                                 canceled: true,
-                                values: {},
+                                values: new Map(),
                             }),
                         );
                     })
@@ -50,7 +50,7 @@ export const handler = Requests.InjectLogger<
                             new Requests.Values.Frame.Response({
                                 session: session_uuid,
                                 canceled: false,
-                                values: {},
+                                values: new Map(),
                                 error: err.message,
                             }),
                         );
diff --git a/application/holder/src/service/unbound/dlt/stat.ts b/application/holder/src/service/unbound/dlt/stat.ts
index 22b82371a4..a87d7b20fe 100644
--- a/application/holder/src/service/unbound/dlt/stat.ts
+++ b/application/holder/src/service/unbound/dlt/stat.ts
@@ -2,7 +2,7 @@ import { CancelablePromise } from 'platform/env/promise';
 import { Logger } from 'platform/log';
 import { jobs } from '@service/jobs';
 import { unbound } from '@service/unbound';
-import { StatisticInfo } from 'platform/types/observe/parser/dlt';
+import { DltStatisticInfo } from 'platform/types/bindings';
 
 import * as Requests from 'platform/ipc/request';
 
@@ -26,7 +26,7 @@ export const handler = Requests.InjectLogger<
                 .start();
             unbound.jobs
                 .getDltStats(request.files)
-                .then((stat: StatisticInfo) => {
+                .then((stat: DltStatisticInfo) => {
                     resolve(
                         new Requests.Dlt.Stat.Response({
                             stat,
diff --git a/application/holder/src/service/unbound/os/list.ts b/application/holder/src/service/unbound/os/list.ts
index 61d43a6be7..c3b38113c5 100644
--- a/application/holder/src/service/unbound/os/list.ts
+++ b/application/holder/src/service/unbound/os/list.ts
@@ -1,11 +1,10 @@
 import { CancelablePromise } from 'platform/env/promise';
 import { Logger } from 'platform/log';
 import { error } from 'platform/log/utils';
-import { Entity, entityFromObj } from 'platform/types/files';
 import { unbound } from '@service/unbound';
+import { FoldersScanningResult } from 'platform/types/bindings';
 
 import * as Requests from 'platform/ipc/request';
-import * as obj from 'platform/env/obj';
 
 export const handler = Requests.InjectLogger<
     Requests.Os.List.Request,
@@ -18,26 +17,12 @@ export const handler = Requests.InjectLogger<
         return new CancelablePromise((resolve, reject) => {
             unbound.jobs
                 .listContent(request)
-                .then((content: string) => {
+                .then((res: FoldersScanningResult) => {
                     try {
-                        const data = typeof content === 'string' ? JSON.parse(content) : content;
-                        const list = obj.getAsArray(data, 'list') as { [key: string]: unknown }[];
-                        const max = obj.getAsBool(data, 'max_len_reached');
                         resolve(
                             new Requests.Os.List.Response({
-                                entities: list
-                                    .map((smth: { [key: string]: unknown }) => {
-                                        try {
-                                            return entityFromObj(smth);
-                                        } catch (e) {
-                                            log.warn(
-                                                `Fail to parse listContent entity: ${error(e)}`,
-                                            );
-                                            return undefined;
-                                        }
-                                    })
-                                    .filter((e) => e !== undefined) as Entity[],
-                                max,
+                                entities: res.list,
+                                max: res.max_len_reached,
                             }),
                         );
                     } catch (e) {
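`listContent` now resolves with a typed `FoldersScanningResult` instead of a JSON string, so the handler maps `list` and `max_len_reached` straight into the IPC response without `JSON.parse` or manual field extraction. A reduced sketch under assumed minimal shapes:

```ts
// Reduced stand-ins for the generated bindings (assumed shapes; the real
// types come from 'platform/types/bindings').
interface FolderEntity {
    name: string;
    fullname: string;
}

interface FoldersScanningResult {
    list: FolderEntity[];
    max_len_reached: boolean;
}

// Fields map directly onto the Os.List response payload.
function toListResponse(res: FoldersScanningResult): { entities: FolderEntity[]; max: boolean } {
    return { entities: res.list, max: res.max_len_reached };
}

console.log(
    toListResponse({
        list: [{ name: 'log.dlt', fullname: '/tmp/log.dlt' }],
        max_len_reached: false,
    }),
);
```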
diff --git a/application/holder/src/service/unbound/os/shells.ts b/application/holder/src/service/unbound/os/shells.ts
index f3e3432075..7964494efe 100644
--- a/application/holder/src/service/unbound/os/shells.ts
+++ b/application/holder/src/service/unbound/os/shells.ts
@@ -1,11 +1,11 @@
 import { CancelablePromise } from 'platform/env/promise';
 import { Logger } from 'platform/log';
-import { ShellProfile } from 'platform/types/shells';
+import { Profile } from 'platform/types/bindings';
 import { unbound } from '@service/unbound';
 
 import * as Requests from 'platform/ipc/request';
 
-let cached: ShellProfile[] | undefined = undefined;
+let cached: Profile[] | undefined = undefined;
 
 export const handler = Requests.InjectLogger<
     Requests.Os.Shells.Request,
diff --git a/application/platform/ipc/event/search/updated.ts b/application/platform/ipc/event/search/updated.ts
index 0f24ccff9e..77c8fb914b 100644
--- a/application/platform/ipc/event/search/updated.ts
+++ b/application/platform/ipc/event/search/updated.ts
@@ -6,9 +6,9 @@ import * as validator from '../../../env/obj';
 export class Event extends SignatureRequirement {
     public session: string;
     public rows: number;
-    public stat: { [key: string]: number };
+    public stat: Map<string, number>;
 
-    constructor(input: { session: string; rows: number; stat: { [key: string]: number } }) {
+    constructor(input: { session: string; rows: number; stat: Map<string, number> }) {
         super();
         validator.isObject(input);
         this.session = validator.getAsNotEmptyString(input, 'session');
diff --git a/application/platform/ipc/event/stream/attachment.ts b/application/platform/ipc/event/stream/attachment.ts
index b2ac1a50c0..0933ae5b14 100644
--- a/application/platform/ipc/event/stream/attachment.ts
+++ b/application/platform/ipc/event/stream/attachment.ts
@@ -1,22 +1,24 @@
 import { Define, Interface, SignatureRequirement } from '../declarations';
-import { IAttachment, Attachment } from '../../../types/content';
+import { Attachment } from '../../../types/content';
+import { AttachmentInfo } from '../../../types/bindings';
 
 import * as validator from '../../../env/obj';
 
 @Define({ name: 'AttachmentsUpdated' })
 export class Event extends SignatureRequirement {
     public session: string;
-    public attachment: IAttachment;
+    public attachment: AttachmentInfo;
     public len: number;
 
-    constructor(input: { session: string; attachment: IAttachment; len: number }) {
+    constructor(input: { session: string; attachment: AttachmentInfo; len: number }) {
         super();
         validator.isObject(input);
         this.session = validator.getAsNotEmptyString(input, 'session');
         this.len = validator.getAsValidNumber(input, 'len');
-        const attachment = Attachment.from(validator.getAsObj(input, 'attachment'));
-        if (attachment instanceof Error) {
-            throw attachment;
+        const attachment = validator.getAsObj(input, 'attachment');
+        const err = Attachment.from(attachment);
+        if (err instanceof Error) {
+            throw err;
         }
         this.attachment = attachment;
     }
diff --git a/application/platform/ipc/event/values/updated.ts b/application/platform/ipc/event/values/updated.ts
index b16c1f171c..e45ec0fb97 100644
--- a/application/platform/ipc/event/values/updated.ts
+++ b/application/platform/ipc/event/values/updated.ts
@@ -1,14 +1,13 @@
 import { Define, Interface, SignatureRequirement } from '../declarations';
-import { IValuesMinMaxMap } from '../../../types/filter';
 
 import * as validator from '../../../env/obj';
 
 @Define({ name: 'SearchValuesUpdated' })
 export class Event extends SignatureRequirement {
     public session: string;
-    public map: IValuesMinMaxMap | null;
+    public map: Map<number, [number, number]> | null;
 
-    constructor(input: { session: string; map: IValuesMinMaxMap | null }) {
+    constructor(input: { session: string; map: Map<number, [number, number]> | null }) {
         super();
         validator.isObject(input);
         this.session = validator.getAsNotEmptyString(input, 'session');
diff --git a/application/platform/ipc/request/dlt/stat.ts b/application/platform/ipc/request/dlt/stat.ts
index be694c161b..ebb1364bea 100644
--- a/application/platform/ipc/request/dlt/stat.ts
+++ b/application/platform/ipc/request/dlt/stat.ts
@@ -1,5 +1,5 @@
 import { Define, Interface, SignatureRequirement } from '../declarations';
-import { StatisticInfo } from '../../../types/observe/parser/dlt';
+import { DltStatisticInfo } from '../../../types/bindings';
 import * as validator from '../../../env/obj';
 
 @Define({ name: 'DltStatRequest' })
@@ -16,9 +16,9 @@ export interface Request extends Interface {}
 
 @Define({ name: 'DltStatResponse' })
 export class Response extends SignatureRequirement {
-    public stat: StatisticInfo;
+    public stat: DltStatisticInfo;
 
-    constructor(input: { stat: StatisticInfo }) {
+    constructor(input: { stat: DltStatisticInfo }) {
         super();
         validator.isObject(input);
         this.stat = validator.getAsObj(input, 'stat');
diff --git a/application/platform/ipc/request/os/list.ts b/application/platform/ipc/request/os/list.ts
index 19d90bb902..d4eb87453e 100644
--- a/application/platform/ipc/request/os/list.ts
+++ b/application/platform/ipc/request/os/list.ts
@@ -1,5 +1,5 @@
 import { Define, Interface, SignatureRequirement } from '../declarations';
-import { Entity } from '../../../types/files';
+import { FolderEntity } from '../../../types/bindings';
 
 import * as validator from '../../../env/obj';
 
@@ -30,10 +30,10 @@ export interface Request extends Interface {}
 
 @Define({ name: 'ListFilesAndFoldersResponse' })
 export class Response extends SignatureRequirement {
-    public entities: Entity[];
+    public entities: FolderEntity[];
     public max: boolean;
 
-    constructor(input: { entities: Entity[]; max: boolean }) {
+    constructor(input: { entities: FolderEntity[]; max: boolean }) {
         super();
         validator.isObject(input);
         this.entities = validator.getAsArray(input, 'entities');
diff --git a/application/platform/ipc/request/os/shells.ts b/application/platform/ipc/request/os/shells.ts
index 92e769766d..03282a87ea 100644
--- a/application/platform/ipc/request/os/shells.ts
+++ b/application/platform/ipc/request/os/shells.ts
@@ -1,5 +1,5 @@
 import { Define, Interface, SignatureRequirement } from '../declarations';
-import { ShellProfile } from '../../../types/shells';
+import { Profile } from '../../../types/bindings';
 
 import * as validator from '../../../env/obj';
 
@@ -9,9 +9,9 @@ export interface Request extends Interface {}
 
 @Define({ name: 'ShellProfilesListResponse' })
 export class Response extends SignatureRequirement {
-    public profiles: ShellProfile[];
+    public profiles: Profile[];
 
-    constructor(input: { profiles: ShellProfile[] }) {
+    constructor(input: { profiles: Profile[] }) {
         super();
         validator.isObject(input);
         this.profiles = validator.getAsArray(input, 'profiles');
diff --git a/application/platform/ipc/request/search/nearest.ts b/application/platform/ipc/request/search/nearest.ts
index 8a91cabbc0..5f765bc273 100644
--- a/application/platform/ipc/request/search/nearest.ts
+++ b/application/platform/ipc/request/search/nearest.ts
@@ -1,5 +1,5 @@
 import { Define, Interface, SignatureRequirement } from '../declarations';
-import { INearest } from '../../../types/filter';
+import { NearestPosition } from '../../../types/bindings';
 
 import * as validator from '../../../env/obj';
 
@@ -21,9 +21,9 @@ export interface Request extends Interface {}
 @Define({ name: 'NearestResponse' })
 export class Response extends SignatureRequirement {
     public session: string;
-    public nearest: INearest | undefined;
+    public nearest: NearestPosition | undefined;
 
-    constructor(input: { session: string; nearest: INearest | undefined }) {
+    constructor(input: { session: string; nearest: NearestPosition | undefined }) {
         super();
         validator.isObject(input);
         this.session = validator.getAsNotEmptyString(input, 'session');
diff --git a/application/platform/ipc/request/stream/chunk.ts b/application/platform/ipc/request/stream/chunk.ts
index 80142d8b5b..4dc14948b8 100644
--- a/application/platform/ipc/request/stream/chunk.ts
+++ b/application/platform/ipc/request/stream/chunk.ts
@@ -1,5 +1,5 @@
 import { Define, Interface, SignatureRequirement } from '../declarations';
-import { IGrabbedElement } from '../../../types/content';
+import { GrabbedElement } from '../../../types/bindings/miscellaneous';
 
 import * as validator from '../../../env/obj';
 
@@ -23,16 +23,16 @@ export interface Request extends Interface {}
 @Define({ name: 'StreamChunkResponse' })
 export class Response extends SignatureRequirement {
     public session: string;
-    public rows: IGrabbedElement[];
+    public rows: GrabbedElement[];
     public from: number;
     public to: number;
-    constructor(input: { session: string; from: number; to: number; rows: IGrabbedElement[] }) {
+    constructor(input: { session: string; from: number; to: number; rows: GrabbedElement[] }) {
         super();
         validator.isObject(input);
         this.session = validator.getAsNotEmptyString(input, 'session');
         this.from = validator.getAsValidNumber(input, 'from');
         this.to = validator.getAsValidNumber(input, 'to');
-        this.rows = validator.getAsArray<IGrabbedElement>(input, 'rows');
+        this.rows = validator.getAsArray<GrabbedElement>(input, 'rows');
     }
 }
 
diff --git a/application/platform/ipc/request/stream/indexed.ts b/application/platform/ipc/request/stream/indexed.ts
index 73f411311b..ea03355b93 100644
--- a/application/platform/ipc/request/stream/indexed.ts
+++ b/application/platform/ipc/request/stream/indexed.ts
@@ -1,5 +1,5 @@
 import { Define, Interface, SignatureRequirement } from '../declarations';
-import { IGrabbedElement } from '../../../types/content';
+import { GrabbedElement } from '../../../types/bindings/miscellaneous';
 
 import * as validator from '../../../env/obj';
 
@@ -23,16 +23,16 @@ export interface Request extends Interface {}
 @Define({ name: 'IndexedChunkResponse' })
 export class Response extends SignatureRequirement {
     public session: string;
-    public rows: IGrabbedElement[];
+    public rows: GrabbedElement[];
     public from: number;
     public to: number;
-    constructor(input: { session: string; from: number; to: number; rows: IGrabbedElement[] }) {
+    constructor(input: { session: string; from: number; to: number; rows: GrabbedElement[] }) {
         super();
         validator.isObject(input);
         this.session = validator.getAsNotEmptyString(input, 'session');
         this.from = validator.getAsValidNumber(input, 'from');
         this.to = validator.getAsValidNumber(input, 'to');
-        this.rows = validator.getAsArray<IGrabbedElement>(input, 'rows');
+        this.rows = validator.getAsArray<GrabbedElement>(input, 'rows');
     }
 }
 
diff --git a/application/platform/ipc/request/stream/ranges.ts b/application/platform/ipc/request/stream/ranges.ts
index 325cd2ad05..82e05cd3b1 100644
--- a/application/platform/ipc/request/stream/ranges.ts
+++ b/application/platform/ipc/request/stream/ranges.ts
@@ -1,5 +1,5 @@
 import { Define, Interface, SignatureRequirement } from '../declarations';
-import { IGrabbedElement } from '../../../types/content';
+import { GrabbedElement } from '../../../types/bindings/miscellaneous';
 import { IRange } from '../../../types/range';
 
 import * as validator from '../../../env/obj';
@@ -22,12 +22,12 @@ export interface Request extends Interface {}
 @Define({ name: 'StreamRangesChunkResponse' })
 export class Response extends SignatureRequirement {
     public session: string;
-    public rows: IGrabbedElement[];
-    constructor(input: { session: string; rows: IGrabbedElement[] }) {
+    public rows: GrabbedElement[];
+    constructor(input: { session: string; rows: GrabbedElement[] }) {
         super();
         validator.isObject(input);
         this.session = validator.getAsNotEmptyString(input, 'session');
-        this.rows = validator.getAsArray<IGrabbedElement>(input, 'rows');
+        this.rows = validator.getAsArray<GrabbedElement>(input, 'rows');
     }
 }
 
diff --git a/application/platform/ipc/request/values/frame.ts b/application/platform/ipc/request/values/frame.ts
index 18b0c34a12..def8aeac87 100644
--- a/application/platform/ipc/request/values/frame.ts
+++ b/application/platform/ipc/request/values/frame.ts
@@ -1,5 +1,5 @@
 import { Define, Interface, SignatureRequirement } from '../declarations';
-import { IValuesMap } from '../../../types/filter';
+import { ResultSearchValues } from '../../../types/bindings';
 
 import * as validator from '../../../env/obj';
 
@@ -25,11 +25,16 @@ export interface Request extends Interface {}
 @Define({ name: 'SearchValuesGettingResponse' })
 export class Response extends SignatureRequirement {
     public session: string;
-    public values: IValuesMap;
+    public values: ResultSearchValues;
     public canceled: boolean;
     public error?: string;
 
-    constructor(input: { session: string; values: IValuesMap; canceled: boolean; error?: string }) {
+    constructor(input: {
+        session: string;
+        values: ResultSearchValues;
+        canceled: boolean;
+        error?: string;
+    }) {
         super();
         validator.isObject(input);
         this.session = validator.getAsNotEmptyString(input, 'session');
diff --git a/application/platform/package.json b/application/platform/package.json
index 9bf63415fd..e764edd1ed 100644
--- a/application/platform/package.json
+++ b/application/platform/package.json
@@ -88,7 +88,6 @@
     "./modules/system": "./dist/modules/system.js",
     "./types": "./dist/types/index.js",
     "./types/sde/index": "./dist/types/sde/index.js",
-    "./types/shells": "./dist/types/shells.js",
     "./types/files": "./dist/types/files.js",
     "./types/content": "./dist/types/content.js",
     "./types/filter": "./dist/types/filter.js",
@@ -143,6 +142,7 @@
     "./types/github/chart": "./dist/types/github/chart.js",
     "./types/github/filemetadata": "./dist/types/github/filemetadata.js",
     "./types/github/filter": "./dist/types/github/filter.js",
+    "./types/bindings": "./dist/types/bindings/index.js",
     "./package.json": "./package.json"
   },
   "packageManager": "yarn@4.2.2"
diff --git a/application/platform/types/bindings/attachment.ts b/application/platform/types/bindings/attachment.ts
new file mode 100644
index 0000000000..2dea61cbc9
--- /dev/null
+++ b/application/platform/types/bindings/attachment.ts
@@ -0,0 +1,43 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Describes the content of attached data found in the `payload` of a `dlt` message.
+ */
+export interface AttachmentInfo {
+    /**
+     * A unique identifier for the attachment.
+     */
+    uuid: string;
+    /**
+     * The full path to the file. Note that `chipmunk` serializes the file name to ensure proper
+     * saving to disk, so the actual file name may differ from the value in the `name` field.
+     */
+    filepath: string;
+    /**
+     * The name of the application, usually corresponding to the file name.
+     */
+    name: string;
+    /**
+     * The file extension, if available.
+     */
+    ext: string | null;
+    /**
+     * The size of the file in bytes.
+     */
+    size: number;
+    /**
+     * The `mime` type of the file, if it could be determined.
+     */
+    mime: string | null;
+    /**
+     * The log entry numbers containing the application data. Note that the application
+     * data may be contained in a single log entry or split into parts distributed
+     * across sequential log entries.
+     */
+    messages: number[];
+}
+
+/**
+ * A list of attachments.
+ */
+export type AttachmentList = Array<AttachmentInfo>;
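A small, purely illustrative helper showing how the generated `AttachmentInfo` above might be consumed; `describeAttachment` is hypothetical and not part of the codebase:

```ts
import type { AttachmentInfo } from './attachment';

// Human-readable one-line summary of an attachment, e.g. for a tooltip.
export function describeAttachment(a: AttachmentInfo): string {
    const kind = a.mime ?? (a.ext ? `*.${a.ext}` : 'unknown type');
    return `${a.name} (${kind}, ${a.size} bytes, spread over ${a.messages.length} message(s))`;
}
```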
diff --git a/application/platform/types/bindings/callback.ts b/application/platform/types/bindings/callback.ts
new file mode 100644
index 0000000000..96d5e7d479
--- /dev/null
+++ b/application/platform/types/bindings/callback.ts
@@ -0,0 +1,61 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { AttachmentInfo } from "./attachment";
+import type { FilterMatchList } from "./miscellaneous";
+import type { NativeError } from "./error";
+import type { Progress } from "./progress";
+
+/**
+ * Represents events sent to the client.
+ */
+export type CallbackEvent = { "StreamUpdated": number } | "FileRead" | { "SearchUpdated": { 
+/**
+ * The number of logs with matches. Can be `0` if the search is reset on the client side.
+ */
+found: number, 
+/**
+ * A map of search conditions and their global match counts within the session.
+ * - `String`: The search condition.
+ * - `u64`: The count of matches.
+ */
+stat: Map<string, number>, } } | { "IndexedMapUpdated": { 
+/**
+ * The number of log entries from search results available for reading.
+ */
+len: number, } } | { "SearchMapUpdated": FilterMatchList | null } | { "SearchValuesUpdated": Map<number, [number, number]> } | { "AttachmentsUpdated": { 
+/**
+ * The size of the attachment in bytes.
+ */
+len: number, 
+/**
+ * The description of the attachment.
+ */
+attachment: AttachmentInfo, } } | { "Progress": { 
+/**
+ * The unique identifier of the operation.
+ */
+uuid: string, 
+/**
+ * Information about the progress.
+ */
+progress: Progress, } } | { "SessionError": NativeError } | { "OperationError": { 
+/**
+ * The unique identifier of the operation that caused the error.
+ */
+uuid: string, 
+/**
+ * The error details.
+ */
+error: NativeError, } } | { "OperationStarted": string } | { "OperationProcessing": string } | { "OperationDone": OperationDone } | "SessionDestroyed";
+
+/**
+ * Contains the results of an operation.
+ */
+export type OperationDone = { 
+/**
+ * The unique identifier of the operation.
+ */
+uuid: string, 
+/**
+ * The results of the operation, if available.
+ */
+result: Array<number> | null, };
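
Since `CallbackEvent` mixes plain string variants with single-key object variants, consumers narrow it with `typeof` and `in` checks; a sketch under the same assumed import path:

```ts
import { CallbackEvent } from 'platform/types/bindings';

// Narrow the externally tagged union: string variants carry no payload,
// object variants are identified by their single key.
function describe(event: CallbackEvent): string {
    if (typeof event === 'string') {
        return event; // "FileRead" or "SessionDestroyed"
    }
    if ('StreamUpdated' in event) {
        return `stream length: ${event.StreamUpdated}`;
    }
    if ('SearchUpdated' in event) {
        return `matches found: ${event.SearchUpdated.found}`;
    }
    if ('OperationDone' in event) {
        return `operation ${event.OperationDone.uuid} done`;
    }
    return 'other event';
}
```
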
diff --git a/application/platform/types/bindings/command.ts b/application/platform/types/bindings/command.ts
new file mode 100644
index 0000000000..91f25851c2
--- /dev/null
+++ b/application/platform/types/bindings/command.ts
@@ -0,0 +1,196 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeBool = { Finished: boolean } | 'Cancelled';
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeDltStatisticInfoResult = { Finished: DltStatisticInfo } | 'Cancelled';
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeFoldersScanningResult = { Finished: FoldersScanningResult } | 'Cancelled';
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeOptionalString = { Finished: string | null } | 'Cancelled';
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeProfilesResult = { Finished: ProfileList } | 'Cancelled';
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeSerialPortsList = { Finished: SerialPortsList } | 'Cancelled';
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeString = { Finished: string } | 'Cancelled';
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomeVoid = 'Finished' | 'Cancelled';
+
+/**
+ * Represents the result of a command execution.
+ * At the core level, this type is used for all commands invoked within an `UnboundSession`.
+ * It is only used to indicate the successful completion or interruption of a command.
+ */
+export type CommandOutcomei64 = { Finished: number } | 'Cancelled';
+
+export type DltLevelDistribution = {
+    non_log: number;
+    log_fatal: number;
+    log_error: number;
+    log_warning: number;
+    log_info: number;
+    log_debug: number;
+    log_verbose: number;
+    log_invalid: number;
+};
+
+export type DltStatisticInfo = {
+    app_ids: Array<[string, DltLevelDistribution]>;
+    context_ids: Array<[string, DltLevelDistribution]>;
+    ecu_ids: Array<[string, DltLevelDistribution]>;
+    contained_non_verbose: boolean;
+};
+
+/**
+ * Represents a folder entity in the file system.
+ */
+export type FolderEntity = {
+    /**
+     * The name of the entity (file or folder).
+     */
+    name: string;
+    /**
+     * The full path of the entity.
+     */
+    fullname: string;
+    /**
+     * The type of the entity (e.g., file, directory, symbolic link).
+     */
+    kind: FolderEntityType;
+    /**
+     * Optional detailed information about the entity.
+     */
+    details: FolderEntityDetails | null;
+};
+
+/**
+ * Contains detailed information about a folder entity.
+ */
+export type FolderEntityDetails = {
+    /**
+     * The name of the file or folder.
+     */
+    filename: string;
+    /**
+     * The full path to the file or folder.
+     */
+    full: string;
+    /**
+     * The directory path containing the file or folder.
+     */
+    path: string;
+    /**
+     * The base name of the file or folder.
+     */
+    basename: string;
+    /**
+     * The file extension, if applicable.
+     */
+    ext: string;
+};
+
+/**
+ * Represents the type of a folder entity in the file system.
+ */
+export enum FolderEntityType {
+    BlockDevice = 'BlockDevice',
+    CharacterDevice = 'CharacterDevice',
+    Directory = 'Directory',
+    FIFO = 'FIFO',
+    File = 'File',
+    Socket = 'Socket',
+    SymbolicLink = 'SymbolicLink',
+}
+
+/**
+ * Represents the result of scanning a folder.
+ */
+export type FoldersScanningResult = {
+    /**
+     * A list of folder entities found during the scan.
+     */
+    list: Array<FolderEntity>;
+    /**
+     * Indicates whether the maximum length of results was reached.
+     */
+    max_len_reached: boolean;
+};
+
+export type Profile = {
+    /**
+     * The suggested shell name. On Unix-based systems this is the name of the
+     * executable file, e.g. "bash" or "fish". On Windows it is a name such as
+     * "GitBash" or "PowerShell".
+     */
+    name: string;
+    /**
+     * The path to the shell's executable file.
+     */
+    path: string;
+    /**
+     * The list of environment variables. Because extracting them can take some
+     * time, `envvars = None` by default. The `load` method should be used to
+     * attempt to detect the environment variables.
+     */
+    envvars: Map<string, string>;
+    /**
+     * `true` if the path to the shell's executable file is a symlink to another location.
+     */
+    symlink: boolean;
+};
+
+/**
+ * Represents a list of shell profiles.
+ *
+ * This structure contains a vector of profiles, where each entry describes
+ * a shell detected on the system.
+ */
+export type ProfileList = Array<Profile>;
+
+/**
+ * Represents a list of serial ports.
+ *
+ * This structure contains a vector of strings, where each string represents the name
+ * or identifier of a serial port available on the system.
+ */
+export type SerialPortsList = Array<string>;
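
All `CommandOutcome*` aliases share the `{ Finished } | 'Cancelled'` shape, so unwrapping them follows one pattern; a sketch for the string flavour (import path assumed):

```ts
import { CommandOutcomeString } from 'platform/types/bindings';

// Returns the payload of a finished command, or `undefined` when it was cancelled.
function unwrap(outcome: CommandOutcomeString): string | undefined {
    return outcome === 'Cancelled' ? undefined : outcome.Finished;
}
```
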
diff --git a/application/platform/types/bindings/dlt.ts b/application/platform/types/bindings/dlt.ts
new file mode 100644
index 0000000000..de8c67bae7
--- /dev/null
+++ b/application/platform/types/bindings/dlt.ts
@@ -0,0 +1,23 @@
+export interface DltFilterConfig {
+    /// only select log entries with level MIN_LEVEL and more severe
+    ///
+    /// ``` text
+    ///  1 => FATAL
+    ///  2 => ERROR
+    ///  3 => WARN
+    ///  4 => INFO
+    ///  5 => DEBUG
+    ///  6 => VERBOSE
+    /// ```
+    min_log_level?: number;
+    /// what app ids should be allowed.
+    app_ids?: string[];
+    /// what ecu ids should be allowed
+    ecu_ids?: string[];
+    /// what context ids should be allowed
+    context_ids?: string[];
+    /// how many app ids exist in total
+    app_id_count: number;
+    /// how many context ids exist in total
+    context_id_count: number;
+}
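
A sketch of a filter configuration built from this interface; the counter values are hypothetical and would be supplied by the caller (import path assumed):

```ts
import { DltFilterConfig } from 'platform/types/bindings';

// Keep WARN and more severe entries (3 => WARN) from a single ECU.
const filter: DltFilterConfig = {
    min_log_level: 3,
    ecu_ids: ['ECU1'],
    app_id_count: 12,
    context_id_count: 34,
};
```
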
diff --git a/application/platform/types/bindings/error.ts b/application/platform/types/bindings/error.ts
new file mode 100644
index 0000000000..0c77c44b0a
--- /dev/null
+++ b/application/platform/types/bindings/error.ts
@@ -0,0 +1,33 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Describes the type and details of an error.
+ */
+export type ComputationError = "DestinationPath" | "SessionCreatingFail" | { "Communication": string } | { "OperationNotSupported": string } | { "IoOperation": string } | "InvalidData" | { "InvalidArgs": string } | { "Process": string } | { "Protocol": string } | { "SearchError": string } | "MultipleInitCall" | "SessionUnavailable" | { "NativeError": NativeError } | { "Grabbing": string } | { "Sde": string } | { "Decoding": string } | { "Encoding": string };
+
+/**
+ * Describes the details of an error.
+ */
+export type NativeError = { 
+/**
+ * The severity level of the error.
+ */
+severity: Severity, 
+/**
+ * The type or source of the error.
+ */
+kind: NativeErrorKind, 
+/**
+ * A detailed message describing the error.
+ */
+message: string | null, };
+
+/**
+ * Defines the source or type of an error.
+ */
+export type NativeErrorKind = "FileNotFound" | "UnsupportedFileType" | "ComputationFailed" | "Configuration" | "Interrupted" | "OperationSearch" | "NotYetImplemented" | "ChannelError" | "Io" | "Grabber";
+
+/**
+ * Indicates the severity level of an error.
+ */
+export type Severity = "WARNING" | "ERROR";
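
A sketch of rendering these errors for logging: string variants carry no payload, object variants hold a single field named after the variant (import path assumed):

```ts
import { ComputationError, NativeError } from 'platform/types/bindings';

function render(err: ComputationError): string {
    if (typeof err === 'string') {
        return err;
    }
    if ('NativeError' in err) {
        const native: NativeError = err.NativeError;
        return `${native.severity}/${native.kind}: ${native.message ?? 'no message'}`;
    }
    // Remaining variants wrap a plain string payload.
    return JSON.stringify(err);
}
```
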
diff --git a/application/platform/types/bindings/index.ts b/application/platform/types/bindings/index.ts
new file mode 100644
index 0000000000..9720071896
--- /dev/null
+++ b/application/platform/types/bindings/index.ts
@@ -0,0 +1,10 @@
+export * from './attachment';
+export * from './callback';
+export * from './command';
+export * from './error';
+export * from './lf_transition';
+export * from './miscellaneous';
+export * from './observe';
+export * from './progress';
+export * from './dlt';
+export * from './operations';
diff --git a/application/platform/types/bindings/lf_transition.ts b/application/platform/types/bindings/lf_transition.ts
new file mode 100644
index 0000000000..de4a4003ee
--- /dev/null
+++ b/application/platform/types/bindings/lf_transition.ts
@@ -0,0 +1,23 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { Ticks } from "./progress";
+
+/**
+ * Describes the progress of an operation.
+ */
+export type LifecycleTransition = { "Started": { 
+/**
+ * The unique identifier of the operation.
+ */
+uuid: string, 
+/**
+ * A user-friendly name of the operation for display purposes.
+ */
+alias: string, } } | { "Ticks": { 
+/**
+ * The unique identifier of the operation.
+ */
+uuid: string, 
+/**
+ * The progress data associated with the operation.
+ */
+ticks: Ticks, } } | { "Stopped": string };
diff --git a/application/platform/types/bindings/miscellaneous.ts b/application/platform/types/bindings/miscellaneous.ts
new file mode 100644
index 0000000000..8b2df171f1
--- /dev/null
+++ b/application/platform/types/bindings/miscellaneous.ts
@@ -0,0 +1,109 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Data about indices (log entry numbers). Used to provide information about
+ * the nearest search results relative to a specific log entry number.
+ */
+export type AroundIndexes = [number | undefined | null, number | undefined | null];
+
+/**
+ * Describes a match for a search condition.
+ */
+export type FilterMatch = {
+    /**
+     * The index (number) of the matching log entry.
+     */
+    index: number;
+    /**
+     * The identifiers of the filters (search conditions) that matched
+     * the specified log entry.
+     */
+    filters: Array<number>;
+};
+
+/**
+ * A list of matches for a search condition.
+ */
+export type FilterMatchList = Array<FilterMatch>;
+
+/**
+ * Information about a log entry.
+ */
+export type GrabbedElement = {
+    /**
+     * The unique identifier of the source.
+     */
+    source_id: number;
+    /**
+     * The textual content of the log entry.
+     */
+    content: string;
+    /**
+     * The position of the log entry in the overall stream.
+     */
+    pos: number;
+    /**
+     * The nature of the log entry, represented as a bitmask. Possible values include:
+     * - `SEARCH`: Nature = Nature(1)
+     * - `BOOKMARK`: Nature = Nature(1 << 1)
+     * - `EXPANDED`: Nature = Nature(1 << 5)
+     * - `BREADCRUMB`: Nature = Nature(1 << 6)
+     * - `BREADCRUMB_SEPARATOR`: Nature = Nature(1 << 7)
+     */
+    nature: number;
+};
+
+/**
+ * A list of log entries.
+ */
+export type GrabbedElementList = Array<GrabbedElement>;
+
+export type MapKeyValue = Map<string, string>;
+
+/**
+ * Representation of ranges. We cannot use std ranges because there is
+ * no way to derive Serialize and Deserialize for them.
+ */
+export type Range = { start: number; end: number };
+
+/**
+ * A list of ranges to read.
+ */
+export type Ranges = Array<Range>;
+
+/**
+ * A request to a stream that supports feedback, such as a terminal command
+ * that accepts input through `stdin`.
+ */
+export type SdeRequest = { WriteText: string } | { WriteBytes: Array<number> };
+
+/**
+ * The response from a source to a sent `SdeRequest`. Note that sending data
+ * with `SdeRequest` does not guarantee a response, as the behavior depends
+ * on the source.
+ */
+export type SdeResponse = {
+    /**
+     * The number of bytes received.
+     */
+    bytes: number;
+};
+
+/**
+ * Describes a data source.
+ */
+export type SourceDefinition = {
+    /**
+     * The unique identifier of the source.
+     */
+    id: number;
+    /**
+     * The user-friendly name of the source for display purposes.
+     */
+    alias: string;
+};
+
+/**
+ * A list of data sources.
+ */
+export type Sources = Array<SourceDefinition>;
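
A sketch of decoding the `nature` bitmask of `GrabbedElement`, restating the values listed in the comment above as assumed constants (import path assumed):

```ts
import { GrabbedElement } from 'platform/types/bindings';

const SEARCH = 1;
const BOOKMARK = 1 << 1;

// An element that is both a search match and bookmarked.
function isBookmarkedMatch(element: GrabbedElement): boolean {
    return (element.nature & SEARCH) !== 0 && (element.nature & BOOKMARK) !== 0;
}
```
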
diff --git a/application/platform/types/bindings/observe.ts b/application/platform/types/bindings/observe.ts
new file mode 100644
index 0000000000..9782464dc0
--- /dev/null
+++ b/application/platform/types/bindings/observe.ts
@@ -0,0 +1,159 @@
+import { DltFilterConfig } from './dlt';
+
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Settings for the DLT parser.
+ */
+export type DltParserSettings = { 
+/**
+ * Configuration for filtering DLT messages.
+ */
+filter_config: DltFilterConfig, 
+/**
+ * Paths to FIBEX files for additional interpretation of `payload` content.
+ */
+fibex_file_paths: Array<string> | null, 
+/**
+ * Indicates whether the source contains a `StorageHeader`. Set to `true` if applicable.
+ */
+with_storage_header: boolean, 
+/**
+ * Timezone for timestamp adjustment. If specified, timestamps are converted to this timezone.
+ */
+tz: string | null, };
+
+/**
+ * Supported file formats for observation.
+ */
+export type FileFormat = "PcapNG" | "PcapLegacy" | "Text" | "Binary";
+
+/**
+ * Multicast configuration information.
+ * - `multiaddr`: A valid multicast address.
+ * - `interface`: The address of the local interface used to join the multicast group.
+ *   If set to `INADDR_ANY`, the system selects an appropriate interface.
+ */
+export type MulticastInfo = { multiaddr: string, interface: string | null, };
+
+/**
+ * Options for observing data within a session.
+ */
+export type ObserveOptions = { 
+/**
+ * The description of the data source.
+ */
+origin: ObserveOrigin, 
+/**
+ * The parser configuration to be applied.
+ */
+parser: ParserType, };
+
+/**
+ * Describes the source of data for observation.
+ */
+export type ObserveOrigin = { "File": [string, FileFormat, string] } | { "Concat": Array<[string, FileFormat, string]> } | { "Stream": [string, Transport] };
+
+/**
+ * Specifies the parser to be used for processing session data.
+ */
+export type ParserType = { "Dlt": DltParserSettings } | { "SomeIp": SomeIpParserSettings } | { "Text": null };
+
+/**
+ * Configuration for executing terminal commands.
+ */
+export type ProcessTransportConfig = { 
+/**
+ * The working directory for the command.
+ */
+cwd: string, 
+/**
+ * The command to execute.
+ */
+command: string, 
+/**
+ * Environment variables. If empty, the default environment variables are used.
+ */
+envs: Map<string, string>, };
+
+/**
+ * Configuration for serial port connections.
+ */
+export type SerialTransportConfig = { 
+/**
+ * The path to the serial port.
+ */
+path: string, 
+/**
+ * The baud rate for the connection.
+ */
+baud_rate: number, 
+/**
+ * The number of data bits per frame.
+ */
+data_bits: number, 
+/**
+ * The flow control setting.
+ */
+flow_control: number, 
+/**
+ * The parity setting.
+ */
+parity: number, 
+/**
+ * The number of stop bits.
+ */
+stop_bits: number, 
+/**
+ * The delay in sending data, in milliseconds.
+ */
+send_data_delay: number, 
+/**
+ * Whether the connection is exclusive.
+ */
+exclusive: boolean, };
+
+/**
+ * Settings for the SomeIp parser.
+ */
+export type SomeIpParserSettings = { 
+/**
+ * Paths to FIBEX files for additional interpretation of `payload` content.
+ */
+fibex_file_paths: Array<string> | null, };
+
+/**
+ * Configuration for TCP connections.
+ */
+export type TCPTransportConfig = { 
+/**
+ * The address to bind the TCP connection to.
+ */
+bind_addr: string, };
+
+/**
+ * Describes the transport source for a session.
+ */
+export type Transport = { "Process": ProcessTransportConfig } | { "TCP": TCPTransportConfig } | { "UDP": UDPTransportConfig } | { "Serial": SerialTransportConfig };
+
+/**
+ * Configuration for UDP connections.
+ */
+export type UDPTransportConfig = { 
+/**
+ * The address to bind the UDP connection to.
+ */
+bind_addr: string, 
+/**
+ * A list of multicast configurations.
+ */
+multicast: Array<MulticastInfo>, };
+
+/**
+ * Configuration for UDP connections.
+ */
+export type UdpConnectionInfo = { 
+/**
+ * A list of multicast addresses to listen on.
+ */
+multicast_addr: Array<MulticastInfo>, };
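
A sketch of assembling `ObserveOptions` for a single DLT file; the identifier and file path are placeholders, and the tuple layout follows the `File` variant of `ObserveOrigin` above (import path assumed):

```ts
import { ObserveOptions } from 'platform/types/bindings';

const options: ObserveOptions = {
    // [identifier, format, path] — values here are placeholders.
    origin: { File: ['a1b2c3d4', 'Binary', '/tmp/trace.dlt'] },
    parser: {
        Dlt: {
            filter_config: { app_id_count: 0, context_id_count: 0 },
            fibex_file_paths: null,
            with_storage_header: true,
            tz: null,
        },
    },
};
```
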
diff --git a/application/platform/types/bindings/operations.ts b/application/platform/types/bindings/operations.ts
new file mode 100644
index 0000000000..5989527ca1
--- /dev/null
+++ b/application/platform/types/bindings/operations.ts
@@ -0,0 +1,48 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+/**
+ * Used to deliver the results of extracting values. This is used in the scope
+ * of the chart feature.
+ */
+export type ExtractedMatchValue = { 
+/**
+ * The index of the log entry (row number)
+ */
+index: number, 
+/**
+ * List of matches:
+ * `usize` - index of filter
+ * `Vec<String>` - list of extracted values
+ */
+values: Array<[number, Array<string>]>, };
+
+export type NearestPosition = { index: number, position: number, };
+
+/**
+ * (row_number, min_value_in_range, max_value_in_range, value)
+ * value - can be the last value in the range or some kind of average
+ */
+export type Point = { row: number, min: number, max: number, y_value: number, };
+
+export type ResultBool = boolean;
+
+/**
+ * The list of `ExtractedMatchValue`
+ */
+export type ResultExtractedMatchValues = Array<ExtractedMatchValue>;
+
+export type ResultNearestPosition = NearestPosition | null;
+
+/**
+ * Scaled chart data
+ */
+export type ResultScaledDistribution = Array<Array<[number, number]>>;
+
+export type ResultSearchValues = Map<number, Point[]>;
+
+/**
+ * Used only for debugging the session lifecycle
+ */
+export type ResultSleep = { sleep_well: boolean, };
+
+export type ResultU64 = number;
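
A sketch of flattening `ExtractedMatchValue` entries into `(row, filter index, value)` triples for charting (import path assumed):

```ts
import { ExtractedMatchValue } from 'platform/types/bindings';

function flatten(matches: ExtractedMatchValue[]): Array<[number, number, string]> {
    const out: Array<[number, number, string]> = [];
    for (const match of matches) {
        for (const [filterIndex, values] of match.values) {
            for (const value of values) {
                out.push([match.index, filterIndex, value]);
            }
        }
    }
    return out;
}
```
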
diff --git a/application/platform/types/bindings/progress.ts b/application/platform/types/bindings/progress.ts
new file mode 100644
index 0000000000..e02a8793ee
--- /dev/null
+++ b/application/platform/types/bindings/progress.ts
@@ -0,0 +1,43 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { Severity } from "./error";
+
+/**
+ * Represents a notification about an event (including potential errors)
+ * related to processing a specific log entry, if such data is available.
+ */
+export type Notification = { 
+/**
+ * The severity level of the event.
+ */
+severity: Severity, 
+/**
+ * The content or message describing the event.
+ */
+content: string, 
+/**
+ * The log entry number that triggered the event, if applicable.
+ */
+line: number | null, };
+
+/**
+ * Describes the progress of an operation.
+ */
+export type Progress = { "Ticks": Ticks } | { "Notification": Notification } | "Stopped";
+
+/**
+ * Provides detailed information about the progress of an operation.
+ */
+export type Ticks = { 
+/**
+ * The current progress count, typically representing `n` out of `100%`.
+ */
+count: number, 
+/**
+ * The name of the current progress stage, for user display purposes.
+ */
+state: string | null, 
+/**
+ * The total progress counter. Usually `100`, but for file operations,
+ * it might represent the file size, where `count` indicates the number of bytes read.
+ */
+total: number | null | undefined, };
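
A sketch of turning `Ticks` into a display percentage, treating a missing or zero `total` as indeterminate (import path assumed):

```ts
import { Ticks } from 'platform/types/bindings';

function toPercent(ticks: Ticks): number | undefined {
    if (ticks.total === null || ticks.total === undefined || ticks.total === 0) {
        return undefined;
    }
    return Math.min(100, Math.round((ticks.count / ticks.total) * 100));
}
```
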
diff --git a/application/platform/types/content.ts b/application/platform/types/content.ts
index 9c4e36780f..e4f54f7c3e 100644
--- a/application/platform/types/content.ts
+++ b/application/platform/types/content.ts
@@ -1,23 +1,9 @@
 import { error } from '../log/utils';
 import { Mutable } from './unity/mutable';
+import { AttachmentInfo } from './bindings/attachment';
 
 import * as obj from '../env/obj';
 
-export interface IGrabbedContent {
-    grabbed_elements: IGrabbedElement[];
-}
-
-/**
- * Output for @grabStreamChunk method of session
- * (application/apps/rustcore/ts/src/native/native.session.ts)
- */
-export interface IGrabbedElement {
-    source_id: number;
-    content: string;
-    position: number;
-    nature: number;
-}
-
 export enum IndexingMode {
     Regular = 0,
     Breadcrumbs = 1,
@@ -88,16 +74,6 @@ export class Nature {
     }
 }
 
-export interface IAttachment {
-    uuid: string;
-    filepath: string;
-    name: string;
-    ext: string | undefined;
-    size: number;
-    mime: string | undefined;
-    messages: number[];
-}
-
 export class Attachment {
     public readonly uuid: string;
     public readonly filepath: string;
@@ -127,13 +103,13 @@ export class Attachment {
         }
     }
 
-    constructor(attachment: IAttachment) {
+    constructor(attachment: AttachmentInfo) {
         this.uuid = attachment.uuid;
         this.filepath = attachment.filepath;
         this.name = attachment.name;
-        this.ext = attachment.ext;
+        this.ext = attachment.ext ? attachment.ext : undefined;
         this.size = attachment.size;
-        this.mime = attachment.mime;
+        this.mime = attachment.mime ? attachment.mime : undefined;
         this.messages = attachment.messages;
     }
 
diff --git a/application/platform/types/files.ts b/application/platform/types/files.ts
index 7c55e3be5c..24c7c171c0 100644
--- a/application/platform/types/files.ts
+++ b/application/platform/types/files.ts
@@ -25,7 +25,7 @@ const EntityTypeRev: { [key: string]: EntityType } = {
 export interface Entity {
     name: string;
     fullname: string;
-    type: EntityType;
+    kind: EntityType;
     details?: {
         filename: string;
         full: string;
@@ -46,7 +46,7 @@ export function entityFromObj(smth: { [key: string]: unknown }): Entity {
     const entity: Entity = {
         name: obj.getAsNotEmptyString(smth, 'name'),
         fullname: obj.getAsNotEmptyString(smth, 'fullname'),
-        type: entityType,
+        kind: entityType,
         details: undefined,
     };
     if (smth['details'] !== null && typeof smth['details'] === 'object') {
diff --git a/application/platform/types/filter.ts b/application/platform/types/filter.ts
index 4efbfe7659..e5d229066c 100644
--- a/application/platform/types/filter.ts
+++ b/application/platform/types/filter.ts
@@ -21,13 +21,18 @@ export interface FilterStyle {
     background: string;
 }
 
+export interface FilterMatch {
+    index: number;
+    filters: number[];
+}
+
 export interface ISearchStats {
-    stats: { [key: string]: number };
+    stats: Map<string, number>;
 }
 
 export interface ISearchUpdated {
     found: number;
-    stat: { [key: string]: number };
+    stat: Map<string, number>;
 }
 
 export enum EFlag {
@@ -38,24 +43,16 @@ export enum EFlag {
 
 export type ISearchMap = Array<[number, number][]>;
 
-export type IValuesMap = { [key: number]: [number, number, number, number][] };
-
 export type IValuesMinMaxMap = { [key: number]: [number, number] };
 
-export interface INearest {
-    index: number;
-    position: number;
-}
-
 export interface IExtractedMatch {
     filter: IFilter;
     values: string[];
 }
 
 export interface IExtractedValueSrc {
-    index: number; // row position in the stream
-    // [filter_index, [values]]
-    values: Array<Array<number | string[]>>;
+    index: number;
+    values: Array<[number, string[]]>;
 }
 export type TExtractedValuesSrc = IExtractedValueSrc[];
 
diff --git a/application/platform/types/index.ts b/application/platform/types/index.ts
index ae7a40e175..27fce49da4 100644
--- a/application/platform/types/index.ts
+++ b/application/platform/types/index.ts
@@ -8,3 +8,5 @@ export * as storage from './storage';
 export * as github from './github';
 export * as comment from './comment';
 export * as bookmark from './bookmark';
+export * as sde from './sde';
+export * as bindings from './bindings';
diff --git a/application/platform/types/observe/parser/dlt/index.ts b/application/platform/types/observe/parser/dlt/index.ts
index d60d943c82..94e69f696c 100644
--- a/application/platform/types/observe/parser/dlt/index.ts
+++ b/application/platform/types/observe/parser/dlt/index.ts
@@ -10,29 +10,11 @@ import * as obj from '../../../../env/obj';
 import * as Origin from '../../origin/index';
 import * as str from '../../../../env/str';
 
-export interface LevelDistribution {
-    non_log: number;
-    log_fatal: number;
-    log_error: number;
-    log_warning: number;
-    log_info: number;
-    log_debug: number;
-    log_verbose: number;
-    log_invalid: number;
-}
-
 export function getLogLevelName(level: number): string {
     const name = (DltLogLevelNames as Record<string, string>)[level];
     return name === undefined ? 'unknown' : name;
 }
 
-export interface StatisticInfo {
-    app_ids: [string, LevelDistribution][];
-    context_ids: [string, LevelDistribution][];
-    ecu_ids: [string, LevelDistribution][];
-    contained_non_verbose: boolean;
-}
-
 export const DltLogLevelNames = {
     1: 'Fatal',
     2: 'Error',
diff --git a/application/platform/types/range.ts b/application/platform/types/range.ts
index e5001e2f63..7c0692244a 100644
--- a/application/platform/types/range.ts
+++ b/application/platform/types/range.ts
@@ -1,8 +1,8 @@
 import * as num from '../env/num';
 
 export interface IRange {
-    from: number;
-    to: number;
+    start: number;
+    end: number;
 }
 
 export function fromTuple(
@@ -17,7 +17,7 @@ export function fromTuple(
         if (!num.isValidU32(range[1])) {
             return new Error(`End of range isn't valid: ${range[1]}; ${JSON.stringify(range)}`);
         }
-        return { from: range[0], to: range[1] };
+        return { start: range[0], end: range[1] };
     } else if (typeof range === 'object' && range !== undefined && range !== null) {
         const asObj = range as { start: number; end: number };
         if (!num.isValidU32(asObj.start)) {
@@ -28,23 +28,23 @@ export function fromTuple(
         if (!num.isValidU32(asObj.end)) {
             return new Error(`End of range isn't valid: ${asObj.end}; ${JSON.stringify(range)}`);
         }
-        return { from: asObj.start, to: asObj.end };
+        return { start: asObj.start, end: asObj.end };
     } else {
         return new Error(`Expecting tuple: [number, number]: ${JSON.stringify(range)}`);
     }
 }
 
 export class Range {
-    public readonly from: number;
-    public readonly to: number;
+    public readonly start: number;
+    public readonly end: number;
     public readonly spec: {
-        // true - will use i >= from; false - i > from
+        // true - will use i >= start; false - i > start
         left: boolean;
-        // true - will use i <= to; false - i > to
+        // true - will use i <= end; false - i > end
         right: boolean;
-        // true - range in this case will be valid for i < from
+        // true - range in this case will be valid for i < start
         before: boolean;
-        // true - range in this case will be valid for i > to
+        // true - range in this case will be valid for i > end
         after: boolean;
     } = {
         left: true,
@@ -57,34 +57,34 @@ export class Range {
         return src.filter((r) => range.in(index(r)));
     }
 
-    constructor(from: number, to: number) {
+    constructor(start: number, end: number) {
         if (
-            from > to ||
-            from < 0 ||
-            to < 0 ||
-            isNaN(from) ||
-            isNaN(to) ||
-            !isFinite(from) ||
-            !isFinite(to)
+            start > end ||
+            start < 0 ||
+            end < 0 ||
+            isNaN(start) ||
+            isNaN(end) ||
+            !isFinite(start) ||
+            !isFinite(end)
         ) {
-            throw new Error(`Invalid range: [${from} - ${to}]`);
+            throw new Error(`Invalid range: [${start} - ${end}]`);
         }
-        this.from = from;
-        this.to = to;
+        this.start = start;
+        this.end = end;
     }
 
     public asObj(): IRange {
-        return { from: this.from, to: this.to };
+        return { start: this.start, end: this.end };
     }
 
     public len(): number {
-        return this.to - this.from;
+        return this.end - this.start;
     }
 
     public get(): IRange {
         return {
-            from: this.from,
-            to: this.to,
+            start: this.start,
+            end: this.end,
         };
     }
 
@@ -109,38 +109,38 @@ export class Range {
     }
 
     public in(int: number): boolean {
-        if (this.spec.before && this.spec.left && int <= this.from) {
+        if (this.spec.before && this.spec.left && int <= this.start) {
             return true;
         }
-        if (this.spec.before && !this.spec.left && int < this.from) {
+        if (this.spec.before && !this.spec.left && int < this.start) {
             return true;
         }
-        if (this.spec.after && this.spec.right && int >= this.to) {
+        if (this.spec.after && this.spec.right && int >= this.end) {
             return true;
         }
-        if (this.spec.after && !this.spec.right && int > this.to) {
+        if (this.spec.after && !this.spec.right && int > this.end) {
             return true;
         }
         if (this.spec.after || this.spec.before) {
             return false;
         }
-        if (this.spec.left && this.spec.right && int >= this.from && int <= this.to) {
+        if (this.spec.left && this.spec.right && int >= this.start && int <= this.end) {
             return true;
         }
-        if (!this.spec.left && this.spec.right && int > this.from && int <= this.to) {
+        if (!this.spec.left && this.spec.right && int > this.start && int <= this.end) {
             return true;
         }
-        if (this.spec.left && !this.spec.right && int >= this.from && int < this.to) {
+        if (this.spec.left && !this.spec.right && int >= this.start && int < this.end) {
             return true;
         }
-        if (!this.spec.left && !this.spec.right && int > this.from && int < this.to) {
+        if (!this.spec.left && !this.spec.right && int > this.start && int < this.end) {
             return true;
         }
         return false;
     }
 
     public equal(range: Range): boolean {
-        if (this.from !== range.from || this.to !== range.to) {
+        if (this.start !== range.start || this.end !== range.end) {
             return false;
         }
         if (
@@ -161,27 +161,27 @@ export function fromIndexes(indexes: number[]): IRange[] {
     }
     const ranges: IRange[] = [];
     indexes.sort((a, b) => (a >= b ? 1 : -1));
-    let from: number = -1;
-    let to = -1;
+    let start: number = -1;
+    let end = -1;
     indexes.forEach((i) => {
         if (i < 0 || isNaN(i) || !isFinite(i)) {
             throw new Error(`Invalid index: ${i}`);
         }
-        if (to === -1) {
-            to = i;
+        if (end === -1) {
+            end = i;
         }
-        if (from === -1) {
-            from = i;
+        if (start === -1) {
+            start = i;
             return;
         }
-        if (i === to + 1) {
-            to = i;
+        if (i === end + 1) {
+            end = i;
             return;
         }
-        ranges.push({ from, to });
-        from = i;
-        to = i;
+        ranges.push({ start, end });
+        start = i;
+        end = i;
     });
-    from !== -1 && ranges.push({ from, to: indexes[indexes.length - 1] });
+    start !== -1 && ranges.push({ start, end: indexes[indexes.length - 1] });
     return ranges;
 }
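
A sketch of how consumers read ranges after the `from`/`to` to `start`/`end` rename (the `platform/types/range` import specifier is an assumption):

```ts
import { IRange, fromIndexes } from 'platform/types/range';

const ranges: IRange[] = fromIndexes([1, 2, 3, 7, 8, 12]);
// => [{ start: 1, end: 3 }, { start: 7, end: 8 }, { start: 12, end: 12 }]
const covered = ranges.reduce((sum, r) => sum + (r.end - r.start + 1), 0); // 6
```
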
diff --git a/application/platform/types/shells.ts b/application/platform/types/shells.ts
deleted file mode 100644
index 7c0686256b..0000000000
--- a/application/platform/types/shells.ts
+++ /dev/null
@@ -1,72 +0,0 @@
-import { utils } from '../log';
-import * as obj from '../env/obj';
-
-export class ShellProfile {
-    public readonly name: string;
-    public readonly path: string;
-    public readonly envvars: Map<string, string> | undefined;
-    public readonly symlink: boolean;
-
-    public static fromObj(smth: unknown): ShellProfile | Error {
-        try {
-            const name: string = obj.getAsNotEmptyString(smth, 'name');
-            const path: string = obj.getAsNotEmptyString(smth, 'path');
-            const symlink: boolean = obj.getAsBool(smth, 'symlink');
-            let envvars: Map<string, string> | undefined = undefined;
-            if ((smth as any).envvars instanceof Map) {
-                envvars = (smth as any).envvars;
-            } else if (
-                (smth as any).envvars !== null &&
-                (smth as any).envvars !== undefined &&
-                typeof (smth as any).envvars === 'object'
-            ) {
-                envvars = new Map();
-                Object.keys((smth as any).envvars).forEach((key: string) => {
-                    envvars?.set(key, (smth as any).envvars[key]);
-                });
-            }
-            return new ShellProfile(name, path, symlink, envvars);
-        } catch (err) {
-            return new Error(utils.error(err));
-        }
-    }
-
-    public static fromStr(str: string): ShellProfile | Error {
-        try {
-            const profile = JSON.parse(str);
-            const name: string = obj.getAsNotEmptyString(profile, 'name');
-            const path: string = obj.getAsNotEmptyString(profile, 'path');
-            const symlink: boolean = obj.getAsBool(profile, 'symlink');
-            let envvars: Map<string, string> | undefined = undefined;
-            if (
-                profile.envvars !== null &&
-                profile.envvars !== undefined &&
-                typeof profile.envvars === 'object'
-            ) {
-                envvars = new Map();
-                Object.keys(profile.envvars).forEach((key: string) => {
-                    envvars?.set(key, profile.envvars[key]);
-                });
-            }
-            return new ShellProfile(name, path, symlink, envvars);
-        } catch (err) {
-            return new Error(utils.error(err));
-        }
-    }
-
-    constructor(
-        name: string,
-        path: string,
-        symlink: boolean,
-        envvars: Map<string, string> | undefined,
-    ) {
-        this.path = path;
-        this.name = name;
-        this.symlink = symlink;
-        this.envvars = envvars;
-    }
-
-    public getEnvvarsCount(): number {
-        return this.envvars === undefined ? 0 : this.envvars.size;
-    }
-}
diff --git a/cli/CHANGELOG.md b/cli/CHANGELOG.md
index 89821e3afb..972e6c632f 100644
--- a/cli/CHANGELOG.md
+++ b/cli/CHANGELOG.md
@@ -1,3 +1,9 @@
+# 0.3.0
+
+## Changes:
+
+* Include protocol-related functionality
+
 # 0.2.14
 
 ## Changes:
diff --git a/cli/Cargo.lock b/cli/Cargo.lock
index 5a23991b21..e89cfaba87 100644
--- a/cli/Cargo.lock
+++ b/cli/Cargo.lock
@@ -158,7 +158,7 @@ checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
 
 [[package]]
 name = "cargo-chipmunk"
-version = "0.2.14"
+version = "0.3.0"
 dependencies = [
  "anyhow",
  "clap",
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index aebb82d2c7..b2c65fa9a6 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "cargo-chipmunk"
-version = "0.2.14"
+version = "0.3.0"
 authors = ["Ammar Abou Zor <ammar.abou.zor@accenture.com>"]
 edition = "2021"
 description = "CLI Tool for chipmunk application development"
diff --git a/cli/dir_checksum/tests/integration_tests.rs b/cli/dir_checksum/tests/integration_tests.rs
index b1d3271456..b4e611da5c 100644
--- a/cli/dir_checksum/tests/integration_tests.rs
+++ b/cli/dir_checksum/tests/integration_tests.rs
@@ -146,7 +146,7 @@ fn hash_individual_sub_directory() -> anyhow::Result<()> {
 
     // Create empty file
     let empty_file_path = &sub_dir.join("empty.txt");
-    let empty_file = File::create(&empty_file_path)?;
+    let empty_file = File::create(empty_file_path)?;
     drop(empty_file);
 
     let items = calc_individual_checksum(tmp_dir.path())?;
diff --git a/cli/integration_tests/build.py b/cli/integration_tests/build.py
index b4b26c1b03..b1b211de92 100644
--- a/cli/integration_tests/build.py
+++ b/cli/integration_tests/build.py
@@ -65,6 +65,8 @@ def run_build_tests():
     # Shared
     "platform/dist",
     "platform/node_modules",
+    # Protocol
+    "apps/protocol/pkg",
     # Binding
     "apps/rustcore/rs-bindings/dist",
     "apps/rustcore/rs-bindings/target",
diff --git a/cli/integration_tests/clean.py b/cli/integration_tests/clean.py
index 8576614d2f..616c5c4c68 100644
--- a/cli/integration_tests/clean.py
+++ b/cli/integration_tests/clean.py
@@ -19,6 +19,7 @@
     # binaries to avoid failing on Windows when CLI tool tries to remove it's own binary.
     "core",
     "shared",
+    "protocol",
     "binding",
     "wrapper",
     "wasm",
@@ -38,6 +39,8 @@
     # Shared
     "platform/dist",
     "platform/node_modules",
+    # Protocol
+    "apps/protocol/pkg",
     # Binding
     "apps/rustcore/rs-bindings/dist",
     "apps/rustcore/rs-bindings/target",
diff --git a/cli/src/jobs_runner/job_definition.rs b/cli/src/jobs_runner/job_definition.rs
index 7150417e7a..7653068a4f 100644
--- a/cli/src/jobs_runner/job_definition.rs
+++ b/cli/src/jobs_runner/job_definition.rs
@@ -89,7 +89,7 @@ mod tests {
         for target in Target::all() {
             for job_type in JobType::all() {
                 if !target.has_job(*job_type) {
-                    let job_def = JobDefinition::new(*target, job_type.clone());
+                    let job_def = JobDefinition::new(*target, *job_type);
                     assert!(
                         job_def.run_intern(false).await.is_none(),
                         "'{}' has no job for '{}' but it returns Some when calling run",
diff --git a/cli/src/jobs_runner/jobs_resolver.rs b/cli/src/jobs_runner/jobs_resolver.rs
index f467b924a3..764b4f86a7 100644
--- a/cli/src/jobs_runner/jobs_resolver.rs
+++ b/cli/src/jobs_runner/jobs_resolver.rs
@@ -142,6 +142,7 @@ fn is_job_involved(target: Target, current_job: JobType, main_job: &JobType) ->
                 | Target::Binding
                 | Target::Wrapper
                 | Target::Wasm
+                | Target::Protocol
                 | Target::Client
                 | Target::App => true,
             },
@@ -156,12 +157,12 @@ fn is_job_involved(target: Target, current_job: JobType, main_job: &JobType) ->
                 // before running the actual tests.
                 Target::Wrapper | Target::Wasm => true,
 
-                // Shared and Bindings don't have tests but they should be built for Wrapper and Wasm
+                // Shared, Bindings and Protocol don't have tests but they should be built for Wrapper and Wasm
                 // tests
-                Target::Shared | Target::Binding => {
+                Target::Shared | Target::Binding | Target::Protocol => {
                     assert!(
                         !matches!(current_job, JobType::Test { .. }),
-                        "Shared and Bindings targets don't have test jobs currently"
+                        "Shared, Bindings and Protocol targets don't have test jobs currently"
                     );
                     true
                 }
@@ -171,7 +172,7 @@ fn is_job_involved(target: Target, current_job: JobType, main_job: &JobType) ->
                 Target::Client | Target::App => {
                     assert!(
                         !matches!(current_job, JobType::Test { .. }),
-                        "Client and App targets don't have test jobs currently"
+                        "Client, App and Protocol targets don't have test jobs currently"
                     );
                     false
                 }
@@ -251,7 +252,12 @@ mod tests {
 
     #[test]
     fn flatten_wrapper_target() {
-        let expected = BTreeSet::from([Target::Shared, Target::Binding, Target::Wrapper]);
+        let expected = BTreeSet::from([
+            Target::Shared,
+            Target::Binding,
+            Target::Protocol,
+            Target::Wrapper,
+        ]);
         assert_eq!(flatten_targets_for_build(&[Target::Wrapper]), expected);
     }
 
@@ -259,6 +265,7 @@ mod tests {
     fn flatten_app_target() {
         let expected = BTreeSet::from([
             Target::Shared,
+            Target::Protocol,
             Target::Binding,
             Target::Wrapper,
             Target::Client,
@@ -272,13 +279,18 @@ mod tests {
     #[test]
     fn flatten_all_target() {
         let expected = BTreeSet::from_iter(Target::all().to_owned());
-        assert_eq!(flatten_targets_for_build(&Target::all()), expected);
+        assert_eq!(flatten_targets_for_build(Target::all()), expected);
     }
 
     #[test]
     fn flatten_core_client_target() {
-        let expected =
-            BTreeSet::from_iter([Target::Core, Target::Shared, Target::Wasm, Target::Client]);
+        let expected = BTreeSet::from_iter([
+            Target::Core,
+            Target::Protocol,
+            Target::Shared,
+            Target::Wasm,
+            Target::Client,
+        ]);
         assert_eq!(
             flatten_targets_for_build(&[Target::Core, Target::Client]),
             expected
@@ -320,6 +332,10 @@ mod tests {
                 JobDefinition::new(Target::Shared, JobType::Install { production }),
                 vec![],
             ),
+            (
+                JobDefinition::new(Target::Protocol, JobType::Build { production }),
+                vec![],
+            ),
             (
                 JobDefinition::new(Target::Shared, JobType::Build { production }),
                 vec![JobDefinition::new(
@@ -332,6 +348,7 @@ mod tests {
                 vec![
                     JobDefinition::new(Target::Shared, JobType::Install { production }),
                     JobDefinition::new(Target::Shared, JobType::Build { production }),
+                    JobDefinition::new(Target::Protocol, JobType::Build { production }),
                 ],
             ),
             (
@@ -339,6 +356,7 @@ mod tests {
                 vec![
                     JobDefinition::new(Target::Shared, JobType::Install { production }),
                     JobDefinition::new(Target::Shared, JobType::Build { production }),
+                    JobDefinition::new(Target::Protocol, JobType::Build { production }),
                     JobDefinition::new(Target::Binding, JobType::Install { production }),
                 ],
             ),
@@ -357,13 +375,83 @@ mod tests {
         );
     }
 
+    #[test]
+    /// Ensure testing ts targets will invoke all building targets involved in the dependencies tree.
+    fn resolve_test_wrapper() {
+        let production = false;
+        let expected = BTreeMap::from([
+            (
+                JobDefinition::new(Target::Shared, JobType::Install { production }),
+                vec![],
+            ),
+            (
+                JobDefinition::new(Target::Protocol, JobType::Build { production }),
+                vec![],
+            ),
+            (
+                JobDefinition::new(Target::Shared, JobType::Build { production }),
+                vec![JobDefinition::new(
+                    Target::Shared,
+                    JobType::Install { production },
+                )],
+            ),
+            (
+                JobDefinition::new(Target::Binding, JobType::Install { production }),
+                vec![
+                    JobDefinition::new(Target::Shared, JobType::Install { production }),
+                    JobDefinition::new(Target::Shared, JobType::Build { production }),
+                    JobDefinition::new(Target::Protocol, JobType::Build { production }),
+                ],
+            ),
+            (
+                JobDefinition::new(Target::Binding, JobType::Build { production }),
+                vec![
+                    JobDefinition::new(Target::Shared, JobType::Install { production }),
+                    JobDefinition::new(Target::Shared, JobType::Build { production }),
+                    JobDefinition::new(Target::Protocol, JobType::Build { production }),
+                    JobDefinition::new(Target::Binding, JobType::Install { production }),
+                ],
+            ),
+            (
+                JobDefinition::new(Target::Binding, JobType::AfterBuild { production }),
+                vec![
+                    JobDefinition::new(Target::Binding, JobType::Install { production }),
+                    JobDefinition::new(Target::Binding, JobType::Build { production }),
+                ],
+            ),
+            (
+                JobDefinition::new(Target::Wrapper, JobType::Build { production }),
+                vec![
+                    JobDefinition::new(Target::Shared, JobType::Install { production }),
+                    JobDefinition::new(Target::Shared, JobType::Build { production }),
+                    JobDefinition::new(Target::Protocol, JobType::Build { production }),
+                    JobDefinition::new(Target::Binding, JobType::Install { production }),
+                    JobDefinition::new(Target::Binding, JobType::Build { production }),
+                    JobDefinition::new(Target::Binding, JobType::AfterBuild { production }),
+                ],
+            ),
+            (
+                JobDefinition::new(Target::Wrapper, JobType::Test { production }),
+                vec![JobDefinition::new(
+                    Target::Wrapper,
+                    JobType::Build { production },
+                )],
+            ),
+        ]);
+
+        assert_eq!(
+            expected,
+            resolve(&[Target::Wrapper], JobType::Test { production })
+        );
+    }
+
     #[test]
     /// Resolves build for all targets and checks some cases in the dependencies-tree since the
     /// tree is too huge to be tested one by one.
     fn resolve_build_all_fuzzy() {
         let production = false;
 
-        let tree = resolve(&Target::all(), JobType::Build { production });
+        let tree = resolve(Target::all(), JobType::Build { production });
 
         assert!(
             tree.get(&JobDefinition::new(
diff --git a/cli/src/target/mod.rs b/cli/src/target/mod.rs
index 69d49f1437..a3fbddb323 100644
--- a/cli/src/target/mod.rs
+++ b/cli/src/target/mod.rs
@@ -27,6 +27,7 @@ mod binding;
 mod cli;
 mod client;
 mod core;
+mod protocol;
 mod target_kind;
 mod updater;
 mod wasm;
@@ -45,6 +46,8 @@ pub enum Target {
     Core,
     /// Represents the path `application/platform`
     Shared,
+    /// Represents the path `application/apps/protocol`
+    Protocol,
     /// Represents the path `application/apps/rustcore/rs-bindings`
     Binding,
     /// Represents the path `application/apps/rustcore/ts-bindings`
@@ -114,6 +117,7 @@ impl std::fmt::Display for Target {
             match self {
                 Target::Core => "Core",
                 Target::Wrapper => "Wrapper",
+                Target::Protocol => "Protocol",
                 Target::Binding => "Binding",
                 Target::Cli => "Cli",
                 Target::Client => "Client",
@@ -136,6 +140,7 @@ impl FromStr for Target {
             // This check to remember to add the newly added enums to this function
             match T::App {
                 T::Core => (),
+                T::Protocol => (),
                 T::Binding => (),
                 T::Wrapper => (),
                 T::Client => (),
@@ -150,6 +155,7 @@ impl FromStr for Target {
         match input {
             "Core" => Ok(T::Core),
             "Wrapper" => Ok(T::Wrapper),
+            "Protocol" => Ok(T::Protocol),
             "Binding" => Ok(T::Binding),
             "Cli" => Ok(T::Cli),
             "Client" => Ok(T::Client),
@@ -169,6 +175,7 @@ impl Target {
             // This check to remember to add the newly added enums to this function
             match Target::App {
                 Target::Core => (),
+                Target::Protocol => (),
                 Target::Binding => (),
                 Target::Wrapper => (),
                 Target::Client => (),
@@ -182,6 +189,7 @@ impl Target {
 
         [
             Target::Binding,
+            Target::Protocol,
             Target::Cli,
             Target::App,
             Target::Core,
@@ -206,6 +214,7 @@ impl Target {
     pub fn relative_cwd(self) -> PathBuf {
         let sub_parts = match self {
             Target::Core => ["application", "apps", "indexer"].iter(),
+            Target::Protocol => ["application", "apps", "protocol"].iter(),
             Target::Binding => ["application", "apps", "rustcore", "rs-bindings"].iter(),
             Target::Wrapper => ["application", "apps", "rustcore", "ts-bindings"].iter(),
             Target::Client => ["application", "client"].iter(),
@@ -222,9 +231,12 @@ impl Target {
     /// Provide the kind of the target between Rust or Type-Script
     pub fn kind(self) -> TargetKind {
         match self {
-            Target::Binding | Target::Core | Target::Cli | Target::Wasm | Target::Updater => {
-                TargetKind::Rs
-            }
+            Target::Protocol
+            | Target::Binding
+            | Target::Core
+            | Target::Cli
+            | Target::Wasm
+            | Target::Updater => TargetKind::Rs,
             Target::Client | Target::Wrapper | Target::Shared | Target::App => TargetKind::Ts,
         }
     }
@@ -232,12 +244,15 @@ impl Target {
     /// Provides the target which this target depend on
     pub fn deps(self) -> Vec<Target> {
         match self {
-            Target::Core | Target::Cli | Target::Shared | Target::Wasm | Target::Updater => {
-                Vec::new()
-            }
-            Target::Binding => vec![Target::Shared],
-            Target::Wrapper => vec![Target::Binding, Target::Shared],
-            Target::Client => vec![Target::Shared, Target::Wasm],
+            Target::Core
+            | Target::Cli
+            | Target::Shared
+            | Target::Wasm
+            | Target::Updater
+            | Target::Protocol => Vec::new(),
+            Target::Binding => vec![Target::Shared, Target::Protocol],
+            Target::Wrapper => vec![Target::Binding, Target::Shared, Target::Protocol],
+            Target::Client => vec![Target::Shared, Target::Wasm, Target::Protocol],
             Target::App => vec![Target::Wrapper, Target::Client, Target::Updater],
         }
     }
@@ -251,7 +266,11 @@ impl Target {
                 Target::Binding | Target::Client | Target::Shared | Target::App | Target::Wasm => {
                     true
                 }
-                Target::Core | Target::Wrapper | Target::Updater | Target::Cli => false,
+                Target::Core
+                | Target::Wrapper
+                | Target::Updater
+                | Target::Cli
+                | Target::Protocol => false,
             },
 
             JobType::AfterBuild { .. } => match self {
@@ -261,7 +280,8 @@ impl Target {
                 | Target::Wrapper
                 | Target::Wasm
                 | Target::Updater
-                | Target::Cli => false,
+                | Target::Cli
+                | Target::Protocol => false,
             },
             JobType::Test { .. } => match self {
                 Target::Wrapper | Target::Core | Target::Cli | Target::Wasm => true,
@@ -269,7 +289,8 @@ impl Target {
                 | Target::Binding
                 | Target::Client
                 | Target::Updater
-                | Target::App => false,
+                | Target::App
+                | Target::Protocol => false,
             },
             JobType::Run { .. } => false,
         }
@@ -280,6 +301,7 @@ impl Target {
         let build_cmd = match self {
             Target::Binding => binding::get_build_cmd(prod)?,
             Target::Wasm => wasm::get_build_cmd(prod),
+            Target::Protocol => protocol::get_build_cmd(prod),
             Target::Updater => updater::get_build_cmd(),
             rest_targets => rest_targets.kind().build_cmd(prod),
         };
@@ -345,6 +367,7 @@ impl Target {
             | Target::Wrapper
             | Target::Client
             | Target::Updater
+            | Target::Protocol
             | Target::App => None,
         }
     }
@@ -466,6 +489,9 @@ impl Target {
                 paths_to_remove.push(self.cwd().join("test_output"));
                 paths_to_remove.push(self.cwd().join("node_modules"));
             }
+            Target::Protocol => {
+                paths_to_remove.push(self.cwd().join("pkg"));
+            }
             Target::Wrapper => {
                 paths_to_remove.push(self.cwd().join("spec").join("build"));
                 let index_node_path = self.cwd().join("src").join("native").join("index.node");
@@ -575,6 +601,7 @@ impl Target {
             | Target::Wrapper
             | Target::Wasm
             | Target::Updater
+            | Target::Protocol
             | Target::Cli => return None,
         };
 
diff --git a/cli/src/target/protocol.rs b/cli/src/target/protocol.rs
new file mode 100644
index 0000000000..bc59bd05d6
--- /dev/null
+++ b/cli/src/target/protocol.rs
@@ -0,0 +1,19 @@
+use crate::dev_tools::DevTool;
+
+use super::ProcessCommand;
+
+pub fn get_build_cmd(prod: bool) -> ProcessCommand {
+    let env = if prod { "--release" } else { "--dev" };
+
+    ProcessCommand::new(
+        DevTool::WasmPack.cmd(),
+        vec![
+            String::from("build"),
+            String::from(env),
+            String::from("--target"),
+            String::from("nodejs"),
+            String::from("--color"),
+            String::from("always"),
+        ],
+    )
+}
diff --git a/rakefile.rb b/rakefile.rb
index d3734d12a9..902a552ed7 100644
--- a/rakefile.rb
+++ b/rakefile.rb
@@ -10,6 +10,7 @@
 require './scripts/elements/electron'
 require './scripts/elements/release'
 require './scripts/elements/updater'
+require './scripts/elements/protocol'
 require './scripts/tools/change_checker'
 
 CLOBBER.include("#{Paths::CLIENT}/.angular")
@@ -19,6 +20,7 @@
 
 namespace :clean do
   task all: [
+    'protocol:clean',
     'bindings:clean',
     'electron:clean',
     'client:clean',
diff --git a/scripts/elements/bindings.rb b/scripts/elements/bindings.rb
index cac836e8e7..0365d3bdee 100644
--- a/scripts/elements/bindings.rb
+++ b/scripts/elements/bindings.rb
@@ -85,6 +85,7 @@ def self.set_environment_vars
       exporting
       map
       observe
+      observing
       indexes
       concat
       cancel
@@ -92,6 +93,7 @@ def self.set_environment_vars
       stream
       promises
       benchmark
+      protocol
     ]
     test_specs.each do |spec|
       desc "run jasmine #{spec}-spec"
@@ -153,6 +155,7 @@ def self.set_environment_vars
 
   desc 'Build bindings'
   task build: [
+    'protocol:build',
     'platform:build',
     'bindings:install',
     'environment:check',
diff --git a/scripts/elements/protocol.rb b/scripts/elements/protocol.rb
new file mode 100644
index 0000000000..dba59838da
--- /dev/null
+++ b/scripts/elements/protocol.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module Protocol
+  PKG = "#{Paths::PROTOCOL}/pkg"
+  TARGET = "#{Paths::PROTOCOL}/target"
+  TARGETS = [PKG, TARGET].freeze
+end
+
+namespace :protocol do
+
+  task :clean do
+    Protocol::TARGETS.each do |path|
+      path = "#{path}/.node_integrity" if File.basename(path) == 'node_modules'
+      if File.exist?(path)
+        Shell.rm_rf(path)
+        Reporter.removed('protocol', "removed: #{File.basename(path)}", '')
+      end
+    end
+  end
+
+  task rebuild: ['protocol:clean', 'protocol:build']
+
+  desc 'build protocol'
+  task build: ['environment:check'] do
+    Shell.rm_rf(Protocol::PKG) if @rebuild
+    Reporter.removed('protocol', File.basename(Protocol::PKG), '') if @rebuild
+    begin
+      Shell.chdir(Paths::PROTOCOL) do
+        duration = Shell.timed_sh 'wasm-pack build --target nodejs', 'build protocol'
+        Reporter.done('protocol', 'build', '', duration)
+      end
+    rescue StandardError
+      Reporter.failed('protocol', 'build', '')
+    end
+  end
+
+  desc 'Lint protocol'
+  task lint: 'protocol:install' do
+    Shell.chdir(Paths::PROTOCOL) do
+      duration = Shell.timed_sh 'cargo clippy', 'lint protocol'
+      Reporter.done('protocol', 'linting', '', duration)
+    end
+  end
+
+end
diff --git a/scripts/env/paths.rb b/scripts/env/paths.rb
index 5541f03866..fa20ff1290 100644
--- a/scripts/env/paths.rb
+++ b/scripts/env/paths.rb
@@ -64,6 +64,7 @@ def self.release_resources_folder
   JASMINE = './node_modules/.bin/electron ./node_modules/jasmine/bin/jasmine.js'
   PLATFORM = "#{ROOT}/application/platform"
   PLATFORM_DIST = "#{PLATFORM}/dist"
+  PROTOCOL = "#{ROOT}/application/apps/protocol"
   RELEASE = "#{ELECTRON}/release"
   RELEASE_BIN = "#{RELEASE}/#{Paths.release_bin_folder}"
   RELEASE_BUILD = "#{RELEASE}/#{Paths.release_build_folder}"