--- a/litellm/proxy/guardrails/guardrail_hooks/pangea.py
+++ b/litellm/proxy/guardrails/guardrail_hooks/pangea.py
@@ -1,17 +1,9 @@
 # litellm/proxy/guardrails/guardrail_hooks/pangea.py
 import os
-import sys
-
-# Adds the parent directory to the system path to allow importing litellm modules
-sys.path.insert(
-    0, os.path.abspath("../../..")
-)
-import json
 from typing import Any, List, Literal, Optional, Union
 
 from fastapi import HTTPException
 
-import litellm
 from litellm._logging import verbose_proxy_logger
 from litellm.integrations.custom_guardrail import (
     CustomGuardrail,
@@ -50,7 +42,8 @@ class PangeaHandler(CustomGuardrail):
     def __init__(
         self,
         guardrail_name: str,
-        pangea_recipe: str,
+        pangea_input_recipe: Optional[str] = None,
+        pangea_output_recipe: Optional[str] = None,
         api_key: Optional[str] = None,
         api_base: Optional[str] = None,
         **kwargs,
@@ -80,20 +73,22 @@ def __init__(
             or os.environ.get("PANGEA_API_BASE")
             or "https://ai-guard.aws.us.pangea.cloud"
         )
-        self.pangea_recipe = pangea_recipe
+        self.pangea_input_recipe = pangea_input_recipe
+        self.pangea_output_recipe = pangea_output_recipe
         self.guardrail_endpoint = f"{self.api_base}/v1/text/guard"
 
         # Pass relevant kwargs to the parent class
         super().__init__(guardrail_name=guardrail_name, **kwargs)
         verbose_proxy_logger.info(
-            f"Initialized Pangea Guardrail: name={guardrail_name}, recipe={pangea_recipe}, api_base={self.api_base}"
+            f"Initialized Pangea Guardrail: name={guardrail_name}, recipe={pangea_input_recipe}, api_base={self.api_base}"
         )
 
     def _prepare_payload(
         self,
         messages: Optional[List[AllMessageValues]] = None,
         text_input: Optional[str] = None,
         request_data: Optional[dict] = None,
+        recipe: Optional[str] = None,
     ) -> dict:
         """
         Prepares the payload for the Pangea AI Guard API request.
@@ -107,9 +102,12 @@ def _prepare_payload(
             dict: The payload dictionary for the API request.
         """
         payload: dict[str, Any] = {
-            "recipe": self.pangea_recipe,
             "debug": False,  # Or make this configurable if needed
         }
+
+        if recipe:
+            payload["recipe"] = recipe
+
         if messages:
             # Ensure messages are in the format Pangea expects (list of dicts with 'role' and 'content')
             payload["messages"] = [
@@ -253,7 +251,7 @@ async def async_moderation_hook(
 
         try:
             payload = self._prepare_payload(
-                messages=messages, text_input=text_input, request_data=data
+                messages=messages, text_input=text_input, request_data=data, recipe=self.pangea_input_recipe
             )
             await self._call_pangea_guard(
                 payload=payload, request_data=data, hook_name="moderation_hook"
@@ -303,7 +301,7 @@ async def async_post_call_success_hook(
         try:
             # Scan only the output text in the post-call hook
             payload = self._prepare_payload(
-                text_input=response_str, request_data=data
+                text_input=response_str, request_data=data, recipe=self.pangea_output_recipe
             )
             await self._call_pangea_guard(
                 payload=payload,
@@ -321,4 +319,4 @@ async def async_post_call_success_hook(
                     "error": f"Error preparing Pangea payload for response: {ve}",
                     "guardrail_name": self.guardrail_name,
                 },
-            )
+            )
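
A minimal usage sketch of the split-recipe behavior introduced by this change. The class name, constructor parameters, and the _prepare_payload signature come from the diff; the recipe names and message content below are placeholders, and PANGEA_API_KEY / PANGEA_API_BASE are assumed to already be set in the environment.

# Hedged sketch: wiring separate input/output recipes into the handler.
# "demo-input-recipe" / "demo-output-recipe" are placeholder recipe names.
from litellm.proxy.guardrails.guardrail_hooks.pangea import PangeaHandler

guardrail = PangeaHandler(
    guardrail_name="pangea-ai-guard",
    pangea_input_recipe="demo-input-recipe",    # used by async_moderation_hook (prompt side)
    pangea_output_recipe="demo-output-recipe",  # used by async_post_call_success_hook (response side)
)

# _prepare_payload only sets "recipe" when one is passed, so a direction with no
# configured recipe falls back to the Pangea AI Guard service's default behavior.
prompt_payload = guardrail._prepare_payload(
    messages=[{"role": "user", "content": "hello"}],
    recipe=guardrail.pangea_input_recipe,
)
assert prompt_payload["recipe"] == "demo-input-recipe"

response_payload = guardrail._prepare_payload(
    text_input="model output to scan",
    recipe=guardrail.pangea_output_recipe,
)
assert response_payload["recipe"] == "demo-output-recipe"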