@@ -130,7 +130,8 @@ function is decorated with `@track`, the input and output of the function will b
130130nested chains:
131131
132132``` python
133- from opik.integrations.openai import track_openai, track
133+ from opik import track
134+ from opik.integrations.openai import track_openai
134135from openai import OpenAI
135136
136137# Wrap your OpenAI client
@@ -164,7 +165,7 @@ def generate_response(input_text, context):
164165 f" AI: "
165166 )
166167
167- response = client.chat.completions.create(
168+ response = openai_client.chat.completions.create(
168169 model="gpt-3.5-turbo",
169170 messages=[{"role": "user", "content": full_prompt}]
170171 )
@@ -182,7 +183,6 @@ If you are using LangChain to build your chains, you can use the `OpikTracer` to
182183log every step of the chain to Opik:
183184
184185``` python
185- from langchain.chains import LLMChain
186186from langchain_openai import OpenAI
187187from langchain.prompts import PromptTemplate
188188from opik.integrations.langchain import OpikTracer
@@ -198,10 +198,11 @@ prompt_template = PromptTemplate(
198198 template="Translate the following text to French: {input}"
199199)
200200
201- llm_chain = LLMChain(llm=llm, prompt=prompt_template)
201+ # Use pipe operator to create LLM chain
202+ llm_chain = prompt_template | llm
202203
203204# Generate the translations
204- llm_chain.run("Hello, how are you?", callbacks=[opik_tracer])
205+ llm_chain.invoke({"input": "Hello, how are you?"}, callbacks=[opik_tracer])
205206```
206207
207208</TabItem>
0 commit comments