diff --git a/.vscode/launch.json b/.vscode/launch.json
index e7be044..56f9804 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -1,12 +1,13 @@
 {
-    "version": "0.2.0",
-    "configurations": [
-        {
-            "name": "Attach to Node Functions",
-            "type": "node",
-            "request": "attach",
-            "port": 9229,
-            "preLaunchTask": "func: host start"
-        }
-    ]
-}
\ No newline at end of file
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "Attach to Chrome",
+            "port": 9222,
+            "url": "http://localhost:4200/",
+            "request": "attach",
+            "type": "chrome",
+            "webRoot": "${workspaceFolder}/samples/openai-acs-msgraph/client"
+        }
+    ]
+}
diff --git a/DEBUGGING.md b/DEBUGGING.md
new file mode 100644
index 0000000..8ad662b
--- /dev/null
+++ b/DEBUGGING.md
@@ -0,0 +1,66 @@
+# Debugging (Tested on Ubuntu Linux)
+
+Prerequisites:
+
+- node/npm must be installed (e.g. using nvm)
+- set the default version: ``nvm alias default node``
+
+To debug server and client code in VS Code, do the following:
+
+## Run the Postgres database
+
+1. Launch Docker Compose from the terminal:
+
+```
+cd samples/openai-acs-msgraph
+docker compose up   # or, with Compose V1: docker-compose up
+```
+
+## Run the server in debug mode
+
+2. Launch the server in debug mode from the npm scripts defined in the server's package.json:
+
+![Launch the server in debug mode](images/launch-debug-server.png)
+
+## Run the client in debug mode
+
+3. Launch the browser in debug mode (here we use Chrome):
+
+```
+google-chrome-stable --remote-debugging-port=9222
+```
+
+4. Modify your launch.json like this:
+
+```
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "Attach to Chrome",
+            "port": 9222,
+            "url": "http://localhost:4200/",
+            "request": "attach",
+            "type": "chrome",
+            "webRoot": "${workspaceFolder}/samples/openai-acs-msgraph/client"
+        }
+    ]
+}
+```
+
+5. Launch the client from the terminal:
+
+```
+cd samples/openai-acs-msgraph/client
+npm start
+```
+
+6. Attach the client-side debugger:
+
+![Attach to Chrome](images/attach-to-chrome.png)
+
+## See how the debugger works
+
+7. Generate email/SMS messages in the app to exercise the code:
+
+![Generate email/SMS messages](images/generate-email-sms-messages.png)
+
+8. Client-side breakpoint:
+
+![Client-side breakpoint](images/client-side-breakpoint.png)
+
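+For the server-side breakpoint shown in the next step, one convenient spot (an illustrative suggestion; the exact line may differ in your checkout) is where the Azure OpenAI response is unpacked in `samples/openai-acs-msgraph/server/typescript/openAI.ts`:
+
+```
+// Break on this line to inspect the raw completion object returned by
+// Azure OpenAI before its content is sent back to the client.
+let content = (completion.choices[0]?.message?.content?.trim() ?? '') as string;
+```
+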
+9. Server-side breakpoint:
+
+![Server-side breakpoint](images/server-side-breakpoint.png)
\ No newline at end of file
diff --git a/images/attach-to-chrome.png b/images/attach-to-chrome.png
new file mode 100644
index 0000000..e2e9414
Binary files /dev/null and b/images/attach-to-chrome.png differ
diff --git a/images/client-side-breakpoint.png b/images/client-side-breakpoint.png
new file mode 100644
index 0000000..6b255dd
Binary files /dev/null and b/images/client-side-breakpoint.png differ
diff --git a/images/generate-email-sms-messages.png b/images/generate-email-sms-messages.png
new file mode 100644
index 0000000..95897df
Binary files /dev/null and b/images/generate-email-sms-messages.png differ
diff --git a/images/launch-debug-server.png b/images/launch-debug-server.png
new file mode 100644
index 0000000..b63755f
Binary files /dev/null and b/images/launch-debug-server.png differ
diff --git a/images/server-side-breakpoint.png b/images/server-side-breakpoint.png
new file mode 100644
index 0000000..2ccba97
Binary files /dev/null and b/images/server-side-breakpoint.png differ
diff --git a/samples/openai-acs-msgraph/server/typescript/openAI.ts b/samples/openai-acs-msgraph/server/typescript/openAI.ts
index 0cedf0b..cf33240 100644
--- a/samples/openai-acs-msgraph/server/typescript/openAI.ts
+++ b/samples/openai-acs-msgraph/server/typescript/openAI.ts
@@ -63,7 +63,7 @@ async function getAzureOpenAIBYODCompletion(systemPrompt: string, userPrompt: st
     const fetchUrl = `${OPENAI_ENDPOINT}/openai/deployments/${OPENAI_MODEL}/extensions/chat/completions?api-version=${OPENAI_API_VERSION}`;
 
     const messageData: ChatGPTData = {
-        max_tokens: 1024,
+        max_tokens: 800,
         temperature,
         messages: [
             { role: 'system', content: systemPrompt },
@@ -100,10 +100,10 @@ async function getAzureOpenAIBYODCompletion(systemPrompt: string, userPrompt: st
         return completion.error.message;
     }
 
-    const citations = (completion.choices[0]?.messages[0]?.content?.trim() ?? '') as string;
-    console.log('Azure OpenAI BYOD Citations: \n', citations);
+    // const citations = (completion.choices[0]?.message?.context?.trim() ?? '') as string;
+    // console.log('Azure OpenAI BYOD Citations: \n', citations);
 
-    let content = (completion.choices[0]?.messages[1]?.content?.trim() ?? '') as string;
+    let content = (completion.choices[0]?.message?.content?.trim() ?? '') as string;
     console.log('Azure OpenAI BYOD Output: \n', content);
     return content;
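
For orientation, the response-handling change in `openAI.ts` above reads the BYOD ("bring your own data") completion like a standard chat completion: each choice carries a single `message`, with the answer in `message.content` and citation data (per the commented-out line) in `message.context`. A minimal sketch of the shape the code relies on, using assumed type names that are not part of the repo:

```
// Sketch (assumed names) of the completion shape read by getAzureOpenAIBYODCompletion.
// Only error.message, choices[0].message.content, and (in the commented-out
// citations line) choices[0].message.context are actually accessed.
interface AssumedByodChoice {
  message?: {
    content?: string;  // generated answer text
    context?: string;  // citation payload referenced by the commented-out line
  };
}

interface AssumedByodCompletion {
  error?: { message: string };
  choices: AssumedByodChoice[];
}
```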