Skip to content

Commit

Permalink
Releasing Demo Application
Browse files Browse the repository at this point in the history
  • Loading branch information
chris-alexiuk committed Nov 15, 2023
1 parent dfde116 commit 220a7fc
Show file tree
Hide file tree
Showing 8 changed files with 246 additions and 0 deletions.
Binary file added .chainlit/.langchain.db
Binary file not shown.
63 changes: 63 additions & 0 deletions .chainlit/config.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
[project]
# If true (default), the app will be available to anonymous users.
# If false, users will need to authenticate and be part of the project to use the app.
public = true

# The project ID (found on https://cloud.chainlit.io).
# The project ID is required when public is set to false or when using the cloud database.
#id = ""

# Uncomment if you want to persist the chats.
# local will create a database in your .chainlit directory (requires node.js installed).
# cloud will use the Chainlit cloud database.
# custom will use your custom client.
# database = "local"

# Whether to enable telemetry (default: true). No personal data is collected.
enable_telemetry = true

# List of environment variables to be provided by each user to use the app.
user_env = []

# Duration (in seconds) during which the session is saved when the connection is lost
session_timeout = 3600

[UI]
# Name of the app and chatbot.
name = "Chatbot"

# Description of the app and chatbot. This is used for HTML tags.
# description = ""

# The default value for the expand messages settings.
default_expand_messages = false

# Hide the chain of thought details from the user in the UI.
hide_cot = false

# Link to your github repo. This will add a github button in the UI's header.
# github = ""

# Override default MUI light theme. (Check theme.ts)
[UI.theme.light]
#background = "#FAFAFA"
#paper = "#FFFFFF"

[UI.theme.light.primary]
#main = "#F80061"
#dark = "#980039"
#light = "#FFE7EB"

# Override default MUI dark theme. (Check theme.ts)
[UI.theme.dark]
#background = "#FAFAFA"
#paper = "#FFFFFF"

[UI.theme.dark.primary]
#main = "#F80061"
#dark = "#980039"
#light = "#FFE7EB"


[meta]
generated_by = "0.6.2"
11 changes: 11 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
FROM python:3.11
# Run as a non-root user (uid 1000 matches the common default host uid).
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH
WORKDIR $HOME/app
# Install dependencies BEFORE copying the app so this layer is cached when
# only application code changes. NOTE: the original copied to "~/app" --
# Docker COPY does not shell-expand "~", so that created a literal "~" dir.
COPY --chown=user ./requirements.txt $HOME/app/requirements.txt
RUN pip install -r requirements.txt
# Copy the application code, owned by the non-root user. (The original had a
# second, redundant "COPY . ." that produced root-owned duplicates.)
COPY --chown=user . $HOME/app
CMD ["chainlit", "run", "app.py", "--port", "7860"]
Binary file added __pycache__/app.cpython-39.pyc
Binary file not shown.
Binary file added __pycache__/tools.cpython-39.pyc
Binary file not shown.
98 changes: 98 additions & 0 deletions app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
from langchain.agents import AgentExecutor, AgentType, initialize_agent
from langchain.agents.structured_chat.prompt import SUFFIX
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from tools import generate_image_tool

import chainlit as cl
from chainlit.action import Action
from chainlit.input_widget import Select, Switch, Slider


@cl.author_rename
def rename(orig_author):
    """Map internal chain author names to user-facing labels in the UI."""
    # Only "LLMChain" is re-labelled; every other author passes through.
    return {"LLMChain": "Assistant"}.get(orig_author, orig_author)


@cl.cache
def get_memory():
    """Return the conversation memory, cached via Chainlit's @cl.cache.

    The memory buffers prior turns under the "chat_history" key, which the
    agent prompt suffix interpolates on each call.
    """
    memory = ConversationBufferMemory(memory_key="chat_history")
    return memory


@cl.on_chat_start
async def start():
    """Show the settings panel when a chat starts, then build the agent."""
    widgets = [
        Select(
            id="Model",
            label="OpenAI - Model",
            values=["gpt-3.5-turbo", "gpt-4-1106-preview"],
            # index 1 -> gpt-4-1106-preview is the default selection
            initial_index=1,
        ),
        Switch(id="Streaming", label="OpenAI - Stream Tokens", initial=True),
        Slider(
            id="Temperature",
            label="OpenAI - Temperature",
            initial=0,
            min=0,
            max=2,
            step=0.1,
        ),
    ]
    settings = await cl.ChatSettings(widgets).send()
    await setup_agent(settings)


@cl.on_settings_update
async def setup_agent(settings):
    """(Re)build the structured-chat agent whenever the settings change.

    The finished agent is stored in the user session under "agent" for the
    message handler to pick up.
    """
    print("Setup agent with following settings: ", settings)

    chat_model = ChatOpenAI(
        model=settings["Model"],
        temperature=settings["Temperature"],
        streaming=settings["Streaming"],
    )
    # Prepend the running chat history to the standard structured-chat
    # suffix so the agent sees prior turns.
    suffix_with_history = "Chat history:\n{chat_history}\n\n" + SUFFIX

    agent = initialize_agent(
        llm=chat_model,
        tools=[generate_image_tool],
        agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
        memory=get_memory(),
        agent_kwargs={
            "suffix": suffix_with_history,
            "input_variables": ["input", "agent_scratchpad", "chat_history"],
        },
    )
    cl.user_session.set("agent", agent)


@cl.on_message
async def main(message: cl.Message):
    """Handle a user message: run the agent and send its reply.

    If the GenerateImage tool ran during this turn, the produced image is
    attached inline to the reply.
    """
    agent = cl.user_session.get("agent")  # type: AgentExecutor
    # Reset the per-turn marker so a previous turn's image is not re-sent.
    cl.user_session.set("generated_image", None)

    # No async implementation in the Stability AI client, fallback to sync
    res = await cl.make_async(agent.run)(
        input=message.content, callbacks=[cl.LangchainCallbackHandler()]
    )

    elements = []
    actions = []

    generated_image_name = cl.user_session.get("generated_image")
    # Only look up the image bytes when the tool actually stored a name this
    # turn; the original passed None as a session key in the no-image case.
    if generated_image_name:
        generated_image = cl.user_session.get(generated_image_name)
        if generated_image:
            elements = [
                cl.Image(
                    content=generated_image,
                    name=generated_image_name,
                    display="inline",
                )
            ]

    await cl.Message(content=res, elements=elements, actions=actions).send()
14 changes: 14 additions & 0 deletions chainlit.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Welcome to Chainlit! 🚀🤖

Hi there, Developer! 👋 We're excited to have you on board. Chainlit is a powerful tool designed to help you prototype, debug and share applications built on top of LLMs.

## Useful Links 🔗

- **Documentation:** Get started with our comprehensive [Chainlit Documentation](https://docs.chainlit.io) 📚
- **Discord Community:** Join our friendly [Chainlit Discord](https://discord.gg/ZThrUxbAYw) to ask questions, share your projects, and connect with other developers! 💬

We can't wait to see what you create with Chainlit! Happy coding! 💻😊

## Welcome screen

To modify the welcome screen, edit the `chainlit.md` file at the root of your project. If you do not want a welcome screen, just leave this file empty.
60 changes: 60 additions & 0 deletions tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
import io
import os
from openai import OpenAI
from langchain.tools import StructuredTool, Tool
from io import BytesIO
import requests
import json
from io import BytesIO

import chainlit as cl


def get_image_name():
    """Return the next sequential image name ("image-0", "image-1", ...).

    The running counter is persisted in the Chainlit user session under
    "image_count".
    """
    count = cl.user_session.get("image_count")
    count = 0 if count is None else count + 1
    cl.user_session.set("image_count", count)
    return f"image-{count}"


def _generate_image(prompt: str):
    """Generate an image with DALL-E 3 and stash its bytes in the session.

    Returns the session key (image name) under which the downloaded image
    bytes were stored; the key is also recorded under "generated_image" so
    the app layer can locate the most recent image.
    """
    client = OpenAI()

    response = client.images.generate(
        model="dall-e-3",
        prompt=prompt,
        size="1024x1024",
        quality="standard",
        n=1,
    )

    # Download the generated image. The original used stream=True but then
    # read .content (which loads the whole body anyway) and wrapped it in a
    # BytesIO only to call .getvalue(); a plain GET with a timeout is
    # equivalent and won't hang forever on a dead connection.
    image_payload = requests.get(response.data[0].url, timeout=60)
    image_payload.raise_for_status()  # fail loudly on a bad download

    name = get_image_name()
    cl.user_session.set(name, image_payload.content)
    cl.user_session.set("generated_image", name)
    return name


def generate_image(prompt: str):
    """Tool entry point: create an image and return a short confirmation."""
    return f"Here is {_generate_image(prompt)}."


# Input-format hint shown to the agent: a JSON object with a single "prompt"
# key. The braces are doubled because the description is interpolated into a
# prompt template that treats single braces as placeholders.
generate_image_format = '{{"prompt": "prompt"}}'

# LangChain tool wrapping generate_image for the structured-chat agent.
# NOTE(review): generate_image receives the raw tool input string; confirm
# the structured-chat agent extracts the "prompt" value rather than passing
# the JSON string verbatim to the image model.
generate_image_tool = Tool.from_function(
    func=generate_image,
    name="GenerateImage",
    description=f"Useful to create an image from a text prompt. Input should be a single string strictly in the following JSON format: {generate_image_format}",
    # return_direct: the tool's string output is sent to the user as-is
    # instead of being fed back to the LLM for another reasoning step.
    return_direct=True,
)

0 comments on commit 220a7fc

Please sign in to comment.