Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .env
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Groq API key for the chatbot app; loaded at runtime via python-dotenv (load_dotenv()) and read with os.environ['GROQ_API_KEY']

GROQ_API_KEY=<your-groq-api-key>  # SECURITY: a real gsk_... key was committed here and published in this diff — revoke it in the Groq console and rotate immediately; never commit actual secrets, even in a gitignored .env
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
.env
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
**This is a fork of Groq's demo chatbot app.** You can find it here: https://github.com/definitive-io/conversational-chatbot-groq

# Groq LangChain Conversational Chatbot

This repository contains a Streamlit application that allows users to interact with a conversational chatbot powered by the LangChain API. The application uses the Groq API to generate responses and maintains a history of the conversation to provide context for the chatbot's responses.
Expand Down
9 changes: 6 additions & 3 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,18 @@
from langchain.chains.conversation.memory import ConversationBufferWindowMemory
from langchain_groq import ChatGroq
from langchain.prompts import PromptTemplate
from dotenv import load_dotenv

load_dotenv()

def main():
"""
This function is the main entry point of the application. It sets up the Groq client, the Streamlit interface, and handles the chat interaction.
"""

# Get Groq API key

groq_api_key = os.environ['GROQ_API_KEY']
# Get Groq API key
#groq_api_key =

# Display the Groq logo
spacer, col = st.columns([5, 1])
Expand All @@ -37,7 +40,7 @@ def main():
system_prompt = st.sidebar.text_input("System prompt:")
model = st.sidebar.selectbox(
'Choose a model',
['llama3-8b-8192', 'mixtral-8x7b-32768', 'gemma-7b-it']
['llama3-70b-8192', 'llama3-8b-8192', 'mixtral-8x7b-32768', 'gemma-7b-it']
)
conversational_memory_length = st.sidebar.slider('Conversational memory length:', 1, 10, value = 5)

Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
python-dotenv
aiohttp==3.9.3
aiosignal==1.3.1
altair==5.2.0
Expand Down