1. What is Groq?
Groq is a company that builds specialized AI hardware and software for ultra-fast inference of Large Language Models (LLMs).
Unlike traditional AI hardware vendors that focus on training, Groq is laser-focused on inference — the phase where trained models generate responses for real users.
Groq’s key innovation is the Language Processing Unit (LPU).
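For context, applications reach Groq's hardware through a hosted API with an official Python SDK. The snippet below is only a minimal sketch of a direct call, assuming the groq package is installed and GROQ_API_KEY is set in the environment; the model name and prompt are illustrative.

# Minimal sketch: call a Groq-hosted model directly with the official SDK.
# Assumes `pip install groq` and GROQ_API_KEY set in the environment.
from groq import Groq

client = Groq()  # picks up GROQ_API_KEY from the environment automatically

completion = client.chat.completions.create(
    model="llama-3.1-8b-instant",  # illustrative model name
    messages=[{"role": "user", "content": "Explain LLM inference in one sentence."}],
)
print(completion.choices[0].message.content)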
2. Building LLM, Prompt, and StrOutputParser chains with LCEL (LangChain Expression Language)
Here is a complete example that composes a chat prompt, a Groq-hosted LLM, and a string output parser into a single LCEL chain, wrapped in a small Streamlit app:
import os
import streamlit as st
from dotenv import load_dotenv
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
# ------------------------------------------------------
# Load environment variables
# ------------------------------------------------------
load_dotenv()
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
if not GROQ_API_KEY:
    st.error("GROQ_API_KEY is missing. Please set it in the .env file.")
    st.stop()
# ------------------------------------------------------
# Initialize LLM (cached)
# ------------------------------------------------------
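# st.cache_resource keeps one ChatGroq client alive across Streamlit reruns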
@st.cache_resource
def load_llm():
    return ChatGroq(
        model="llama-3.1-8b-instant",
        groq_api_key=GROQ_API_KEY,
        temperature=0.4
    )
llm = load_llm()
# ------------------------------------------------------
# Prompt Template (Very Simple)
# ------------------------------------------------------
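# {question} is a placeholder filled from the chain's input dict at invoke time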
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful AI assistant."),
        ("human", "{question}")
    ]
)
# ------------------------------------------------------
# Output Parser
# ------------------------------------------------------
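# StrOutputParser converts the model's chat message into a plain string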
output_parser = StrOutputParser()
# ------------------------------------------------------
# LCEL Chain
# ------------------------------------------------------
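# The | operator composes Runnables left to right:
# prompt (messages) -> llm (AIMessage) -> output_parser (str)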
chain = prompt | llm | output_parser
# ------------------------------------------------------
# Streamlit UI
# ------------------------------------------------------
st.set_page_config(page_title="Simple GenAI App", page_icon="🤖")
st.title("🤖 Simple GenAI App (LangChain + Groq)")
st.write("Ask any question and get an instant answer from an open-source LLM.")
question = st.text_area(
"Your Question",
placeholder="What is LangChain?",
height=120
)
if st.button("Ask"):
    if not question.strip():
        st.warning("Please enter a question.")
    else:
        with st.spinner("Thinking..."):
            try:
                answer = chain.invoke({"question": question})
                st.success("Answer")
                st.write(answer)
            except Exception as e:
                st.error(f"Error: {e}")
st.markdown("---")
st.caption("Powered by LangChain + Groq")
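To try it locally, save the script (for example as app.py, a name of your choosing), add GROQ_API_KEY=<your key> to a .env file in the same folder, install the dependencies used by the imports (streamlit, python-dotenv, langchain-groq, langchain-core), and start it with streamlit run app.py.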

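Because the chain is a standard LCEL Runnable, it can also stream its output instead of returning it all at once. The variant below is a sketch, not part of the app above; it assumes Streamlit 1.31 or newer (for st.write_stream) and reuses the chain and question widget already defined.

# Sketch: stream the answer chunk by chunk instead of waiting for chain.invoke().
# Assumes Streamlit >= 1.31 (st.write_stream) and the chain/question defined above.
if st.button("Ask (streaming)"):
    if not question.strip():
        st.warning("Please enter a question.")
    else:
        # chain.stream() yields string chunks because StrOutputParser is the last step
        answer = st.write_stream(chain.stream({"question": question}))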