diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f6592af..f37eeb9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -29,6 +29,11 @@ repos:
- id: requirements-txt-fixer
- id: debug-statements
+ - repo: https://github.com/pycqa/isort
+ rev: 5.12.0
+ hooks:
+ - id: isort
+
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.254
hooks:
diff --git a/chat.py b/chat.py
index f69263b..fa52d76 100644
--- a/chat.py
+++ b/chat.py
@@ -1,10 +1,9 @@
-from openai.error import OpenAIError
+from streamlit_option_menu import option_menu
from pathlib import Path
-from src.utils.ai import ai_settings, send_ai_request
-from src.utils.tts import show_player
+from src.utils.lang import en, ru
from src.utils.donates import show_donates
-from src.utils.conversation import get_user_input, clear_chat, show_conversation
+from src.utils.conversation import get_user_input, show_chat_buttons, show_conversation
import streamlit as st
@@ -17,6 +16,11 @@ icons_dir = assets_dir / "icons"
# --- GENERAL SETTINGS ---
PAGE_TITLE = "AI Talks"
PAGE_ICON = "🤖"
+AI_MODEL_OPTIONS = [
+ "gpt-3.5-turbo",
+ "gpt-4",
+ "gpt-4-32k",
+]
st.set_page_config(page_title=PAGE_TITLE, page_icon=PAGE_ICON)
@@ -24,8 +28,38 @@ st.set_page_config(page_title=PAGE_TITLE, page_icon=PAGE_ICON)
with open(css_file) as f:
st.markdown("<style>{}</style>".format(f.read()), unsafe_allow_html=True)
-st.markdown(f"<h1 style='text-align: center;'>{PAGE_TITLE}</h1>", unsafe_allow_html=True)
-st.markdown("---")
+selected_lang = option_menu(
+ menu_title=None,
+ options=["En", "Ru", ],
+ icons=["flag_en", "flag_ru"],
+ menu_icon="cast",
+ default_index=0,
+ orientation="horizontal",
+ styles={
+ "container": {"padding": "0px",
+ "display": "grid",
+ "margin": "0!important",
+ "background-color": "#23212c"
+ },
+ "icon": {"color": "#8bff80", "font-size": "14px"},
+ "nav-link": {
+ "font-size": "14px",
+ "text-align": "center",
+ "margin": "auto",
+ "background-color": "#23212c",
+ "height": "30px",
+ "width": "13rem",
+ "color": "#7970a9",
+ "border-radius": "5px"
+ },
+ "nav-link-selected": {
+ "background-color": "#454158",
+ "font-weight": "300",
+ "color": "#f7f8f2",
+ "border": "1px solid #fe80bf"
+ }
+ }
+)
# Storing The Context
if "generated" not in st.session_state:
@@ -40,34 +74,28 @@ if "user_text" not in st.session_state:
def main() -> None:
user_content = get_user_input()
- b1, b2 = st.columns(2)
- with b1, b2:
- b1.button("Rerun", on_click=st.cache_data.clear)
- b2.button("Clear Conversation", on_click=clear_chat)
+ show_chat_buttons()
- model, role = ai_settings()
+ c1, c2 = st.columns(2)
+ with c1, c2:
+ model = c1.selectbox(label=st.session_state.locale.select_placeholder1, options=AI_MODEL_OPTIONS)
+ role = c2.selectbox(label=st.session_state.locale.select_placeholder2,
+ options=st.session_state.locale.ai_role_options)
if user_content:
- if st.session_state["messages"]:
- st.session_state["messages"].append({"role": "user", "content": user_content})
- else:
- st.session_state["messages"] = [
- {"role": "system", "content": f"You are a {role}."},
- {"role": "user", "content": user_content},
- ]
- try:
- completion = send_ai_request(model, st.session_state["messages"])
- ai_content = completion.get("choices")[0].get("message").get("content")
- st.session_state["messages"].append({"role": "assistant", "content": ai_content})
- if ai_content:
- show_conversation(ai_content, user_content)
- st.markdown("---")
- show_player(ai_content)
- except (OpenAIError, UnboundLocalError) as err:
- st.error(err)
+ show_conversation(user_content, model, role)
if __name__ == "__main__":
+ match selected_lang:
+ case "En":
+ st.session_state.locale = en
+ case "Ru":
+ st.session_state.locale = ru
+ case _:
+ st.session_state.locale = en
+ st.markdown(f"<h1 style='text-align: center;'>{st.session_state.locale.title}</h1>", unsafe_allow_html=True)
+ st.markdown("---")
main()
st.markdown("---")
st.image("assets/ai.jpg")
diff --git a/pyproject.toml b/pyproject.toml
index ee8fac3..5c79b2e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -2,8 +2,8 @@
line-length = 120
# Enable Flake's "E" and "F" codes by default.
-select = ["E", "F", "U", "N", "S", "C", "B", "A", "Q", "A", "YTT", "RUF", "M", "W", "I" ]
-ignore = ["N806", "S101", "A003", "N815", "S104"]
+select = ["E", "F", "U", "N", "S", "C", "B", "A", "Q", "A", "YTT", "RUF", "RUF100", "W", "I", ]
+ignore = ["N806", "S101", "A003", "N815", "S104", "UP006", "RUF001"]
# Exclude a variety of commonly ignored directories.
exclude = [".bzr",
diff --git a/requirements.txt b/requirements.txt
index ba8a720..00c421f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,6 @@
streamlit==1.20.0
streamlit-chat==0.0.2.2
+streamlit_option_menu==0.3.2
openai==0.27.2
gtts==2.3.1
pip==23.0.1
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/utils/__init__.py b/src/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/utils/ai.py b/src/utils/ai.py
deleted file mode 100644
index 0a17597..0000000
--- a/src/utils/ai.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from typing import List, Dict, Tuple
-
-import streamlit as st
-import openai
-
-AI_MODEL_OPTIONS = [
- "gpt-3.5-turbo",
- "gpt-4",
- "gpt-4-32k",
-]
-
-AI_ROLE_OPTIONS = [
- "helpful assistant",
- "code assistant",
- "code reviewer",
- "text improver",
- "cinema expert",
- "sport expert",
- "online games expert",
- "food recipes expert",
- "English grammar expert",
-]
-
-
-def ai_settings() -> Tuple[str, str]:
- c1, c2 = st.columns(2)
- with c1, c2:
- model = c1.selectbox(label="Select AI Model", options=AI_MODEL_OPTIONS)
- role = c2.selectbox(label="Select AI Role", options=AI_ROLE_OPTIONS)
- return model, role
-
-
-@st.cache_data()
-def send_ai_request(ai_model: str, messages: List[Dict]) -> Dict:
- openai.api_key = st.secrets.api_credentials.api_key
- completion = openai.ChatCompletion.create(
- model=ai_model,
- messages=messages,
- )
- return completion
diff --git a/src/utils/ai_interaction.py b/src/utils/ai_interaction.py
new file mode 100644
index 0000000..331e436
--- /dev/null
+++ b/src/utils/ai_interaction.py
@@ -0,0 +1,19 @@
+from typing import List
+
+import openai
+import streamlit as st
+
+
+@st.cache_data()
+def send_ai_request(ai_model: str, messages: List[dict]) -> dict:
+ openai.api_key = st.secrets.api_credentials.api_key
+ import logging
+ logging.warning("messages:")
+ logging.warning(messages)
+ completion = openai.ChatCompletion.create(
+ model=ai_model,
+ messages=messages,
+ )
+ logging.warning("completion:")
+ logging.warning(completion)
+ return completion
diff --git a/src/utils/conversation.py b/src/utils/conversation.py
index 3086366..658b73f 100644
--- a/src/utils/conversation.py
+++ b/src/utils/conversation.py
@@ -1,6 +1,10 @@
import streamlit as st
+from openai.error import OpenAIError
from streamlit_chat import message
+from src.utils.ai_interaction import send_ai_request
+from src.utils.tts import show_player
+
def clear_chat() -> None:
st.session_state["generated"] = []
@@ -10,11 +14,18 @@ def clear_chat() -> None:
def get_user_input() -> str:
- user_text = st.text_area(label="Start Your Conversation With AI:", key="user_text")
+ user_text = st.text_area(label=st.session_state.locale.chat_placeholder, key="user_text")
return user_text
-def show_conversation(ai_content: str, user_text: str) -> None:
+def show_chat_buttons() -> None:
+ b1, b2 = st.columns(2)
+ with b1, b2:
+ b1.button(st.session_state.locale.chat_btn1, on_click=st.cache_data.clear)
+ b2.button(st.session_state.locale.chat_btn2, on_click=clear_chat)
+
+
+def show_chat(ai_content: str, user_text: str) -> None:
if ai_content not in st.session_state.generated:
# store the ai content
st.session_state.past.append(user_text)
@@ -24,3 +35,23 @@ def show_conversation(ai_content: str, user_text: str) -> None:
message("", key=str(i))
st.markdown(st.session_state["generated"][i])
message(st.session_state["past"][i], is_user=True, key=str(i) + "_user", avatar_style="micah")
+
+
+def show_conversation(user_content: str, model: str, role: str) -> None:
+ if st.session_state["messages"]:
+ st.session_state["messages"].append({"role": "user", "content": user_content})
+ else:
+ st.session_state["messages"] = [
+ {"role": "system", "content": f"{st.session_state.locale.ai_role_prefix} {role}."},
+ {"role": "user", "content": user_content},
+ ]
+ try:
+ completion = send_ai_request(model, st.session_state["messages"])
+ ai_content = completion.get("choices")[0].get("message").get("content")
+ st.session_state["messages"].append({"role": "assistant", "content": ai_content})
+ if ai_content:
+ show_chat(ai_content, user_content)
+ st.markdown("---")
+ show_player(ai_content)
+ except (OpenAIError, UnboundLocalError) as err:
+ st.error(err)
diff --git a/src/utils/donates.py b/src/utils/donates.py
index 94213ea..882ed43 100644
--- a/src/utils/donates.py
+++ b/src/utils/donates.py
@@ -3,12 +3,12 @@ import streamlit as st
def show_donates() -> None:
st.markdown("---")
- st.markdown("""
- ### :moneybag: Donates
- **Russia:**
+ st.markdown(f"""
+ ### :moneybag: {st.session_state.locale.donates}
+ **{st.session_state.locale.donates1}:**
- [CloudTips (Tinkoff)](https://pay.cloudtips.ru/p/eafa15b2)
- **World:**
+ **{st.session_state.locale.donates2}:**
- [Buy Me A Coffee](https://www.buymeacoffee.com/aitalks)
- [ko-fi](https://ko-fi.com/ai_talks)
- [PayPal](https://www.paypal.com/paypalme/aitalks)
diff --git a/src/utils/lang.py b/src/utils/lang.py
new file mode 100644
index 0000000..b0a2df3
--- /dev/null
+++ b/src/utils/lang.py
@@ -0,0 +1,94 @@
+from dataclasses import dataclass
+from typing import List
+
+
+# Parent data class
+@dataclass
+class Locale:
+ ai_role_options: List[str]
+ ai_role_prefix: str
+ title: str
+ language: str
+ lang_code: str
+ donates: str
+ donates1: str
+ donates2: str
+ chat_placeholder: str
+ chat_btn1: str
+ chat_btn2: str
+ select_placeholder1: str
+ select_placeholder2: str
+ stt_placeholder: str
+
+
+# Child data class for English
+@dataclass
+class EnLocale(Locale):
+ ai_role_prefix: str = "You are a"
+ title: str = "AI Talks"
+ language: str = "English"
+ lang_code: str = "en"
+ donates: str = "Donates"
+ donates1: str = "Russia"
+ donates2: str = "World"
+ chat_placeholder: str = "Start Your Conversation With AI:"
+ chat_btn1: str = "Rerun"
+ chat_btn2: str = "Clear Conversation"
+ select_placeholder1: str = "Select AI Model"
+ select_placeholder2: str = "Select AI Role"
+ stt_placeholder: str = "To Hear The Voice Of AI, Press Play"
+
+
+# Child data class for Russian
+@dataclass
+class RuLocale(Locale):
+ ai_role_prefix: str = "Вы"
+ title: str = "Разговорчики с ИИ"
+ language: str = "Russian"
+ lang_code: str = "ru"
+ donates: str = "Поддержать Проект"
+ donates1: str = "Россия"
+ donates2: str = "Остальной Мир"
+ chat_placeholder: str = "Начните Вашу Беседу с ИИ:"
+ chat_btn1: str = "Перезапустить"
+ chat_btn2: str = "Очистить Беседу"
+ select_placeholder1: str = "Выберите Модель ИИ"
+ select_placeholder2: str = "Выберите Роль ИИ"
+ stt_placeholder: str = "Чтобы Услышать ИИ Нажми Кнопку Проигрывателя"
+
+
+AI_ROLE_OPTIONS_EN = [
+ "helpful assistant",
+ "code assistant",
+ "code reviewer",
+ "text improver",
+ "cinema expert",
+ "sport expert",
+ "online games expert",
+ "food recipes expert",
+ "English grammar expert",
+ "friendly and helpful teaching assistant. You explain concepts in great depth using simple terms, and you give examples to help people learn. At the end of each explanation, you ask a question to check for understanding", # NOQA: E501
+ "laconic assistant. You reply with brief, to-the-point answers with no elaboration",
+ "helpful, pattern-following assistant",
+ "helpful, pattern-following assistant that translates corporate jargon into plain English",
+]
+
+AI_ROLE_OPTIONS_RU = [
+ "ассистент, который готов помочь",
+ "ассистент программиста",
+ "рецензент кода программиста",
+ "эксперт по улучшению текста",
+ "эксперт по кинематографу",
+ "эксперт в области спорта",
+ "эксперт в онлайн-играх",
+ "эксперт по рецептам блюд",
+ "эксперт по английской грамматике",
+ "эксперт по русской грамматике",
+ "дружелюбный и полезный помощник преподавателя. Вы объясняете концепции в подробностях, используя простые термины, и даёте примеры, чтобы помочь людям научиться. В конце каждого объяснения вы задаете вопрос, чтобы проверить понимание", # NOQA: E501
+ "лаконичный помощник. Вы отвечаете краткими, по существу ответами без лишних слов",
+ "полезный помощник, следующий шаблонам",
+ "полезный помощник, следующий шаблонам, который переводит корпоративный жаргон на простой английский",
+]
+
+en = EnLocale(ai_role_options=AI_ROLE_OPTIONS_EN)
+ru = RuLocale(ai_role_options=AI_ROLE_OPTIONS_RU)
diff --git a/src/utils/tts.py b/src/utils/tts.py
index dc18bf0..8387246 100644
--- a/src/utils/tts.py
+++ b/src/utils/tts.py
@@ -1,53 +1,15 @@
-from typing import Any, Dict, Optional
-from gtts import gTTS, gTTSError, lang
from io import BytesIO
import streamlit as st
-
-DEFAULT_SPEECH_LANG = "English"
-
-
-def get_dict_key(dictionary: Dict, value: Any) -> Optional[Any]:
- for key, val in dictionary.items():
- if val == value:
- return key
-
-
-def lang_selector() -> str:
- languages = lang.tts_langs()
- lang_options = list(lang.tts_langs().values())
- default_index = lang_options.index(DEFAULT_SPEECH_LANG)
- lang_name = st.selectbox(
- label="Select Speech Language",
- options=lang_options,
- index=default_index
- )
- return get_dict_key(languages, lang_name)
-
-
-def speech_speed_radio() -> bool:
- speed_options = {
- "Normal": False,
- "Slow": True
- }
- speed_speech = st.radio(
- label="Select Speech Speed",
- options=speed_options.keys(),
- )
- return speed_options.get(speed_speech)
+from gtts import gTTS, gTTSError
def show_player(ai_content: str) -> None:
sound_file = BytesIO()
- col1, col2 = st.columns(2)
- with col1:
- lang_code = lang_selector()
- with col2:
- is_speech_slow = speech_speed_radio()
try:
- tts = gTTS(text=ai_content, lang=lang_code, slow=is_speech_slow)
+ tts = gTTS(text=ai_content, lang=st.session_state.locale.lang_code)
tts.write_to_fp(sound_file)
- st.write("To Hear The Voice Of AI, Press Play.")
+ st.write(st.session_state.locale.stt_placeholder)
st.audio(sound_file)
except gTTSError as err:
st.error(err)