drop garbage
parent 3e0219b82b
commit 1ff262d106
@@ -65,10 +65,10 @@ repos:
- id: python-use-type-annotations
- id: text-unicode-replacement-char

- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.2.0
hooks:
- id: mypy
additional_dependencies: [ types-PyYAML==6.0.10, types_requests==2.28.8, types-ujson==5.6.0 ]
args: [ --ignore-missing-imports, --warn-no-return, --warn-redundant-casts, --disallow-incomplete-defs ]
exclude: ^setup.py
# - repo: https://github.com/pre-commit/mirrors-mypy
# rev: v1.2.0
# hooks:
# - id: mypy
# additional_dependencies: [ types-PyYAML==6.0.10, types_requests==2.28.8, types-ujson==5.6.0 ]
# args: [ --ignore-missing-imports, --warn-no-return, --warn-redundant-casts, --disallow-incomplete-defs ]
# exclude: ^setup.py
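Usage note: with the mypy hook configured as above, the check can be run on its own via `pre-commit run mypy --all-files`, the standard pre-commit invocation for running a single hook across the whole repository.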
@@ -26,8 +26,6 @@ AI_MODEL_OPTIONS: list[str] = [
    "gpt-3.5-turbo",
    "gpt-4",
    "gpt-4-32k",
    "bard",
    "phind-gpt-4",
]

st.set_page_config(page_title=PAGE_TITLE, page_icon=PAGE_ICON)
@@ -1,107 +0,0 @@
"""
Reverse engineering of Google Bard from https://github.com/discordtehe/Bard
"""
import json
import random
import re
import string

import requests
import streamlit as st

US_AG = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36"


class BardChat:
    """
    A class to interact with Google Bard.
    Parameters
        session_id: str
            The __Secure-1PSID cookie.
    """

    __slots__ = [
        "headers",
        "_reqid",
        "SNlM0e",
        "conversation_id",
        "response_id",
        "choice_id",
        "session",
    ]

    def __init__(self, session_id):
        headers = {
            "Host": "bard.google.com",
            "X-Same-Domain": "1",
            "User-Agent": US_AG,
            "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
            "Origin": "https://bard.google.com",
            "Referer": "https://bard.google.com/",
        }
        self._reqid = int("".join(random.choices(string.digits, k=4)))
        self.conversation_id = ""
        self.response_id = ""
        self.choice_id = ""
        self.session = requests.Session()
        self.session.headers = headers
        self.session.cookies.set("__Secure-1PSID", session_id)
        self.SNlM0e = self.__get_snlm0e()

    def __get_snlm0e(self):
        resp = self.session.get(url="https://bard.google.com/", timeout=10)
        # Find "SNlM0e":"<ID>"
        if resp.status_code != 200:
            st.error("Could not get Google Bard")
        SNlM0e = re.search(r"SNlM0e\":\"(.*?)\"", resp.text).group(1)
        return SNlM0e

    def ask(self, message: str) -> dict:
        """
        Send a message to Google Bard and return the response.
        :param message: The message to send to Google Bard.
        :return: A dict containing the response from Google Bard.
        """
        # url params
        params = {
            "bl": "boq_assistant-bard-web-server_20230326.21_p0",
            "_reqid": str(self._reqid),
            "rt": "c",
        }

        # message arr -> data["f.req"]. Message is double json stringified
        message_struct = [
            [message],
            None,
            [self.conversation_id, self.response_id, self.choice_id],
        ]
        data = {
            "f.req": json.dumps([None, json.dumps(message_struct)]),
            "at": self.SNlM0e,
        }

        # do the request!
        resp = self.session.post(
            "https://bard.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate",
            params=params,
            data=data,
            timeout=120,
        )

        chat_data = json.loads(resp.content.splitlines()[3])[0][2]
        if not chat_data:
            return {"content": f"Google Bard encountered an error: {resp.content}."}
        json_chat_data = json.loads(chat_data)
        results = {
            "content": json_chat_data[0][0],
            "conversation_id": json_chat_data[1][0],
            "response_id": json_chat_data[1][1],
            "factualityQueries": json_chat_data[3],
            "textQuery": json_chat_data[2][0] if json_chat_data[2] is not None else "",
            "choices": [{"id": i[0], "content": i[1]} for i in json_chat_data[4]],
        }
        self.conversation_id = results["conversation_id"]
        self.response_id = results["response_id"]
        self.choice_id = results["choices"][0]["id"]
        self._reqid += 100000
        return results
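For reference, a minimal usage sketch of the BardChat class removed above; SESSION_ID is a placeholder for a real __Secure-1PSID cookie value, and the class would normally be imported as in the `from .agi.bard import BardChat` line further down this diff:

# Hypothetical usage sketch of the deleted BardChat class; SESSION_ID is a placeholder.
SESSION_ID = "<__Secure-1PSID cookie value>"

bard = BardChat(SESSION_ID)        # fetches the SNlM0e token during construction
reply = bard.ask("Hello, Bard!")   # dict with "content", "choices", "conversation_id", ...
print(reply["content"])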
@@ -1,285 +0,0 @@
from datetime import datetime
from queue import Empty, Queue
from threading import Thread
from time import time
from urllib.parse import quote

from curl_cffi.requests import post

cf_clearance = ""
user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36"  # noqa: E501


class PhindResponse:
    class Completion:
        class Choices:
            def __init__(self, choice: dict) -> None:
                self.text = choice["text"]
                self.content = self.text.encode()
                self.index = choice["index"]
                self.logprobs = choice["logprobs"]
                self.finish_reason = choice["finish_reason"]

            def __repr__(self) -> str:
                return f"""<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>"""  # noqa: E501

        def __init__(self, choices: dict) -> None:
            self.choices = [self.Choices(choice) for choice in choices]

    class Usage:
        def __init__(self, usage_dict: dict) -> None:
            self.prompt_tokens = usage_dict["prompt_tokens"]
            self.completion_tokens = usage_dict["completion_tokens"]
            self.total_tokens = usage_dict["total_tokens"]

        def __repr__(self):
            return f"""<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>"""  # noqa: E501

    def __init__(self, response_dict: dict) -> None:
        self.response_dict = response_dict
        self.id = response_dict["id"]
        self.object = response_dict["object"]
        self.created = response_dict["created"]
        self.model = response_dict["model"]
        self.completion = self.Completion(response_dict["choices"])
        self.usage = self.Usage(response_dict["usage"])

    def json(self) -> dict:
        return self.response_dict


class Search:
    @staticmethod
    def create(prompt: str, actual_search: bool = True, language: str = "en") -> dict:  # None = no search
        if user_agent == "":
            raise ValueError("user_agent must be set, refer to documentation")

        if not actual_search:
            return {
                "_type": "SearchResponse",
                "queryContext": {
                    "originalQuery": prompt
                },
                "webPages": {
                    "webSearchUrl": f"https://www.bing.com/search?q={quote(prompt)}",
                    "totalEstimatedMatches": 0,
                    "value": []
                },
                "rankingResponse": {
                    "mainline": {
                        "items": []
                    }
                }
            }

        headers = {
            "authority": "www.phind.com",
            "accept": "*/*",
            "accept-language": "en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3",
            "cookie": f"cf_clearance={cf_clearance}",
            "origin": "https://www.phind.com",
            "referer": "https://www.phind.com/search?q=hi&c=&source=searchbox&init=true",
            "sec-ch-ua": '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"macOS"',
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin",
            "user-agent": user_agent
        }

        return post("https://www.phind.com/api/bing/search", headers=headers, json={
            "q": prompt,
            "userRankList": {},
            "browserLanguage": language}).json()["rawBingResults"]


class Completion:
    @staticmethod
    def create(
        model="gpt-4",
        prompt: str = "",
        results: dict = None,
        creative: bool = False,
        detailed: bool = False,
        code_context: str = "",
        language: str = "en") -> PhindResponse:

        if user_agent == "":
            raise ValueError("user_agent must be set, refer to documentation")

        if results is None:
            results = Search.create(prompt, actual_search=True)

        if len(code_context) > 2999:
            raise ValueError("codeContext must be less than 3000 characters")

        models = {
            "gpt-4": "expert",
            "gpt-3.5-turbo": "intermediate",
            "gpt-3.5": "intermediate",
        }

        json_data = {
            "question": prompt,
            "bingResults": results,  # response.json()['rawBingResults'],
            "codeContext": code_context,
            "options": {
                "skill": models[model],
                "date": datetime.now().strftime("%d/%m/%Y"),
                "language": language,
                "detailed": detailed,
                "creative": creative
            }
        }

        headers = {
            "authority": "www.phind.com",
            "accept": "*/*",
            "accept-language": "en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3",
            "content-type": "application/json",
            "cookie": f"cf_clearance={cf_clearance}",
            "origin": "https://www.phind.com",
            "referer": "https://www.phind.com/search?q=hi&c=&source=searchbox&init=true",
            "sec-ch-ua": '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"macOS"',
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin",
            "user-agent": user_agent
        }

        completion = ""
        response = post("https://www.phind.com/api/infer/answer", headers=headers, json=json_data, timeout=99999,
                        impersonate="chrome110")
        for line in response.text.split("\r\n\r\n"):
            completion += (line.replace("data: ", ""))

        return PhindResponse({
            "id": f"cmpl-1337-{int(time())}",
            "object": "text_completion",
            "created": int(time()),
            "model": models[model],
            "choices": [{
                "text": completion,
                "index": 0,
                "logprobs": None,
                "finish_reason": "stop"
            }],
            "usage": {
                "prompt_tokens": len(prompt),
                "completion_tokens": len(completion),
                "total_tokens": len(prompt) + len(completion)
            }
        })


class StreamingCompletion:
    message_queue = Queue()
    stream_completed = False

    @staticmethod
    def request(model, prompt, results, creative, detailed, code_context, language) -> None:

        models = {
            "gpt-4": "expert",
            "gpt-3.5-turbo": "intermediate",
            "gpt-3.5": "intermediate",
        }

        json_data = {
            "question": prompt,
            "bingResults": results,
            "codeContext": code_context,
            "options": {
                "skill": models[model],
                "date": datetime.now().strftime("%d/%m/%Y"),
                "language": language,
                "detailed": detailed,
                "creative": creative
            }
        }

        headers = {
            "authority": "www.phind.com",
            "accept": "*/*",
            "accept-language": "en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3",
            "content-type": "application/json",
            "cookie": f"cf_clearance={cf_clearance}",
            "origin": "https://www.phind.com",
            "referer": "https://www.phind.com/search?q=hi&c=&source=searchbox&init=true",
            "sec-ch-ua": '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"macOS"',
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin",
            "user-agent": user_agent
        }

        post("https://www.phind.com/api/infer/answer",
             headers=headers, json=json_data, timeout=99999, impersonate="chrome110",
             content_callback=StreamingCompletion.handle_stream_response)

        StreamingCompletion.stream_completed = True

    @staticmethod
    def create(
        model: str = "gpt-4",
        prompt: str = "",
        results: dict = None,
        creative: bool = False,
        detailed: bool = False,
        code_context: str = "",
        language: str = "en"):

        if user_agent == "":
            raise ValueError("user_agent must be set, refer to documentation")

        if results is None:
            results = Search.create(prompt, actual_search=True)

        if len(code_context) > 2999:
            raise ValueError("codeContext must be less than 3000 characters")

        Thread(target=StreamingCompletion.request, args=[
            model, prompt, results, creative, detailed, code_context, language]).start()

        while StreamingCompletion.stream_completed is not True or not StreamingCompletion.message_queue.empty():
            try:
                chunk = StreamingCompletion.message_queue.get(timeout=0)

                if chunk == b"data: \r\ndata: \r\ndata: \r\n\r\n":
                    chunk = b"data: \n\n\r\n\r\n"

                chunk = chunk.decode()

                chunk = chunk.replace("data: \r\n\r\ndata: ", "data: \n")
                chunk = chunk.replace("\r\ndata: \r\ndata: \r\n\r\n", "\n\n\r\n\r\n")
                chunk = chunk.replace("data: ", "").replace("\r\n\r\n", "")

                yield PhindResponse({
                    "id": f"cmpl-1337-{int(time())}",
                    "object": "text_completion",
                    "created": int(time()),
                    "model": model,
                    "choices": [{
                        "text": chunk,
                        "index": 0,
                        "logprobs": None,
                        "finish_reason": "stop"
                    }],
                    "usage": {
                        "prompt_tokens": len(prompt),
                        "completion_tokens": len(chunk),
                        "total_tokens": len(prompt) + len(chunk)
                    }
                })

            except Empty:
                pass

    @staticmethod
    def handle_stream_response(response):
        StreamingCompletion.message_queue.put(response)
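For reference, a minimal usage sketch of the Phind wrapper removed above, mirroring the phind_get_answer call in the conversation module below; the cookie and user-agent values are placeholders the caller must supply, and the module would be imported as in the `from .agi import phind` line further down:

# Hypothetical usage sketch of the deleted phind module; both globals are placeholders.
phind.cf_clearance = "<cf_clearance cookie value>"
phind.user_agent = "<matching browser user-agent string>"

results = phind.Search.create("How do Python generators work?", actual_search=True)
answer = phind.Completion.create(model="gpt-4", prompt="How do Python generators work?", results=results)
print(answer.completion.choices[0].text)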
@@ -2,11 +2,8 @@ from random import randrange

import streamlit as st
from openai.error import InvalidRequestError, OpenAIError
from requests.exceptions import TooManyRedirects
from streamlit_chat import message

from .agi import phind
from .agi.bard import BardChat
from .agi.chat_gpt import create_gpt_completion
from .stt import show_voice_input
from .tts import show_audio_player
@@ -100,33 +97,6 @@ def show_gpt_conversation() -> None:
        st.error(err)


def show_bard_conversation() -> None:
    try:
        bard = BardChat(st.secrets.api_credentials.bard_session)
        ai_content = bard.ask(st.session_state.user_text)
        st.warning(ai_content.get("content"))
    except (TooManyRedirects, AttributeError) as err:
        st.error(err)


def phind_get_answer(question: str):
    phind.cf_clearance = st.secrets.api_credentials.phind_cf_clearance
    phind.user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36"  # noqa: E501

    try:
        result = phind.Completion.create(
            model="gpt-4",
            prompt=question,
            results=phind.Search.create(question, actual_search=True),
            creative=False,
            detailed=False,
            code_context=""
        )
        st.markdown(result.completion.choices[0].text)
    except Exception as e:
        st.error(e)


def show_conversation() -> None:
    if st.session_state.messages:
        st.session_state.messages.append({"role": "user", "content": st.session_state.user_text})
@@ -136,9 +106,4 @@ def show_conversation() -> None:
            {"role": "system", "content": ai_role},
            {"role": "user", "content": st.session_state.user_text},
        ]
    if st.session_state.model == "bard":
        show_bard_conversation()
    elif st.session_state.model == "phind-gpt-4":
        phind_get_answer(st.session_state.user_text)
    else:
        show_gpt_conversation()
@@ -1,10 +1,9 @@
pip>=23.1
streamlit>=1.21.0
pip>=23.1.2
streamlit>=1.22.0
streamlit-chat>=0.0.2.2
streamlit_option_menu>=0.3.2
openai>=0.27.4
openai>=0.27.5
gtts>=2.3.1
bokeh==2.4.2
streamlit-bokeh-events>=0.1.2
watchdog>=3.0.0
curl_cffi>=0.5.5