Update chain_app.py
chain_app.py  CHANGED  (+85 -20)
@@ -831,6 +831,36 @@ async def chat_profile():

            ]
        ),
+
+        cl.ChatProfile(
+            name = "gemma2-27B",
+            markdown_description = 'Google Generation 2 Open Source LLM with 27B parameters',
+            starters=[
+                cl.Starter(
+                    label="Morning routine ideation",
+                    message="Can you help me create a personalized morning routine that would help increase my productivity throughout the day? Start by asking me about my current habits and what activities energize me in the morning.",
+                    icon="https://chainlit-rag-copilot-r2xd.onrender.com/public/idea.svg",
+                ),
+
+                cl.Starter(
+                    label="Explain superconductors",
+                    message="Explain superconductors like I'm five years old.",
+                    icon="https://chainlit-rag-copilot-r2xd.onrender.com/public/learn.svg",
+                ),
+                cl.Starter(
+                    label="Python script for daily email reports",
+                    message="Write a script to automate sending daily email reports in Python, and walk me through how I would set it up.",
+                    icon="https://chainlit-rag-copilot-r2xd.onrender.com/public/terminal.svg",
+                ),
+                cl.Starter(
+                    label="Text inviting friend to wedding",
+                    message="Write a text asking a friend to be my plus-one at a wedding next month. I want to keep it super short and casual, and offer an out.",
+                    icon="https://chainlit-rag-copilot-r2xd.onrender.com/public/write.svg",
+                )
+
+            ]
+        ),
+
        cl.ChatProfile(
            name = "gemma2-9B",
            markdown_description = 'Google Generation 2 Open Source LLM with 9B parameters',
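The new gemma2-27B profile above plugs into Chainlit's chat-profile mechanism. For reference, this is roughly how such a list is registered and how the selected profile is read back later; a minimal sketch using the standard Chainlit API (cl.set_chat_profiles, cl.user_session), not the Space's full chat_profile() function.

# Minimal sketch: registering chat profiles and reading the user's selection.
# Standard Chainlit API; trimmed to one profile, not the Space's full list.
import chainlit as cl

@cl.set_chat_profiles
async def chat_profile():
    return [
        cl.ChatProfile(
            name="gemma2-27B",
            markdown_description="Google Generation 2 Open Source LLM with 27B parameters",
            starters=[
                cl.Starter(
                    label="Explain superconductors",
                    message="Explain superconductors like I'm five years old.",
                ),
            ],
        ),
    ]

@cl.on_chat_start
async def on_chat_start():
    # The name chosen in the UI is what the handlers in this file branch on.
    chat_profile = cl.user_session.get("chat_profile")
    await cl.Message(content=f"Selected profile: {chat_profile}").send()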
@@ -1766,14 +1796,34 @@ async def on_chat_start():
    # await cl.Message(
    #     content='Im one of the best open source models that cohere released. i am configured by 2 iranian boys named Artin Daneshvar and Sadra Noadosut to help you out!'
    # ).send()
-
+
+    if chat_profile == 'gemma2-27B':
+        await cl.ChatSettings(
+            [
+                Select(
+                    id="Google-Model",
+                    label="Google - Model",
+                    values=["Gemma2-27B"],
+                    initial_index=0,
+                ),
+                Slider(
+                    id="Temperature",
+                    label="Model Temperature",
+                    initial=0.7,
+                    min=0,
+                    max=1,
+                    step=0.1,
+                ),
+            ]
+        ).send()
+
    if chat_profile == 'gemma2-9B':
        await cl.ChatSettings(
            [
                Select(
                    id="Google-Model",
                    label="Google - Model",
-                   values=["
+                   values=["Gemma2-9B"],
                    initial_index=0,
                ),
                Slider(
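For context on where the Select and Slider values above land: the widgets come from chainlit.input_widget, ChatSettings.send() returns the current settings dict, and later changes arrive through the on_settings_update callback. A minimal sketch; the session keys used here are illustrative, not taken from the Space's code.

# Minimal sketch: consuming ChatSettings like the ones defined above.
import chainlit as cl
from chainlit.input_widget import Select, Slider

@cl.on_chat_start
async def start():
    settings = await cl.ChatSettings(
        [
            Select(id="Google-Model", label="Google - Model", values=["Gemma2-27B"], initial_index=0),
            Slider(id="Temperature", label="Model Temperature", initial=0.7, min=0, max=1, step=0.1),
        ]
    ).send()
    cl.user_session.set("temperature", settings["Temperature"])  # illustrative key

@cl.on_settings_update
async def on_settings_update(settings):
    # Called when the user changes a widget; keys are the widget ids.
    cl.user_session.set("temperature", settings["Temperature"])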
@@ -1912,15 +1962,12 @@ async def main(message: cl.Message):
            cl.Action(name="No", value="No", label="❌ No"),
        ],
    ).send()
-
-    name = await cl.AskUserMessage(content="What is your name?", timeout=10).send()
-
    if res and res.get("value") == "Yes":
        if chat_profile == 'neural-brain-AI':
            completion = openai_client.chat.completions.create(
                model="ft:gpt-3.5-turbo-1106:nb:aria1:9UWDrLJK",
                messages=[
-                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people"},
                    {"role": "user", "content": message.content}
                ]
            )
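The hunk ends before the completion is used. With the v1 openai-python client, the reply text is normally pulled from completion.choices[0].message.content and sent back as a Chainlit message; the sketch below shows that pattern and is an assumption about the surrounding code, which this diff does not show.

# Sketch of the usual response handling for a completion like the one above
# (openai-python v1). The surrounding code is not part of this diff.
import chainlit as cl
from openai import OpenAI

openai_client = OpenAI()  # expects OPENAI_API_KEY in the Space's secrets

async def reply(message: cl.Message) -> None:
    completion = openai_client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": "You are neural nexus official chatbot."},
            {"role": "user", "content": message.content},
        ],
    )
    await cl.Message(content=completion.choices[0].message.content).send()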
@@ -1932,7 +1979,7 @@ async def main(message: cl.Message):
        elif chat_profile == "Dorna-AI":
            result = hf_text_client.predict(
                message=message.content,
-               request=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+               request=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
                param_3=512,
                param_4=0.7,
                param_5=0.95,

@@ -1946,7 +1993,7 @@ async def main(message: cl.Message):
            completion = openai_client.chat.completions.create(
                model="gpt-4o-mini",
                messages=[
-                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people"},
                    {"role": "user", "content": message.content}
                ]
            )

@@ -1975,7 +2022,7 @@ async def main(message: cl.Message):
            completion = openai_client.chat.completions.create(
                model="gpt-4",
                messages=[
-                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people"},
                    {"role": "user", "content": message.content}
                ]
            )

@@ -1988,7 +2035,7 @@ async def main(message: cl.Message):
            completion = openai_client.chat.completions.create(
                model="gpt-3.5-turbo",
                messages=[
-                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people"},
                    {"role": "user", "content": message.content}
                ]
            )

@@ -2000,7 +2047,7 @@ async def main(message: cl.Message):
            completion = openai_client.chat.completions.create(
                model="GPT-3.5-turbo-0125",
                messages=[
-                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people"},
                    {"role": "user", "content": message.content}
                ]
            )

@@ -2012,7 +2059,7 @@ async def main(message: cl.Message):
            completion = openai_client.chat.completions.create(
                model="gpt-3.5-turbo-1106",
                messages=[
-                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+                   {"role": "system", "content": f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people"},
                    {"role": "user", "content": message.content}
                ]
            )

@@ -2054,7 +2101,7 @@ async def main(message: cl.Message):
            client = Client("Qwen/Qwen2-57b-a14b-instruct-demo", hf_token=hf_token)
            result = client.predict(
                query=message.content,
-               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
                api_name="/model_chat"
            )
            await cl.Message(

@@ -2065,7 +2112,7 @@ async def main(message: cl.Message):
            client = Client("Qwen/Qwen2-7b-instruct-demo", hf_token=hf_token)
            result = client.predict(
                query=message.content,
-               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
                api_name="/model_chat"
            )
            await cl.Message(

@@ -2077,7 +2124,7 @@ async def main(message: cl.Message):
            client = Client("Qwen/Qwen2-1.5b-instruct-demo", hf_token=hf_token)
            result = client.predict(
                query=message.content,
-               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
                api_name="/model_chat"
            )
            await cl.Message(

@@ -2089,7 +2136,7 @@ async def main(message: cl.Message):
            client = Client("Qwen/Qwen2-0.5B-Instruct", hf_token=hf_token)
            result = client.predict(
                query=message.content,
-               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
                api_name="/model_chat"
            )
            await cl.Message(

@@ -2100,7 +2147,7 @@ async def main(message: cl.Message):
            client = Client("Qwen/Qwen1.5-110B-Chat-demo", hf_token=hf_token)
            result = client.predict(
                query=message.content,
-               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
                api_name="/model_chat"
            )
            await cl.Message(

@@ -2112,7 +2159,7 @@ async def main(message: cl.Message):
            client = Client("Qwen/Qwen1.5-32B-Chat-demo", hf_token=hf_token)
            result = client.predict(
                query=message.content,
-               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
                api_name="/model_chat"
            )
            await cl.Message(

@@ -2124,7 +2171,7 @@ async def main(message: cl.Message):
            client = Client("Qwen/qwen1.5-MoE-A2.7B-Chat-demo", hf_token=hf_token)
            result = client.predict(
                query=message.content,
-               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+               system=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
                api_name="/model_chat"
            )
            await cl.Message(
@@ -2306,6 +2353,24 @@ async def main(message: cl.Message):

            # Send the concatenated content as a message
            await cl.Message(content=complete_content).send()
+
+        elif chat_profile == 'gemma2-27B':
+            client = Client("gokaygokay/Gemma-2-llamacpp")
+            result = client.predict(
+                message=message.content,
+                model="gemma-2-27b-it-Q5_K_M.gguf",
+                system_message=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
+                max_tokens=2048,
+                temperature=0.7,
+                top_p=0.95,
+                top_k=40,
+                repeat_penalty=1.1,
+                api_name="/chat"
+            )
+            await cl.Message(
+                content=result[1][0][1]
+
+            ).send()

        elif chat_profile == 'gemma2-9B':
            completion = groq_client.chat.completions.create(
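The new gemma2-27B branch calls the public gokaygokay/Gemma-2-llamacpp Space through gradio_client. The snippet below is a standalone way to exercise the same endpoint outside Chainlit; the keyword arguments mirror the diff, and the result[1][0][1] indexing is copied from it, so both depend on the remote Space keeping its current /chat signature and return shape.

# Standalone sketch for testing the endpoint used by the gemma2-27B branch.
# Arguments and result indexing are taken from the diff; the remote Space's
# API may change, so treat the return shape as an assumption.
from gradio_client import Client

client = Client("gokaygokay/Gemma-2-llamacpp")
result = client.predict(
    message="Explain superconductors like I'm five years old.",
    model="gemma-2-27b-it-Q5_K_M.gguf",
    system_message="You are a helpful assistant.",
    max_tokens=2048,
    temperature=0.7,
    top_p=0.95,
    top_k=40,
    repeat_penalty=1.1,
    api_name="/chat",
)
print(result[1][0][1])  # chat history -> first turn -> assistant reply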
@@ -2370,7 +2435,7 @@ async def main(message: cl.Message):
        elif chat_profile == "zephyr-7B":
            result = hf_text_client.predict(
                message=message.content,
-               request=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help
+               request=f"You are neural nexus official chatbot, you are made by Artin Daneshvar and Sadra Noadoust and you are here to help people",
                param_3=512,
                param_4=0.7,
                param_5=0.95,
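For the gemma2-9B context lines above, groq_client is the Groq SDK client, whose chat-completions interface mirrors OpenAI's. A minimal sketch; the model id and environment variable name are assumptions, not taken from this diff.

# Minimal sketch of the Groq call pattern behind the gemma2-9B branch.
# Model id and env var name are assumptions; only the client interface is standard.
import os
from groq import Groq

groq_client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
completion = groq_client.chat.completions.create(
    model="gemma2-9b-it",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
    ],
)
print(completion.choices[0].message.content)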