Update chain_app.py
chain_app.py (+8, -25)
@@ -2471,19 +2471,12 @@ async def main(message: cl.Message):
             stop=None,
         )
 
-        complete_content = ""
-
-        # Iterate over each chunk
         for chunk in completion:
-            # Retrieve the content from the current chunk
-            content = chunk.choices[0].delta.content
-
+            # Retrieve the content from the current chunk
             # Check if the content is not None before concatenating it
-            if content is not None:
-                complete_content += content
+            if chunk is not None:
+                await msg.stream_token(chunk.choices[0].delta.content)
 
-        # Send the concatenated content as a message
-        await cl.Message(content=complete_content).send()
 
     elif chat_profile == 'gemma-7B':
         completion = groq_client.chat.completions.create(
@@ -2501,20 +2494,12 @@ async def main(message: cl.Message):
             stop=None,
         )
 
-        complete_content = ""
-
-        # Iterate over each chunk
         for chunk in completion:
-            # Retrieve the content from the current chunk
-            content = chunk.choices[0].delta.content
-
+            # Retrieve the content from the current chunk
             # Check if the content is not None before concatenating it
-            if content is not None:
-                complete_content += content
+            if chunk is not None:
+                await msg.stream_token(chunk.choices[0].delta.content)
 
-        # Send the concatenated content as a message
-        await cl.Message(content=complete_content).send()
-
     elif chat_profile == "zephyr-7B":
         result = hf_text_client.predict(
             message=message.content,
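The two Groq branches above now stream each delta through `msg.stream_token` instead of collecting everything into `complete_content` and sending one message at the end. The diff never shows where `msg` is created, nor whether the completion call passes `stream=True`, so the sketch below fills those parts in as assumptions (the model id is also a placeholder). It also guards on `delta.content` rather than on the chunk object, since the final chunk of a stream typically carries `content=None`.

```python
import chainlit as cl
from groq import Groq

groq_client = Groq()  # assumes GROQ_API_KEY is set in the environment


@cl.on_message
async def main(message: cl.Message):
    # Placeholder message that tokens are streamed into;
    # the diff does not show where `msg` comes from, so this is an assumption
    msg = cl.Message(content="")
    await msg.send()

    completion = groq_client.chat.completions.create(
        model="llama3-70b-8192",  # placeholder model id, not taken from the diff
        messages=[{"role": "user", "content": message.content}],
        stream=True,  # required for the chunk-by-chunk iteration shown in the diff
        stop=None,
    )

    for chunk in completion:
        content = chunk.choices[0].delta.content
        # The last chunk of a stream usually carries content=None,
        # so guard on the delta content rather than on the chunk itself
        if content is not None:
            await msg.stream_token(content)

    # Mark the streamed message as complete
    await msg.update()
```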
@@ -2524,10 +2509,8 @@ async def main(message: cl.Message):
             param_5=0.95,
             api_name="/chat"
         )
-
-        await cl.Message(
-            content=model_response
-        ).send()
+        for token in result:
+            await msg.stream_token(token)
 
     elif chat_profile == 'mixtral-8x7B':
         completion = groq_client.chat.completions.create(
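In the zephyr-7B branch, `result` comes from `hf_text_client.predict(..., api_name="/chat")`, which on a typical Gradio chat Space is the full reply returned as a single string, so `for token in result` streams one character at a time. Below is a minimal sketch of the same idea that chunks on whitespace instead; the Space id and the trimmed-down argument list are assumptions rather than values from the diff, and the character-level loop used in the commit works as well, just with more stream calls.

```python
import chainlit as cl
from gradio_client import Client

# Hypothetical Space id; the diff only shows the client variable name
hf_text_client = Client("user/zephyr-7b-space")


@cl.on_message
async def main(message: cl.Message):
    msg = cl.Message(content="")
    await msg.send()

    # Blocking call that returns the whole reply as one string
    result = hf_text_client.predict(
        message=message.content,
        api_name="/chat",
    )

    # Stream word-sized tokens instead of single characters
    for token in result.split(" "):
        await msg.stream_token(token + " ")

    await msg.update()
```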