@@ -388,84 +388,88 @@ async def prompt(self, update: Update, context: ContextTypes.DEFAULT_TYPE):
             total_tokens = 0

             if self.config['stream']:
-                await update.effective_message.reply_chat_action(
-                    action=constants.ChatAction.TYPING,
-                    message_thread_id=get_thread_id(update)
-                )
+                async def _reply():
+                    nonlocal total_tokens
+                    await update.effective_message.reply_chat_action(
+                        action=constants.ChatAction.TYPING,
+                        message_thread_id=get_thread_id(update)
+                    )

-                stream_response = self.openai.get_chat_response_stream(chat_id=chat_id, query=prompt)
-                i = 0
-                prev = ''
-                sent_message = None
-                backoff = 0
-                stream_chunk = 0
-
-                async for content, tokens in stream_response:
-                    if len(content.strip()) == 0:
-                        continue
-
-                    stream_chunks = split_into_chunks(content)
-                    if len(stream_chunks) > 1:
-                        content = stream_chunks[-1]
-                        if stream_chunk != len(stream_chunks) - 1:
-                            stream_chunk += 1
-                            try:
-                                await edit_message_with_retry(context, chat_id, str(sent_message.message_id),
-                                                              stream_chunks[-2])
-                            except:
-                                pass
+                    stream_response = self.openai.get_chat_response_stream(chat_id=chat_id, query=prompt)
+                    i = 0
+                    prev = ''
+                    sent_message = None
+                    backoff = 0
+                    stream_chunk = 0
+
+                    async for content, tokens in stream_response:
+                        if len(content.strip()) == 0:
+                            continue
+
+                        stream_chunks = split_into_chunks(content)
+                        if len(stream_chunks) > 1:
+                            content = stream_chunks[-1]
+                            if stream_chunk != len(stream_chunks) - 1:
+                                stream_chunk += 1
+                                try:
+                                    await edit_message_with_retry(context, chat_id, str(sent_message.message_id),
+                                                                  stream_chunks[-2])
+                                except:
+                                    pass
+                                try:
+                                    sent_message = await update.effective_message.reply_text(
+                                        message_thread_id=get_thread_id(update),
+                                        text=content if len(content) > 0 else "..."
+                                    )
+                                except:
+                                    pass
+                                continue
+
+                        cutoff = get_stream_cutoff_values(update, content)
+                        cutoff += backoff
+
+                        if i == 0:
                             try:
+                                if sent_message is not None:
+                                    await context.bot.delete_message(chat_id=sent_message.chat_id,
+                                                                     message_id=sent_message.message_id)
                                 sent_message = await update.effective_message.reply_text(
                                     message_thread_id=get_thread_id(update),
-                                    text=content if len(content) > 0 else "..."
+                                    reply_to_message_id=get_reply_to_message_id(self.config, update),
+                                    text=content
                                 )
                             except:
-                                pass
-                            continue
-
-                    cutoff = get_stream_cutoff_values(update, content)
-                    cutoff += backoff
+                                continue

-                    if i == 0:
-                        try:
-                            if sent_message is not None:
-                                await context.bot.delete_message(chat_id=sent_message.chat_id,
-                                                                 message_id=sent_message.message_id)
-                            sent_message = await update.effective_message.reply_text(
-                                message_thread_id=get_thread_id(update),
-                                reply_to_message_id=get_reply_to_message_id(self.config, update),
-                                text=content
-                            )
-                        except:
-                            continue
+                        elif abs(len(content) - len(prev)) > cutoff or tokens != 'not_finished':
+                            prev = content

-                    elif abs(len(content) - len(prev)) > cutoff or tokens != 'not_finished':
-                        prev = content
+                            try:
+                                use_markdown = tokens != 'not_finished'
+                                await edit_message_with_retry(context, chat_id, str(sent_message.message_id),
+                                                              text=content, markdown=use_markdown)

-                        try:
-                            use_markdown = tokens != 'not_finished'
-                            await edit_message_with_retry(context, chat_id, str(sent_message.message_id),
-                                                          text=content, markdown=use_markdown)
+                            except RetryAfter as e:
+                                backoff += 5
+                                await asyncio.sleep(e.retry_after)
+                                continue

-                        except RetryAfter as e:
-                            backoff += 5
-                            await asyncio.sleep(e.retry_after)
-                            continue
+                            except TimedOut:
+                                backoff += 5
+                                await asyncio.sleep(0.5)
+                                continue

-                        except TimedOut:
-                            backoff += 5
-                            await asyncio.sleep(0.5)
-                            continue
+                            except Exception:
+                                backoff += 5
+                                continue

-                        except Exception:
-                            backoff += 5
-                            continue
+                        await asyncio.sleep(0.01)

-                    await asyncio.sleep(0.01)
+                        i += 1
+                        if tokens != 'not_finished':
+                            total_tokens = int(tokens)

-                    i += 1
-                    if tokens != 'not_finished':
-                        total_tokens = int(tokens)
+                await wrap_with_indicator(update, context, _reply, constants.ChatAction.TYPING)

             else:
                 async def _reply():