Use new textual markdown streaming
parent 5d60927395
commit 3710a5b93b
2 changed files with 13 additions and 7 deletions

@@ -8,6 +8,6 @@ lorem-text
 platformdirs
 pyperclip
 simple_parsing
-textual[syntax]
+textual[syntax] >= 4
 tiktoken
 websockets
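
The new lower bound is presumably what guarantees the Markdown streaming call (output.append) used in the TUI hunks below. If that assumption holds, a runtime guard is a quick way to confirm the installed version; this is an illustrative sketch, not part of the commit:

    # Sketch of a runtime guard for the new floor. Assumption: the ">= 4"
    # constraint exists to guarantee the Markdown streaming API exercised by
    # the output.append(...) call in the TUI change below.
    from importlib.metadata import version

    major = int(version("textual").split(".")[0])
    assert major >= 4, "markdown streaming needs textual[syntax] >= 4"
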
@@ -145,7 +145,6 @@ class Tui(App[None]):
         await self.container.mount_all(
             [markdown_for_step(User(query)), output], before="#pad"
         )
-        tokens: list[str] = []
         update: asyncio.Queue[bool] = asyncio.Queue(1)
 
         for markdown in self.container.children:
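
The tokens list is dropped because the renderer now reads message.content directly; the bounded update queue stays as a coalescing wake-up between the token producer and the render loop (put_nowait in a try block on the producer side, while await update.get() on the consumer side, as in the next hunk). A standalone sketch of that pattern, with illustrative names (produce, render, the fake token list), might look like this:

    # Coalescing wake-up with a queue of size 1: the producer signals "new
    # content" with put_nowait and ignores QueueFull, so any number of tokens
    # arriving between renders collapses into a single wake-up, like the
    # update queue above. Names here are illustrative, not from the commit.
    import asyncio


    async def main() -> None:
        update: asyncio.Queue[bool] = asyncio.Queue(1)
        buffer: list[str] = []

        async def produce() -> None:
            for token in ["Hello", ", ", "world", "!"]:
                buffer.append(token)
                try:
                    update.put_nowait(True)  # wake the renderer
                except asyncio.QueueFull:
                    pass  # a wake-up is already pending; coalesce
                await asyncio.sleep(0)  # yield to the event loop
            await update.put(False)  # False shuts the render loop down

        async def render() -> None:
            while await update.get():
                print("render:", "".join(buffer))

        await asyncio.gather(produce(), render())


    asyncio.run(main())
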
@@ -166,15 +165,22 @@ class Tui(App[None]):
         )
 
         async def render_fun() -> None:
+            old_len = 0
             while await update.get():
-                if tokens:
-                    output.update("".join(tokens).strip())
+                content = message.content
+                new_len = len(content)
+                new_content = content[old_len:new_len]
+                if new_content:
+                    if old_len:
+                        await output.append(new_content)
+                    else:
+                        output.update(content)
                     self.container.scroll_end()
-                await asyncio.sleep(0.1)
+                old_len = new_len
+                await asyncio.sleep(0.01)
 
         async def get_token_fun() -> None:
             async for token in self.api.aask(session, query):
-                tokens.append(token)
                 message.content += token
                 try:
                     update.put_nowait(True)
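
Taken together, the render loop now slices out only the text that arrived since the last wake-up and feeds it to the widget's append method, falling back to a full update for the first chunk. Below is a self-contained sketch of that loop against a Textual Markdown widget (which is what output appears to be, given the commit title). The demo app, widget id, and fake token source are illustrative, and the producer/renderer split from the diff is collapsed into one task for brevity:

    # Minimal Textual app that streams markdown the same way: full update()
    # for the first chunk, then append() with only the new tail. Everything
    # except Markdown.update / Markdown.append is illustrative scaffolding.
    import asyncio

    from textual.app import App, ComposeResult
    from textual.widgets import Markdown


    async def fake_tokens():
        # Stand-in for the real token source (self.api.aask(session, query)).
        for token in "# Streaming demo\n\nTokens arrive one at a time.".split(" "):
            yield token + " "
            await asyncio.sleep(0.05)


    class StreamDemo(App[None]):
        def compose(self) -> ComposeResult:
            yield Markdown(id="output")

        async def on_mount(self) -> None:
            self.run_worker(self.stream())

        async def stream(self) -> None:
            output = self.query_one("#output", Markdown)
            content = ""
            old_len = 0
            async for token in fake_tokens():
                content += token
                new_content = content[old_len:]
                if new_content:
                    if old_len:
                        await output.append(new_content)  # incremental parse
                    else:
                        output.update(content)  # first chunk: full render
                old_len = len(content)


    if __name__ == "__main__":
        StreamDemo().run()
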