From b61d94aef0311046cd3ea4152111fa57eb734754 Mon Sep 17 00:00:00 2001
From: LoganDark
Date: Fri, 26 May 2023 05:23:58 -0700
Subject: [PATCH] Flush output every token in generate_completions.py (#73)

---
 rwkv/generate_completions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rwkv/generate_completions.py b/rwkv/generate_completions.py
index ae8c2e7..c065cef 100644
--- a/rwkv/generate_completions.py
+++ b/rwkv/generate_completions.py
@@ -64,7 +64,7 @@ for GENERATION in range(generation_count):
     for i in range(tokens_per_generation):
         token = sampling.sample_logits(logits, temperature, top_p)
 
-        print(tokenizer.decode([token]), end='')
+        print(tokenizer.decode([token]), end='', flush=True)
 
         logits, state = model.eval(token, state, state, logits)
 
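
Context for the change: print(..., end='') writes no newline, so Python's line-buffered stdout (interactive terminals) holds the decoded text until a later newline, and block-buffered stdout (pipes, redirects) holds it even longer. Passing flush=True pushes each token to the terminal as soon as it is sampled. Below is a minimal standalone sketch of the streaming behaviour this enables; it uses hypothetical placeholder token strings rather than the real RWKV model and tokenizer.

import time

# Stand-in for decoded model output; the real script decodes sampled
# token ids with tokenizer.decode([token]).
fake_tokens = ["The", " quick", " brown", " fox", " jumps", ".", "\n"]

for token_text in fake_tokens:
    # flush=True makes each fragment appear immediately, even though end=''
    # suppresses the newline that would otherwise trigger a line-buffer flush.
    print(token_text, end='', flush=True)
    time.sleep(0.25)  # simulate per-token generation latency

Running the sketch directly and then piped (e.g. through cat) with flush=True removed shows the difference: without the flush, the piped output arrives only when the buffer fills or the program exits.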