
Commit

🐛 fix ensure_non_empty issues #99
zhzLuke96 committed Jul 12, 2024
1 parent ee07d82 commit bfc3789
Showing 2 changed files with 6 additions and 1 deletion.
1 change: 1 addition & 0 deletions modules/ChatTTS/ChatTTS/core.py
@@ -647,6 +647,7 @@ def _infer_code(
             stream=stream,
             context=self.context,
             stream_chunk_size=params.stream_chunk_size,
+            ensure_non_empty=params.ensure_non_empty,
         )
 
         del emb, input_ids
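The core.py change threads the caller-supplied flag through to the sampler. Before this commit the keyword argument was simply not passed, so whatever the user set on the params object presumably never reached generate and the sampler fell back to its own default. A rough sketch of that forwarding, with hypothetical names (InferCodeParams, infer_code and the default values here are illustrative, not the real ChatTTS definitions):

    from dataclasses import dataclass

    @dataclass
    class InferCodeParams:
        # hypothetical stand-in for the ChatTTS inference params object
        stream_chunk_size: int = 96
        ensure_non_empty: bool = True

    def infer_code(gpt, emb, input_ids, params: InferCodeParams, stream: bool = False):
        # forward the caller's flag explicitly so generate() does not silently
        # fall back to its own default
        return gpt.generate(
            emb,
            input_ids,
            stream=stream,
            stream_chunk_size=params.stream_chunk_size,
            ensure_non_empty=params.ensure_non_empty,
        )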
6 changes: 5 additions & 1 deletion modules/ChatTTS/ChatTTS/model/gpt.py
@@ -506,6 +506,10 @@ def generate(
 
             del logits
 
+            if i == 0:
+                # when i == 0, we want to ensure that the first token is not eos_token
+                scores[:, eos_token] = 0
+
             idx_next = torch.multinomial(scores, num_samples=1).to(finish.device)
 
             if not infer_text:
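The hunk above zeroes the EOS column of the sampling scores on the very first step, so the model cannot terminate with an empty sequence. A small self-contained sketch of the same masking trick (the eos_token id and the score values below are made up for illustration):

    import torch

    eos_token = 4  # hypothetical EOS id for this sketch

    # per-row sampling weights over 5 tokens (two sequences in a batch)
    scores = torch.tensor([[0.70, 0.10, 0.10, 0.05, 0.05],
                           [0.05, 0.05, 0.10, 0.10, 0.70]])

    # zero out the EOS column so multinomial can never pick it,
    # mirroring `scores[:, eos_token] = 0` on the first step (i == 0)
    scores[:, eos_token] = 0

    idx_next = torch.multinomial(scores, num_samples=1)
    assert not (idx_next == eos_token).any()  # EOS is never sampled

torch.multinomial normalizes each row internally (rows only need to be non-negative with a non-zero sum), so zeroing one column is enough to rule that index out.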
@@ -561,7 +565,7 @@
                     )
                     for result in new_gen:
                         yield result
-                        return
+                return
 
             del inputs_ids
             inputs_ids = inputs_ids_tmp
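The second gpt.py hunk removes and re-adds the same return line, i.e. only its indentation changes, which adjusts where the generator exits relative to the recursive ensure_non_empty regeneration. A rough sketch of that retry pattern, with hypothetical names (sample_tokens and the simplified generate below stand in for the real sampling loop):

    import random
    from typing import Iterator, List

    def sample_tokens(prompt: List[int]) -> List[int]:
        # stand-in for the real autoregressive sampling loop; sometimes ends immediately
        return [] if random.random() < 0.3 else [t + 1 for t in prompt]

    def generate(prompt: List[int], ensure_non_empty: bool = True, depth: int = 0) -> Iterator[List[int]]:
        # hypothetical streaming generator showing the ensure_non_empty retry
        tokens = sample_tokens(prompt)
        if not tokens and ensure_non_empty and depth == 0:
            # first attempt produced nothing: regenerate once and stream that instead
            new_gen = generate(prompt, ensure_non_empty, depth + 1)
            for result in new_gen:
                yield result
            # returning after the loop lets every regenerated chunk through;
            # a return inside the loop would cut the stream off after the first chunk
            return
        yield tokens

    for chunk in generate([1, 2, 3]):
        print(chunk)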
