convert_file.py
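"""Translate a source file into Cangjie with a fine-tuned chat model.

Reads a.java, wraps it in an OpenBuddy-style conversation prompt, runs a single
generation round on a local checkpoint, and prints the model's translation.
"""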
import transformers  # noqa: F401
from transformers import set_seed
from chatproto.conversation.history import ConversationHistory
from chatproto.registry import get_conv_settings
from katheryne.tools.chatbot import get_generator, get_model_response, get_user_input, stop_response
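# Conversation template in the OpenBuddy format and the locally trained checkpoint.
# Adjust the checkpoint path and CUDA device below to match your environment.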
settings = get_conv_settings("openbuddy")
generator = get_generator("llm_trainer/lightning_logs/version_0/huggingface_format/checkpoint-step-931", settings, device="cuda:1")
set_seed(42)
instruction = """A chat between a curious user and an artificial intelligence assistant.
The assistant gives helpful, detailed, and polite answers to the user's questions."""
instruction = """Consider a conversation between User (a human) and Assistant (named Buddy).
Buddy can fluently speak the user's language (e.g. English, Chinese).
Buddy possesses vast knowledge about the world, history, and culture."""
instruction = ""
num_rounds = 0
history = ConversationHistory(
    system=instruction,
    messages=[],
    offset=0,
    settings=settings,
)
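# Read the source file to translate; the file name is hard-coded here.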
with open("a.java", "r", encoding="utf-8") as f:
    user_input = f.read()
history.append_message(settings.roles[0], "Please translate the following code into cangjie:\n```\n" + user_input + "\n```")
history.append_message(settings.roles[1], None)
prompt = history.get_prompt()
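# Generate a completion (length cap 2048) and strip the echoed prompt so that
# only the newly generated text remains.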
response = get_model_response(generator, prompt, 2048)[0]['generated_text']
response = response[len(prompt):]
output = response
# output = stop_response(response)
history.messages[-1][1] = output
print("-" * 30 + f" Round {num_rounds} " + "-" * 30)
print(f"{output}")
# user_input = f"{output}\n\n"
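
# A minimal sketch of how the single round above could be extended into an
# interactive multi-round loop, following the hints left in the code
# (num_rounds and the commented-out user_input reassignment). Using input()
# for the next user turn is an assumption; the imported get_user_input helper
# may be what the author intends here instead.
#
# while True:
#     num_rounds += 1
#     user_input = input("Enter input (type 'quit' to exit): ")
#     if user_input.strip().lower() == "quit":
#         break
#     history.append_message(settings.roles[0], user_input)
#     history.append_message(settings.roles[1], None)
#     prompt = history.get_prompt()
#     response = get_model_response(generator, prompt, 2048)[0]["generated_text"]
#     output = response[len(prompt):]
#     history.messages[-1][1] = output
#     print("-" * 30 + f" Round {num_rounds} " + "-" * 30)
#     print(output)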