-
Notifications
You must be signed in to change notification settings - Fork 12
/
gui.py
176 lines (149 loc) · 6.86 KB
/
gui.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
# -*- coding: utf-8 -*-
"""
@Time : 2024/1/15 11:45
@Auth : Juexiao Zhou
@File :gui.py
@IDE :PyCharm
@Page: www.joshuachou.ink
"""
import argparse
import os
from src.agent import Agent
import gradio as gr
import time
import yaml
import subprocess
import sys
def parse_args():
    """Parse command-line options for the GUI launcher.

    Returns:
        argparse.Namespace: holds ``port`` (int, default 5904), the TCP
        port the Gradio server listens on.
    """
    parser = argparse.ArgumentParser(description="AutoBA-GUI v0.1")
    parser.add_argument('--port', type=int, default=5904)
    return parser.parse_args()
def gradio_reset():
    """Reset callback: return one ``None`` per input widget to clear it."""
    return (None,) * 3
def get_an_example():
    """Return canned example values for the three GUI input fields.

    Returns:
        tuple[str, str, str]: (file paths + descriptions, output
        directory, analysis goal) for the bundled case1.1 experiment.
    """
    example_files = "\n".join([
        "./experiments/case1.1/data/SRR1234567_R1.fastq.gz: Paired-end Illumina WGS reads, forward",
        "./experiments/case1.1/data/SRR1234567_R2.fastq.gz: Paired-end Illumina WGS reads, reverse",
        "./experiments/case1.1/data/TruSeq3-PE.fa: Adapter sequences for trimming",
        "./experiments/case1.1/data/Reference.fasta: Reference genome for the organism under study in FASTA format",
    ])
    example_output = "./experiments/case1.1/output"
    example_goal = "To perform genome assembly using paired-end WGS data"
    return example_files, example_output, example_goal
def print_to_textbox(*args, **kwargs):
    """print()-style sink that records messages in the global HISTORY list.

    Joins the positional arguments with spaces (mirroring ``print``) and
    appends the newline-terminated line to ``HISTORY``. When ``HISTORY``
    already holds more than 100 entries the oldest entry is dropped first,
    bounding memory use for long-running jobs. Keyword arguments are
    accepted for signature compatibility with ``print`` but ignored.

    Note: the original ``if``/``else`` duplicated the ``append`` call in
    both branches; only the trimming is actually conditional.
    """
    global HISTORY
    text = " ".join(map(str, args)) + "\n"
    if len(HISTORY) > 100:
        HISTORY = HISTORY[1:]
    HISTORY.append(text)
def run(input_file_path_description, input_outoput_path, input_goal, model_engine, openai_api, execute):
    """Callback for the "Run" button: build an Agent and execute the analysis.

    Args:
        input_file_path_description: newline-separated "path: description" lines.
        input_outoput_path: output directory handed to the agent.
        input_goal: free-text description of the analysis goal.
        model_engine: model identifier selected in the dropdown.
        openai_api: OpenAI API key (may be empty for local engines).
        execute: whether generated code should actually be executed.

    Returns:
        str: static completion message shown in the output textbox.
    """
    data_list = input_file_path_description.split('\n')
    # NOTE(review): gui_mode=False looks odd for a GUI entry point —
    # confirm against src.agent.Agent whether True is intended here.
    agent = Agent(
        initial_data_list=data_list,
        output_dir=input_outoput_path,
        initial_goal_description=input_goal,
        model_engine=model_engine,
        openai_api=openai_api,
        execute=execute,
        blacklist='STAR,java,perl,annovar',
        gui_mode=False,
    )
    agent.run()
    return 'Job Finished!'
if __name__ == '__main__':
    args = parse_args()
    # model_folder = args.model_root
    # model_files = [os.path.join(model_folder, f) for f in os.listdir(model_folder) if os.path.isfile(os.path.join(model_folder, f))]
    # Module-level state. HISTORY backs print_to_textbox(); FORCE_STOP is
    # written here but never read anywhere in this file — presumably
    # reserved for a future "stop" control (TODO confirm).
    FORCE_STOP = False
    HISTORY = []
    # Build the Gradio UI. The nesting of Row/Column context managers
    # defines the page layout, so statement order matters.
    with gr.Blocks() as demo:
        gr.Markdown("""<h1 align="center">AutoBA-GUI v0.1</h1>""")
        gr.Markdown("""<h3>An AI Agent for Fully Automated Multi-omic Analyses</h3>""")
        # Top section: the three free-text inputs, model/API/execution
        # settings, and the action buttons.
        with gr.Row():
            with gr.Column():
                input_file_path_description = gr.TextArea(
                    label="File path and description",
                    placeholder="Enter the absolute file path and file description in the following format, e.g.: \n"
                                "/data/SRR1234567_R1.fastq.gz: Paired-end Illumina WGS reads, forward\n"
                                "/data/SRR1234567_R2.fastq.gz: Paired-end Illumina WGS reads, reverse\n"
                                "/data/TruSeq3-PE.fa: Adapter sequences for trimming\n"
                                "/data/Reference.fasta: Reference genome for the organism under study in FASTA format",
                    max_lines=999999,
                    container=True,
                )
                input_outoput_path = gr.TextArea(
                    label="Output path",
                    placeholder="Enter absolute output path, e.g.: \n"
                                "/output",
                    max_lines=999999,
                    container=True,
                )
                input_goal = gr.TextArea(
                    label="Goal",
                    placeholder="Describe your goal for analysis, e.g.: \n"
                                "To perform genome assembly using paired-end WGS data",
                    max_lines=999999,
                    container=True,
                )
                # Engines runnable locally (CodeLlama / Llama2 variants)
                # versus OpenAI-hosted engines; the dropdown offers both.
                local_model_engines = ['codellama-7bi',
                                       'codellama-13bi',
                                       'codellama-34bi',
                                       'llama2-7bc',
                                       'llama2-13bc',
                                       'llama2-70bc']
                gpt_model_engines = ['gpt-3.5-turbo',
                                     'gpt-4',
                                     'gpt-3.5-turbo-1106',
                                     'gpt-4-0613',
                                     'gpt-4-32k-0613',
                                     'gpt-4-1106-preview']
                model_engines = local_model_engines + gpt_model_engines
                with gr.Row():
                    model_engine = gr.Dropdown(
                        label="Select model engine",
                        choices=model_engines,
                        value='gpt-4-1106-preview',
                        max_choices=1,
                        container=True,
                        interactive=True
                    )
                    openai_api = gr.Textbox(
                        label="OpenAI API",
                        placeholder="sk-xxxxx. Leave it empty for local version ",
                        max_lines=1,
                        container=True,
                        interactive=True
                    )
                    execute = gr.Checkbox(
                        label="Execute Code",
                        value=False,
                        visible=True
                    )
                with gr.Row():
                    example_button = gr.Button(
                        value="Get an example",
                        interactive=True,
                        variant="primary"
                    )
                    upload_button = gr.Button(
                        value="Run",
                        interactive=True,
                        variant="primary"
                    )
                    #clear = gr.Button("Restart")
        # Bottom section: log area. Real-time output goes to the launching
        # terminal, not this textbox (run() only returns a final message).
        with gr.Row():
            with gr.Column():
                gr.Markdown('### Check command line for realtime outputs')
                output_log = gr.TextArea(
                    label="Check command line for realtime outputs",
                    max_lines=5,
                    container=True,
                    lines=5,
                )
        gr.Markdown("""This site was created by King Abdullah University of Science and Technology (KAUST).""")
        # Wire callbacks: "Run" feeds all inputs into run() and writes its
        # return value into output_log; "Get an example" pre-fills the
        # three text areas from get_an_example().
        click_event = upload_button.click(run,
                                          [input_file_path_description, input_outoput_path, input_goal, model_engine, openai_api,
                                           execute],
                                          [output_log])
        example_button.click(get_an_example,
                             [],
                             [input_file_path_description, input_outoput_path, input_goal])
    # share=True requests a public Gradio share link; server_name 0.0.0.0
    # binds on all network interfaces at the CLI-selected port.
    demo.launch(share=True, server_port=args.port, server_name='0.0.0.0')