
Commit

Merge branch 'main' of github.com:UniversityOfHelsinkiCS/gptwrapper
erikao1998 committed Jul 22, 2024
2 parents 357921c + 7b922c8 commit 284cf1e
Showing 8 changed files with 450 additions and 701 deletions.
762 changes: 257 additions & 505 deletions package-lock.json

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions src/client/components/Chat/Conversation.tsx
@@ -60,8 +60,9 @@ const Conversation = ({
<Box mb={1}>
<Typography variant="h6">{t('chat:conversation')}</Typography>
</Box>
{messages.map(({ role, content }) => (
<Response key={content} role={role} content={content} />
{messages.map(({ role, content }, index) => (
// eslint-disable-next-line
<Response key={content + index} role={role} content={content} />
))}
{completion && (
<>
177 changes: 109 additions & 68 deletions src/client/components/Chat/index.tsx
@@ -20,7 +20,18 @@ import useUserStatus from '../../hooks/useUserStatus'
import PromptSelector from './PromptSelector'
import TokenUsageWarning from './TokenUsageWarning'
import useInfoTexts from '../../hooks/useInfoTexts'

import useRetryTimeout from '../../hooks/useRetryTimeout'

const WAIT_FOR_STREAM_TIMEOUT = 4000
const ALLOWED_FILE_TYPES = [
'text/plain',
'text/html',
'text/css',
'text/csv',
'text/markdown',
'text/md',
'application/pdf',
]
const chatPersistingEnabled = false // import.meta.env.VITE_CHAT_PERSISTING

/**
@@ -76,6 +87,7 @@ const Chat = () => {
const [tokenUsageWarning, setTokenUsageWarning] = useState('')
const [tokenWarningVisible, setTokenWarningVisible] = useState(false)
const [modelTemperature, setModelTemperature] = useState(0.5)
const [setRetryTimeout, clearRetryTimeout] = useRetryTimeout()

const { t, i18n } = useTranslation()
const { language } = i18n
@@ -110,68 +122,25 @@ const Chat = () => {
setMessage('')
setMessages(messages.slice(0, -1))
setTokenWarningVisible(false)
clearRetryTimeout()
}

const handleSend = async (userConsent: boolean) => {
const formData = new FormData()

let file = inputFileRef.current.files[0] as File

const allowedFileTypes = [
'text/plain',
'text/html',
'text/css',
'text/csv',
'text/markdown',
'text/md',
'application/pdf',
]

if (file) {
if (allowedFileTypes.includes(file.type)) {
formData.append('file', file)
} else {
file = null
}
}

if (!userConsent) {
setMessages((prev) => [
...prev,
{ role: 'user', content: message + (file ? `\n\n${file.name}` : '') },
])
}
const handleReset = () => {
if (streamController) streamController.abort()

setStreamController(undefined)
setMessages([])
setSystem('')
setMessage('')
const { tokenUsageAnalysis, stream, controller } =
await getCompletionStream(
system,
messages.concat(
userConsent
? []
: [
{
role: 'user',
content: message + (file ? `${t('fileInfoPrompt')}` : ''),
},
]
),
model,
formData,
userConsent,
modelTemperature,
courseId
)

if (tokenUsageAnalysis && tokenUsageAnalysis.message) {
setTokenUsageWarning(tokenUsageAnalysis.message)
setTokenWarningVisible(true)
return
}
setCompletion('')
inputFileRef.current.value = ''
setFileName('')
clearRetryTimeout()
}

const processStream = async (stream: ReadableStream) => {
try {
const reader = stream.getReader()
setStreamController(controller)

let content = ''
const decoder = new TextDecoder()
@@ -204,24 +173,96 @@ const Chat = () => {
refetchStatus()
inputFileRef.current.value = ''
setFileName('')
clearRetryTimeout()
}
}

const handleContinue = () => {
handleSend(true)
setTokenWarningVisible(false)
const handleRetry = async (
getCompletionParams: Parameters<typeof getCompletionStream>[0],
abortController: AbortController
) => {
if (!abortController || abortController.signal.aborted) return

abortController?.abort('Creating a stream took too long')
const newAbortController = new AbortController()
setStreamController(newAbortController)

const { stream: retriedStream } = await getCompletionStream({
...getCompletionParams,
abortController: newAbortController,
})

await processStream(retriedStream)
}

const handleReset = () => {
if (streamController) streamController.abort()
const handleSend = async (userConsent: boolean) => {
const formData = new FormData()
let file = inputFileRef.current.files[0] as File
if (file) {
if (ALLOWED_FILE_TYPES.includes(file.type)) {
formData.append('file', file)
} else {
file = null
}
}

setStreamController(undefined)
setMessages([])
setSystem('')
if (!userConsent) {
setMessages((prev) => [
...prev,
{ role: 'user', content: message + (file ? `\n\n${file.name}` : '') },
])
}

// Abort the old request if a new one is sent
// Also clear the retry timeout and message
streamController?.abort('Sending a new request, aborting the old one')
clearRetryTimeout()
setMessage('')
setCompletion('')
inputFileRef.current.value = ''
setFileName('')

const abortController = new AbortController()
setStreamController(abortController)

const getCompletionsArgs = {
system,
messages: messages.concat(
userConsent
? []
: [
{
role: 'user',
content: message + (file ? `${t('fileInfoPrompt')}` : ''),
},
]
),
model,
formData,
userConsent,
modelTemperature,
courseId,
abortController,
}
// Retry the request if the server has not started streaming within WAIT_FOR_STREAM_TIMEOUT milliseconds
setRetryTimeout(
() => handleRetry(getCompletionsArgs, abortController),
WAIT_FOR_STREAM_TIMEOUT
)

const { tokenUsageAnalysis, stream } =
await getCompletionStream(getCompletionsArgs)

if (tokenUsageAnalysis && tokenUsageAnalysis.message) {
setTokenUsageWarning(tokenUsageAnalysis.message)
setTokenWarningVisible(true)
return
}

clearRetryTimeout()
await processStream(stream)
}

const handleContinue = () => {
handleSend(true)
setTokenWarningVisible(false)
}

const handleStop = () => {
@@ -241,7 +282,7 @@ const Chat = () => {
setActivePromptId(promptId)
}

const handleSlider = (event: Event, newValue: number | number[]) => {
const handleSlider = (_: Event, newValue: number | number[]) => {
setModelTemperature(newValue as number)
}

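Taken together, the changes in this file implement a retry-if-the-stream-never-starts flow: handleSend wires an AbortController into the request, arms a retry timeout, and hands the stream to processStream, which clears the timeout once the response has been handled. A condensed sketch of that flow follows; the names come from this file, but the bodies are reduced to the essentials and it is not a drop-in replacement:

// Condensed, illustrative sketch of the retry flow above (not repo code).
const sendWithRetry = async () => {
  const abortController = new AbortController()
  setStreamController(abortController)

  const args = {
    system, messages, model, formData,
    userConsent, modelTemperature, courseId, abortController,
  }

  // If the server has not started streaming within WAIT_FOR_STREAM_TIMEOUT ms,
  // handleRetry aborts the stalled request and retries once with a fresh controller.
  setRetryTimeout(() => handleRetry(args, abortController), WAIT_FOR_STREAM_TIMEOUT)

  const { tokenUsageAnalysis, stream } = await getCompletionStream(args)
  if (tokenUsageAnalysis?.message) return // show the token warning instead of streaming

  clearRetryTimeout() // the stream opened in time, cancel the pending retry
  await processStream(stream) // clears the timeout again once the response finishes
}
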
52 changes: 36 additions & 16 deletions src/client/components/Chat/util.ts
@@ -1,15 +1,26 @@
import { Message } from '../../types'
import { postAbortableStream } from '../../util/apiClient'

export const getCompletionStream = async (
system: string,
messages: Message[],
model: string,
formData: FormData,
userConsent: boolean,
modelTemperature: number,
interface GetCompletionStreamProps {
system: string
messages: Message[]
model: string
formData: FormData
userConsent: boolean
modelTemperature: number
courseId?: string
) => {
abortController?: AbortController
}
export const getCompletionStream = async ({
system,
messages,
model,
formData,
userConsent,
modelTemperature,
courseId,
abortController,
}: GetCompletionStreamProps) => {
const data = {
courseId,
options: {
@@ -28,16 +39,25 @@ export const getCompletionStream = async (

formData.set('data', JSON.stringify(data))

return postAbortableStream('/ai/stream', formData)
return postAbortableStream('/ai/stream', formData, abortController)
}

export const getCourseCompletionStream = async (
id: string,
system: string,
messages: Message[],
model: string,
interface GetCourseCompletionStreamProps {
id: string
system: string
messages: Message[]
model: string
courseId: string
) => {
abortController?: AbortController
}
export const getCourseCompletionStream = async ({
id,
system,
messages,
model,
courseId,
abortController,
}: GetCourseCompletionStreamProps) => {
const data = {
id,
courseId,
@@ -55,5 +75,5 @@ export const getCourseCompletionStream = async (
const formData = new FormData()
formData.set('data', JSON.stringify(data))

return postAbortableStream(`/ai/stream/`, formData)
return postAbortableStream(`/ai/stream/`, formData, abortController)
}
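
Both helpers now take a single props object instead of a long positional argument list, and both forward an optional AbortController down to postAbortableStream. An illustrative call with the new signature; every value below is a placeholder, not a repo default:

// Illustrative only; values are placeholders.
const abortController = new AbortController()

const { stream } = await getCompletionStream({
  system: 'You are a helpful assistant',
  messages: [{ role: 'user', content: 'Hello' }],
  model: 'gpt-4',
  formData: new FormData(),
  userConsent: false,
  modelTemperature: 0.5,
  courseId: undefined, // optional
  abortController, // optional; lets the caller cancel the request
})

// Aborting the controller cancels the underlying request inside postAbortableStream.
abortController.abort('User pressed stop')
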
36 changes: 36 additions & 0 deletions src/client/hooks/useRetryTimeout.ts
@@ -0,0 +1,36 @@
import { useState, useRef, useCallback } from 'react'

const useRetryTimeout = (): [
(cb: () => Promise<void> | void, time: number) => void,
() => void,
] => {
// eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-unused-vars
const [_, setDummyState] = useState(false) // Dummy state to force re-render
const timeoutRef = useRef<NodeJS.Timeout | null>(null)

const setRetryTimeout = useCallback(
(cb: () => Promise<void> | void, time: number) => {
if (timeoutRef.current !== null) {
clearTimeout(timeoutRef.current)
}
const timeoutId = setTimeout(() => {
cb()
}, time)
timeoutRef.current = timeoutId
setDummyState((prev) => !prev) // Trigger a re-render
},
[]
)

const clearRetryTimeout = useCallback(() => {
if (timeoutRef.current !== null) {
clearTimeout(timeoutRef.current)
timeoutRef.current = null
setDummyState((prev) => !prev) // Trigger a re-render
}
}, [])

return [setRetryTimeout, clearRetryTimeout]
}

export default useRetryTimeout
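
The hook wraps a single timeout ref behind a setter/clearer pair: setting a new timeout replaces any pending one, and the dummy state toggle exists only to force a re-render when the timeout is armed or cleared. A small usage sketch, mirroring how Chat/index.tsx uses it; retry, openStream and readStream are hypothetical helpers, not repo code:

// Inside a React component (illustrative sketch):
const [setRetryTimeout, clearRetryTimeout] = useRetryTimeout()

const startRequest = async () => {
  // Arm a one-shot retry: if nothing clears it within 4000 ms, retry() runs.
  setRetryTimeout(() => retry(), 4000)

  const stream = await openStream() // hypothetical request helper

  // The request came back in time, so drop the pending retry.
  clearRetryTimeout()
  await readStream(stream) // hypothetical
}
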
8 changes: 6 additions & 2 deletions src/client/util/apiClient.ts
@@ -19,8 +19,12 @@ apiClient.interceptors.request.use((config) => {
return newConfig
})

export const postAbortableStream = async (path: string, formData: FormData) => {
const controller = new AbortController()
export const postAbortableStream = async (
path: string,
formData: FormData,
externalController?: AbortController
) => {
const controller = externalController ?? new AbortController()

const adminHeaders = {} as any
const adminLoggedInAs = localStorage.getItem('adminLoggedInAs')
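
postAbortableStream previously always created its own AbortController; it now accepts an optional external one and falls back to a fresh controller when none is supplied, so the UI-level controller in Chat can cancel a request mid-flight. The remainder of the function is collapsed in this view, so the usage below is an assumption based only on the visible signature:

// Assumed usage; only the new signature is visible in this diff.
const formData = new FormData()
formData.set('data', JSON.stringify({ options: {} }))

// Caller-owned controller: anything holding it (e.g. the Stop button handler
// in Chat/index.tsx) can cancel the in-flight request.
const externalController = new AbortController()
await postAbortableStream('/ai/stream', formData, externalController)

// Omitting the third argument keeps the old behaviour: the function falls
// back to a controller it creates itself.
await postAbortableStream('/ai/stream', formData)
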