Skip to content
This repository has been archived by the owner on Apr 23, 2024. It is now read-only.

Add docker fix and Parallel processing implementation. #462

Merged
merged 113 commits into the base branch from the source branch
Apr 23, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
113 commits
Select commit Hold shift + click to select a range
1581646
Update Download.ts
birdup000 Apr 17, 2023
e612a76
Round Robin Algorithm add
birdup000 Apr 17, 2023
022e487
Update Download.ts
birdup000 Apr 17, 2023
5f09cf2
implement parallel upload
birdup000 Apr 17, 2023
29be422
Update Upload.tsx
birdup000 Apr 17, 2023
774c8b0
Update Upload.tsx
birdup000 Apr 17, 2023
cc50ebf
Update Upload.tsx
birdup000 Apr 17, 2023
d0d803c
Update Upload.tsx
birdup000 Apr 17, 2023
e8d09dd
Update Upload.tsx
birdup000 Apr 17, 2023
f87be55
Update Upload.tsx
birdup000 Apr 17, 2023
4248b88
implement axios parallel
birdup000 Apr 17, 2023
a4d3783
Update Upload.tsx
birdup000 Apr 17, 2023
37b382e
Update Upload.tsx
birdup000 Apr 17, 2023
5ebf30d
Update Upload.tsx
birdup000 Apr 19, 2023
c5f1a65
Parallel Processing test 1
birdup000 Apr 20, 2023
4ff7df2
Update Upload.tsx
birdup000 Apr 20, 2023
261021c
Update Upload.tsx
birdup000 Apr 20, 2023
e1175c3
Update Upload.tsx
birdup000 Apr 20, 2023
8b9c9a0
Update Upload.tsx
birdup000 Apr 20, 2023
124e22c
Update Upload.tsx
birdup000 Apr 20, 2023
dc81e5e
Update Upload.tsx
birdup000 Apr 20, 2023
29e669d
Update Upload.tsx
birdup000 Apr 20, 2023
50b37ca
Update Upload.tsx
birdup000 Apr 20, 2023
b5302e7
Update Upload.tsx
birdup000 Apr 20, 2023
dc1870e
Implement worker threads
birdup000 Apr 20, 2023
50e57e4
Update Upload.tsx
birdup000 Apr 20, 2023
4b98e56
Update Upload.tsx
birdup000 Apr 20, 2023
b1c30d7
Update Upload.tsx
birdup000 Apr 20, 2023
df00d66
Update Upload.tsx
birdup000 Apr 20, 2023
ab00fb2
Update Upload.tsx
birdup000 Apr 20, 2023
81af720
Update Upload.tsx
birdup000 Apr 20, 2023
f9855aa
Update Upload.tsx
birdup000 Apr 20, 2023
59f7c86
Update Upload.tsx
birdup000 Apr 20, 2023
0ac3dac
implement inline workers
birdup000 Apr 20, 2023
0fe765e
Update Upload.tsx
birdup000 Apr 20, 2023
9da807b
Update Upload.tsx
birdup000 Apr 20, 2023
3aec1a8
Update Upload.tsx
birdup000 Apr 20, 2023
aac4ab3
Update Upload.tsx
birdup000 Apr 20, 2023
5cebeb0
Update Upload.tsx
birdup000 Apr 20, 2023
d78ed95
Update Upload.tsx
birdup000 Apr 20, 2023
359f4a7
Update Upload.tsx
birdup000 Apr 20, 2023
545ac53
Update Upload.tsx
birdup000 Apr 20, 2023
14e31fa
Update Upload.tsx
birdup000 Apr 20, 2023
9b9e832
Update Upload.tsx
birdup000 Apr 20, 2023
d2574b7
Inline WORKER
birdup000 Apr 20, 2023
ce4d92b
Update Upload.tsx
birdup000 Apr 20, 2023
86c43f1
Update install.docker.new.sh
birdup000 Apr 21, 2023
b94fc10
add bit heaps and add divide and conquer algorithm
birdup000 Apr 21, 2023
0746eb6
Merge branch 'experiment' of https://github.com/greengeckowizard/tele…
birdup000 Apr 21, 2023
fb482a6
Update Download.ts
birdup000 Apr 21, 2023
4ba58bc
Update Download.ts
birdup000 Apr 21, 2023
48e4cfa
implement better divide and conquer with the fast priority queue expe…
birdup000 Apr 21, 2023
88f220c
Update Download.ts
birdup000 Apr 21, 2023
3b037f4
Update Download.ts
birdup000 Apr 21, 2023
1bc91c5
optimize the chunk generation for better use less memory experimental
birdup000 Apr 21, 2023
3ca4334
Update Download.ts
birdup000 Apr 21, 2023
b6bc94b
Implement linked list for better concatenation
birdup000 Apr 21, 2023
92b3fc8
Update Download.ts
birdup000 Apr 21, 2023
6bfe3ed
Update Download.ts
birdup000 Apr 21, 2023
b6a5eba
Update Download.ts
birdup000 Apr 21, 2023
bee9f36
Update Download.ts
birdup000 Apr 21, 2023
afad0f4
Update Download.ts
birdup000 Apr 21, 2023
aa07ff6
Update Download.ts
birdup000 Apr 21, 2023
82e1870
revert the linked list can add implementation later
birdup000 Apr 22, 2023
0b4becb
Edit attributes to executable
birdup000 Apr 22, 2023
de96977
Update install.docker.new.sh
birdup000 Apr 22, 2023
141c4c0
Update install.docker.new.sh
birdup000 Apr 22, 2023
706770d
Update Dockerfile
birdup000 Apr 22, 2023
be02245
Update install.docker.new.sh
birdup000 Apr 22, 2023
7c70ef1
Update Dockerfile
birdup000 Apr 22, 2023
f342707
Update Dockerfile
birdup000 Apr 22, 2023
7d96344
Update package.json
birdup000 Apr 22, 2023
41d13d8
Update Dockerfile
birdup000 Apr 22, 2023
5923dd6
hopefully fix the docker script take me back
birdup000 Apr 22, 2023
ff8ae8b
Update install.docker.new.sh
birdup000 Apr 22, 2023
5241b35
Update install.docker.new.sh
birdup000 Apr 22, 2023
ad966e5
Update package.json
birdup000 Apr 22, 2023
7dd668a
fix the ssl unsupported patch 1
birdup000 Apr 22, 2023
f6c0b16
Update install.docker.new.sh
birdup000 Apr 22, 2023
dad650e
Update docker-compose.yml
birdup000 Apr 22, 2023
9219be2
Update install.docker.new.sh
birdup000 Apr 22, 2023
1537a74
Delete docker/data directory
birdup000 Apr 22, 2023
0e9da53
Update package.json
birdup000 Apr 22, 2023
ed61170
Update migration.sql
birdup000 Apr 22, 2023
d89b589
Update migration.sql
birdup000 Apr 22, 2023
098bd56
Update migration.sql
birdup000 Apr 22, 2023
9291617
Update migration.sql
birdup000 Apr 22, 2023
c08d813
Update migration.sql
birdup000 Apr 22, 2023
e577b10
Update migration.sql
birdup000 Apr 22, 2023
31f50f9
Update Dockerfile
birdup000 Apr 22, 2023
e5a7cc6
Delete api/prisma/migrations/20220525012308_add_password_files directory
birdup000 Apr 22, 2023
5bf5e31
Update migration.sql
birdup000 Apr 22, 2023
bc625ee
Update Dockerfile
birdup000 Apr 22, 2023
de743ff
Update Dockerfile
birdup000 Apr 22, 2023
fd48f29
Update Dockerfile
birdup000 Apr 22, 2023
f6df814
Update Dockerfile
birdup000 Apr 22, 2023
cedc799
Update Dockerfile
birdup000 Apr 23, 2023
5b077b0
Update install.docker.new.sh
birdup000 Apr 23, 2023
7e92111
Update docker-compose.yml
birdup000 Apr 23, 2023
73d1ff2
Create migration.sql
birdup000 Apr 23, 2023
5489d62
Create migration.sql
birdup000 Apr 23, 2023
e103575
Create migration.sql
birdup000 Apr 23, 2023
7eb4362
Update migration.sql
birdup000 Apr 23, 2023
2e049a8
Delete api/prisma/20220525012308_add_password_files directory
birdup000 Apr 23, 2023
f4eca77
Update docker-compose.yml
birdup000 Apr 23, 2023
2615910
Dyslexia typo lol fix
birdup000 Apr 23, 2023
6f2d36c
Delete api/20220525012308_add_password_files directory
birdup000 Apr 23, 2023
b6db0df
Update docker-compose.yml
birdup000 Apr 23, 2023
92d9ecd
Update docker-compose.yml
birdup000 Apr 23, 2023
26be905
Update migration.sql
birdup000 Apr 23, 2023
fe85805
Update migration.sql
birdup000 Apr 23, 2023
f428c69
Update docker-compose.yml
birdup000 Apr 23, 2023
c08716b
Update install.docker.new.sh
birdup000 Apr 23, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM node:18.15.0 as build
FROM node:18.16.0 as build
ARG REACT_APP_TG_API_ID
ARG REACT_APP_TG_API_HASH

Expand All @@ -10,7 +10,9 @@ COPY api/package.json api/package.json
COPY web/package.json web/package.json
COPY docker/.env .
RUN yarn cache clean
RUN yarn install --network-timeout 1000000
RUN yarn install
RUN yarn global add prisma
RUN npx browserslist@latest --update-db
COPY . .
RUN export NODE_OPTIONS="--openssl-legacy-provider --no-experimental-fetch"
RUN yarn workspaces run build
1 change: 0 additions & 1 deletion docker/data/placeholder.txt

This file was deleted.

2 changes: 1 addition & 1 deletion docker/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ version: '3'

services:
teledrive:
command: bash -c "export NODE_OPTIONS='--openssl-legacy-provider --no-experimental-fetch' && yarn api prisma migrate deploy || yarn api prisma migrate resolve --applied 20220420012853_init && node api/dist/index.js"
command: bash -c "export NODE_OPTIONS='--openssl-legacy-provider --no-experimental-fetch' && yarn api prisma migrate deploy || yarn api prisma migrate resolve --applied "20220420012853_init" && node api/dist/index.js"
labels:
traefik.http.routers.server.rule: Host(`teledrive.localhost`)
traefik.port: 4000
Expand Down
Empty file modified install.caprover.sh
100644 → 100755
Empty file.
85 changes: 35 additions & 50 deletions install.docker.new.sh
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,70 +1,55 @@
#!/bin/bash

set -e -x
set -e

# Check if configuration file exists
if [ ! -f docker/.env ]; then
echo "Error: Configuration file not found."
exit 1
fi
echo "Node Version: $(node -v)"
echo "cURL Version: $(curl --version | head -n 1)"
echo "Docker Version: $(docker -v)"
echo "Docker Compose Version: $(docker compose version)"

# Create /docker directory if it doesn't exist
if [ ! -d docker ]; then
mkdir docker
fi
if [ ! -f docker/.env ]
then
echo "Generating .env file..."

# Configure Node.js and cURL
export NODE_OPTIONS="--openssl-legacy-provider --no-experimental-fetch"
printf "Node.js Version: %s\n" "$(node -v)"
printf "cURL Version: %s\n" "$(curl --version | head -n 1)"
ENV="develop"

# Check Docker and Docker Compose versions
printf "Docker Version: %s\n" "$(docker -v)"
printf "Docker Compose Version: %s\n" "$(docker compose -v)"
echo "Preparing your keys from https://my.telegram.org/"
read -p "Enter your TG_API_ID: " TG_API_ID
read -p "Enter your TG_API_HASH: " TG_API_HASH

# Disable Git-related functionality in BuildKit
export DOCKER_BUILDKIT=1
export BUILDKIT_PROGRESS=plain
export BUILDKIT_INLINE_CACHE=1
export BUILDKIT_ENABLE_LEGACY_GIT=0
echo
read -p "Enter your ADMIN_USERNAME: " ADMIN_USERNAME
read -p "Enter your PORT: " PORT
PORT="${PORT:=4000}"

# If no parameters are provided, start services using Docker Compose
if [ $# -eq 0 ]; then
# Stop and start services using Docker Compose
docker compose down
docker compose up --build --force-recreate -d
DB_PASSWORD=$(openssl rand -hex 48)

echo "ENV=$ENV" > docker/.env
echo "PORT=$PORT" >> docker/.env
echo "TG_API_ID=$TG_API_ID" >> docker/.env
echo "TG_API_HASH=$TG_API_HASH" >> docker/.env
echo "ADMIN_USERNAME=$ADMIN_USERNAME" >> docker/.env
export DATABASE_URL=postgresql://postgres:$DB_PASSWORD@db:5432/teledrive
echo "DB_PASSWORD=$DB_PASSWORD" >> docker/.env

cd docker
docker compose build teledrive
docker compose up -d
sleep 2
docker compose exec teledrive yarn workspace api prisma migrate reset
docker compose exec teledrive yarn workspace api prisma migrate deploy
# Update PostgreSQL password
DB_PASSWORD=$(openssl rand -hex 16)
docker compose exec docker-db-1 psql -U postgres -c "ALTER USER postgres PASSWORD '${DB_PASSWORD}';"
printf "Generated random DB_PASSWORD: %s\n" "$DB_PASSWORD"
fi
else
git pull origin experiment

export $(cat docker/.env | xargs)

# If "update" parameter is provided, update services using Docker Compose
if [ "$1" == "update" ]; then
cd docker
if ! git branch --list experiment >/dev/null; then
git branch experiment origin/experiment
fi
git checkout experiment
export $(grep -v '^#' docker/.env | xargs)
docker compose down
docker compose up --build --force-recreate -d
sleep 2
docker compose up -d
docker compose exec teledrive yarn workspace api prisma migrate deploy
git reset --hard
git clean -f
git pull origin experiment
fi

# If "permissions" parameter is provided, check if the current user has permission to modify necessary directories and files
if [ "$1" == "permissions" ]; then
if [ ! -w /var/run/docker.sock ] || [ ! -w ./docker/.env ] || [ ! -w ./docker/data ]; then
printf "This script requires permission to modify some files and directories.\n"
printf "Giving permission to the current user...\n"
sudo chown -R "$(whoami)" /var/run/docker.sock ./docker/.env ./docker/data
else
printf "No permissions required.\n"
fi
fi
4 changes: 3 additions & 1 deletion web/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,9 @@
"use-debounce": "^7.0.1",
"web-vitals": "^0.2.4",
"world_countries_lists": "^2.5.1",
"concat-stream": "^2.0.0"
"concat-stream": "^2.0.0",
"axios-parallel": "^1.1.2",
"fastpriorityqueue": "0.7.4"
},
"scripts": {
"start": "react-scripts --openssl-legacy-provider --no-experimental-fetch start",
Expand Down
181 changes: 107 additions & 74 deletions web/src/pages/dashboard/components/Upload.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -37,14 +37,16 @@ const Upload: React.FC<Props> = ({ dataFileList: [fileList, setFileList], parent
await new Promise(res => setTimeout(res, 3000 * ++retry))
await cb?.()
if (retry === RETRY_COUNT) {
notification.error({ message: 'Failed to upload file', description: <>
<Typography.Paragraph>
{error?.response?.data?.error || error.message || 'Something error'}
</Typography.Paragraph>
<Typography.Paragraph code>
{JSON.stringify(error?.response?.data || error?.data || error, null, 2)}
</Typography.Paragraph>
</> })
notification.error({
message: 'Failed to upload file', description: <>
<Typography.Paragraph>
{error?.response?.data?.error || error.message || 'Something error'}
</Typography.Paragraph>
<Typography.Paragraph code>
{JSON.stringify(error?.response?.data || error?.data || error, null, 2)}
</Typography.Paragraph>
</>
})
throw error
}
}
Expand Down Expand Up @@ -138,14 +140,17 @@ const Upload: React.FC<Props> = ({ dataFileList: [fileList, setFileList], parent
if (type === 'channel') {
peer = new Api.InputPeerChannel({
channelId: BigInt(peerId) as any,
accessHash: BigInt(accessHash as string) as any })
accessHash: BigInt(accessHash as string) as any
})
} else if (type === 'user') {
peer = new Api.InputPeerUser({
userId: BigInt(peerId.toString()) as any,
accessHash: BigInt(accessHash.toString()) as any })
accessHash: BigInt(accessHash.toString()) as any
})
} else if (type === 'chat') {
peer = new Api.InputPeerChat({
chatId: BigInt(peerId) as any })
chatId: BigInt(peerId) as any
})
}
}
return await client.sendFile(peer || 'me', {
Expand All @@ -160,7 +165,7 @@ const Upload: React.FC<Props> = ({ dataFileList: [fileList, setFileList], parent
attributes: forceDocument ? [
new Api.DocumentAttributeFilename({ fileName: response.file.name })
] : undefined,
workers: 6
workers: 1
})
}

Expand Down Expand Up @@ -203,72 +208,98 @@ const Upload: React.FC<Props> = ({ dataFileList: [fileList, setFileList], parent
}
}
} else {
for (let j = 0; j < fileParts; j++) {
const fileBlob = file.slice(j * MAX_UPLOAD_SIZE, Math.min(j * MAX_UPLOAD_SIZE + MAX_UPLOAD_SIZE, file.size))
const parts = Math.ceil(fileBlob.size / CHUNK_SIZE)
const PARALLELISM = 8

if (!deleted) {
const uploadPart = async (i: number) => {
if (responses?.length && cancelUploading.current && file.uid === cancelUploading.current) {
await Promise.all(responses.map(async response => {
try {
await req.delete(`/files/${response?.file.id}`)
} catch (error) {
// ignore
}
}))
cancelUploading.current = null
deleted = true
window.onbeforeunload = undefined as any
} else {
const blobPart = fileBlob.slice(i * CHUNK_SIZE, Math.min(i * CHUNK_SIZE + CHUNK_SIZE, file.size))
const data = new FormData()
data.append('upload', blobPart)
interface Response {
file?: {
id: string
}
}

const beginUpload = async () => {
const { data: response } = await req.post(`/files/upload${i > 0 && responses[j]?.file?.id ? `/${responses[j]?.file.id}` : ''}`, data, {
params: {
...parent?.id ? { parent_id: parent.id } : {},
relative_path: file.webkitRelativePath || null,
name: `${file.name}${fileParts > 1 ? `.part${String(j + 1).padStart(3, '0')}` : ''}`,
size: fileBlob.size,
mime_type: file.type || mime.lookup(file.name) || 'application/octet-stream',
part: i,
total_part: parts,
},
})
return response
const uploadFile = async (file: File, parent?: { id: string }, onProgress?: (progress: any, file: File) => void) => {
const fileParts = Math.ceil(file.size / MAX_UPLOAD_SIZE)
const totalAllParts = fileParts * Math.ceil(file.size / CHUNK_SIZE)
const totalParts = 0
let deleted = false
const responses: Response[] = []

const workerScript = `
const uploadPart = async (file, j, i) => {
const fileBlob = file.slice(
j * MAX_UPLOAD_SIZE,
Math.min(j * MAX_UPLOAD_SIZE + MAX_UPLOAD_SIZE, file.size)
)
const blobPart = fileBlob.slice(i * CHUNK_SIZE, Math.min(i * CHUNK_SIZE + CHUNK_SIZE, file.size))
const data = new FormData()
data.append('upload', blobPart)
const { data: response } = await axios.post(
\`/files/upload\${i > 0 && responses[j]?.file?.id ? \`/\${responses[j]?.file?.id}\` : ''}\`,
data,
{
params: {
...parent?.id ? { parent_id: parent.id } : {},
relative_path: file.webkitRelativePath || null,
name: \`\${file.name}\${fileParts > 1 ? \`.part\${String(j + 1).padStart(3, '0')}\` : ''}\`,
size: fileBlob.size,
mime_type: file.type || mime.lookup(file.name) || 'application/octet-stream',
part: i,
total_part: fileParts,
},
}
)
responses[j] = response
const percent = (++totalParts / totalAllParts * 100).toFixed(1)
self.postMessage({ percent })
}

self.onmessage = (event) => {
const { file, j, partIndex } = event.data
uploadPart(file, j, partIndex)
}
`
const workerBlob = new Blob([workerScript], { type: 'application/javascript' })
const workerUrl = URL.createObjectURL(workerBlob)

let trial = 0
while (trial < RETRY_COUNT) {
const uploadPart = async (j: number, i: number) => {
if (responses?.length && cancelUploading.current && file.name === cancelUploading.current) {
await Promise.all(
responses.map(async (response) => {
try {
responses[j] = await beginUpload()
trial = RETRY_COUNT
await req.delete(`/files/${response?.file?.id}`)
} catch (error) {
if (trial >= RETRY_COUNT) {
throw error
}
await new Promise(res => setTimeout(res, ++trial * 3000))
// ignore
}
})
)
cancelUploading.current = null
deleted = true
window.onbeforeunload = undefined as any
} else {
const worker = new Worker(workerUrl)
worker.postMessage({ file, j, partIndex: i })
worker.onmessage = (event) => {
const { percent } = event.data
onProgress?.({ percent }, file)
if (++i < fileParts) {
uploadPart(j, i)
}

const percent = (++totalParts / totalAllParts * 100).toFixed(1)
onProgress({ percent }, file)
}
}
}

const group = 2
await uploadPart(0)
for (let i = 1; i < parts - 1; i += group) {
if (deleted) break
const others = Array.from(Array(i + group).keys()).slice(i, Math.min(parts - 1, i + group))
await Promise.all(others.map(async j => await uploadPart(j)))
}
if (!deleted && parts - 1 > 0) {
await uploadPart(parts - 1)
}
const promises: Promise<void>[] = []

for (let j = 0; j < fileParts; j++) {
if (deleted) break
const groupIndex = Math.floor(j / PARALLELISM)
const partIndex = j % PARALLELISM
promises[groupIndex] = promises[groupIndex] || Promise.resolve()
promises[groupIndex] = promises[groupIndex].then(() => uploadPart(j, 0))
}

await Promise.all(promises)

URL.revokeObjectURL(workerUrl)
}
}

Expand All @@ -290,14 +321,16 @@ const Upload: React.FC<Props> = ({ dataFileList: [fileList, setFileList], parent
notification.error({
key: 'fileUploadError',
message: error?.response?.status || 'Something error',
...error?.response?.data ? { description: <>
<Typography.Paragraph>
{error?.response?.data?.error || error.message || 'Something error'}
</Typography.Paragraph>
<Typography.Paragraph code>
{JSON.stringify(error?.response?.data || error?.data || error, null, 2)}
</Typography.Paragraph>
</> } : {}
...error?.response?.data ? {
description: <>
<Typography.Paragraph>
{error?.response?.data?.error || error.message || 'Something error'}
</Typography.Paragraph>
<Typography.Paragraph code>
{JSON.stringify(error?.response?.data || error?.data || error, null, 2)}
</Typography.Paragraph>
</>
} : {}
})
// filesWantToUpload.current = filesWantToUpload.current?.map(f => f.uid === file.uid ? { ...f, status: 'done' } : f)
filesWantToUpload.current = filesWantToUpload.current?.map(f => f.uid === file.uid ? null : f).filter(Boolean)
Expand Down
Loading