-
Notifications
You must be signed in to change notification settings - Fork 4
/
async_openai_embedding_two.py
48 lines (37 loc) · 1.52 KB
/
async_openai_embedding_two.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import pandas as pd
import os
import asyncio
from openai import AsyncOpenAI
from pathlib import Path
# Load the pre-processed tweets; assumes the CSV has a 'text' column
# (read by process_batch below) — TODO confirm against the producer script.
df = pd.read_csv('processed/formatted_tweets.csv')
print("worked")
model = "text-embedding-3-large"  # OpenAI embedding model used for all batches
# NOTE(review): if OPENAI_API_KEY is unset, .get() returns None and the
# client will fail at request time, not here — verify the env var is exported.
client = AsyncOpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
async def get_embeddings(texts, model=model):
    """Fetch embeddings for a batch of texts via the async OpenAI client.

    Newlines are replaced with spaces before the request (the API handles
    single-line inputs more predictably). Returns one embedding vector per
    input text, in the same order as *texts*.
    """
    cleaned = [entry.replace("\n", " ") for entry in texts]
    # Single API call covers the whole batch.
    response = await client.embeddings.create(input=cleaned, model=model)
    return [record.embedding for record in response.data]
async def process_batch(df, start_index, batch_size):
    """Embed one slice of df['text'] and store the vectors back in place.

    Rows [start_index, start_index + batch_size) get their vector written
    into the 'embeddings' column; pandas clamps the slice at the end of the
    frame, so a short final batch needs no special handling.
    """
    window = df['text'][start_index:start_index + batch_size]
    vectors = await get_embeddings(list(window))
    # Write each vector to its originating row (column is object dtype,
    # so a list fits in a single cell).
    for offset, vector in enumerate(vectors):
        df.at[start_index + offset, 'embeddings'] = vector
async def main(df, batch_size):
    """Launch one embedding task per batch and await them all concurrently.

    NOTE(review): every batch is fired at once with no throttling; a large
    frame may hit OpenAI rate limits — confirm acceptable for the data size.
    """
    starts = range(0, len(df), batch_size)
    await asyncio.gather(*(process_batch(df, s, batch_size) for s in starts))
# Set your batch size: 32 texts/request keeps the combined input under the
# model's context limit (rough estimate: 32 * ~200 = ~6400 < 8191 tokens).
batch_size = 32
# Pre-create an object-dtype column so each cell can hold a list (vector).
df['embeddings'] = [None] * len(df)
# Run the main function with asyncio
asyncio.run(main(df, batch_size))
# Fix: the output directory differs from the input one and may not exist;
# create it first so to_csv doesn't raise FileNotFoundError and discard
# the (paid-for) embeddings already computed above.
out_path = Path('processed/embeddings/openai_embeddings_async_v1.csv')
out_path.parent.mkdir(parents=True, exist_ok=True)
df.to_csv(out_path)
print("embeddings successfully saved dear anon")