-
Notifications
You must be signed in to change notification settings - Fork 1
/
scrape.py
63 lines (50 loc) · 1.42 KB
/
scrape.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
#amazon website scraper with gui
#option1
import time
import openpyxl
from selenium import webdriver
from selenium.webdriver.common.by import By
from tkinter import *
from tkinter import ttk
# Launch one Chrome session that is reused for every search.
# NOTE(review): the driver is never quit, so the browser outlives the GUI —
# consider calling driver.quit() when the window closes.
driver = webdriver.Chrome()
# Create the Excel workbook; all scrapes append rows to this single sheet.
workbook = openpyxl.Workbook()
sheet = workbook.active
sheet.title = 'Scraped Data'
# Header row; scrape_data appends [title, price] rows beneath it.
sheet.append(['Title', 'Price'])
# Create the GUI window.
root = Tk()
root.title('Scrape Data')
root.geometry('400x300')
# Prompt label above the entry field.
label = Label(root, text='Enter a search term:')
label.pack()
# Entry field; scrape_data reads the typed term via search_term.get().
search_term = StringVar()
entry = Entry(root, textvariable=search_term)
entry.pack()
# Callback for the "Scrape Data" button (wired up below the definition).
def scrape_data():
    """Scrape Amazon search results for the entered term into the workbook.

    Reads the search term from the GUI's StringVar, loads the Amazon India
    results page for it, extracts each result tile's title and whole-unit
    price, appends them to the shared worksheet, and saves the workbook
    as '<term>.xlsx' in the working directory.
    """
    # Function-scope imports keep the module's top-level import block intact.
    from urllib.parse import quote_plus
    from selenium.common.exceptions import NoSuchElementException

    # Get the search term typed into the GUI.
    term = search_term.get()
    # Percent-encode the term so spaces/special characters form a valid
    # query string (the original interpolated the raw text into the URL).
    driver.get(f'https://www.amazon.co.in/s?k={quote_plus(term)}')
    # Fixed wait for the results to render.
    # NOTE(review): a WebDriverWait on '.s-result-item' would be more robust
    # than a hard-coded sleep — confirm acceptable before changing behavior.
    time.sleep(5)
    # Scrape every result tile on the page.
    results = driver.find_elements(By.CSS_SELECTOR, '.s-result-item')
    for result in results:
        # Some tiles (ads, separators) lack a title or price. Skip only the
        # expected "element not found" case instead of a bare except, which
        # would also have hidden real bugs (e.g. errors from sheet.append).
        try:
            title = result.find_element(By.CSS_SELECTOR, 'h2').text
            price = result.find_element(By.CSS_SELECTOR, '.a-price-whole').text
        except NoSuchElementException:
            continue
        sheet.append([title, price])
    # Save the Excel workbook after each scrape so results are persisted.
    workbook.save(f'{term}.xlsx')
# Wire the scrape callback to a button in the window.
scrape_button = Button(root, text='Scrape Data', command=scrape_data)
scrape_button.pack()
# Hand control to Tk; this call blocks until the window is closed.
root.mainloop()