# Find the table containing the financial data
table = soup.find_all('table')[0]

# Extract the financial data from the table
column_data = []
# Put the 4-character code taken from near the end of the URL (url[-10:-6]) in the first column
column_data.append(url[-10:-6])
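# Illustration only (hypothetical URL, not from the original script): if the URL
# were 'https://example.com/financials?code=2330&tab=1', then url[-10:-6] would
# evaluate to '2330', the 4-character code sitting ten characters from the end.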
for cell in table.find_all('td'):
    column_data.append(cell.text)
data.append(column_data)

# Add a random delay between 3 and 13 seconds

# Create a DataFrame from the extracted data
df = pd.DataFrame(data)

# Save the DataFrame to an Excel file
# df.to_excel('financial_data.xlsx', index=False, header=False)
df.to_excel('我要的資料.xlsx', index=False, header=False)  # '我要的資料' = 'the data I want'
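# Note: writing an .xlsx file with DataFrame.to_excel requires an Excel engine
# such as openpyxl (pip install openpyxl) or XlsxWriter to be installed; pandas
# selects an available engine automatically.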

# Print the current time to timestamp the run
from datetime import datetime
now = datetime.now()
print(now)
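
# The fragment above relies on names defined earlier in the original script
# (soup, url, data) and on the "random delay" that is only mentioned in a
# comment. A minimal, self-contained sketch of that assumed setup follows; the
# URL list, the requests/BeautifulSoup calls, and the exact placement of the
# delay are assumptions for illustration, not the author's code.
import random
import time

import pandas as pd
import requests
from bs4 import BeautifulSoup

# Hypothetical report URLs; each ends with a 4-character code followed by six
# characters, so url[-10:-6] picks the code out.
urls = [
    'https://example.com/financials?code=2330&tab=1',
    'https://example.com/financials?code=2412&tab=1',
]

data = []
for url in urls:
    response = requests.get(url, timeout=30)
    soup = BeautifulSoup(response.text, 'html.parser')

    table = soup.find_all('table')[0]      # first table on the page
    column_data = [url[-10:-6]]            # 4-character code from the URL
    for cell in table.find_all('td'):
        column_data.append(cell.text)
    data.append(column_data)

    time.sleep(random.uniform(3, 13))      # random delay between 3 and 13 seconds

df = pd.DataFrame(data)
df.to_excel('我要的資料.xlsx', index=False, header=False)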