Hi, I rewrote the code to handle multiple ships:
# from line 11
def retrieve_website(ship_url, output_path="output1.html"):
    """(1) Fetch a ship's tracking page and save the parsed HTML to disk.

    Args:
        ship_url: Full URL of the ship's page to download.
        output_path: File to write the HTML to. Defaults to "output1.html"
            (the original behavior); pass a per-ship path to keep multiple
            ships from overwriting each other's output.
    """
    # Send a browser-like user-agent, otherwise vesselfinder will block you.
    headers = {'user-agent': 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17'}
    # BUG FIX: the original did `url = 'ship_url'`, assigning the literal
    # string instead of the parameter, so every request went to the
    # invalid URL "ship_url" regardless of which ship was asked for.
    reqs = requests.get(ship_url, headers=headers)
    soup = BeautifulSoup(reqs.text, 'lxml')
    # Save the parsed page to local disk.
    with open(output_path, "w", encoding='utf-8') as file:
        file.write(str(soup))
# from line 94
""" Start the program """
def run_list():
    """Fetch every ship in the module-level ship_list once.

    BUG FIX: the original read `def run_list:` (missing parentheses),
    which is a SyntaxError and would prevent the program from starting.
    """
    for ship_url in ship_list:
        retrieve_website(ship_url)
# URLs of the ships to track; replace with real tracking-page URLs.
# (Removed the unused `ctr` counter the original declared but never read.)
ship_list = ['url-1','url-2','url-3']
# Run the function once at startup so we don't wait 15 minutes for first data.
for ship_url in ship_list:
    retrieve_website(ship_url)
# Re-run the function for the whole list every 15 minutes (900 seconds).
schedule.every(900).seconds.do(run_list)
while True:
    schedule.run_pending()
    time.sleep(1)