added change on wirtek, parked ausy
andreireporter13 committed Oct 5, 2023
1 parent ac20085 commit cca6e2f
Showing 2 changed files with 9 additions and 2 deletions.
3 changes: 2 additions & 1 deletion sites/A_OOI_main.py
@@ -21,7 +21,8 @@
'masabi_scraper.py',
'test_raben.py',
'test_hrsro.py',
-'test_vitesco.py',]
+'test_vitesco.py',
+'ausy_scraper.py',]

path = os.path.dirname(os.path.abspath(__file__))

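For context, a minimal sketch of how a scraper registry like the one above can be driven from A_OOI_main.py. The loop and the subprocess call below are illustrative assumptions; the rest of A_OOI_main.py is not shown in this diff.

# Illustrative sketch only -- not the repository's actual A_OOI_main.py.
import os
import subprocess
import sys

scrapers = ['masabi_scraper.py',
            'test_raben.py',
            'test_hrsro.py',
            'test_vitesco.py',
            'ausy_scraper.py']  # registered in this commit

path = os.path.dirname(os.path.abspath(__file__))

for script in scrapers:
    # Run each scraper with the current interpreter; one failing scraper
    # should not abort the rest of the run.
    try:
        subprocess.run([sys.executable, os.path.join(path, script)], check=True)
    except (subprocess.CalledProcessError, OSError) as exc:
        print(f'{script} failed: {exc}')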
8 changes: 7 additions & 1 deletion sites/wirtek_scraper.py
@@ -4,6 +4,7 @@
# Scrape data from wirtek.com/compile
#
from A_OO_get_post_soup_update_dec import DEFAULT_HEADERS, update_peviitor_api
+from L_00_logo import update_logo
#
import requests
from bs4 import BeautifulSoup
@@ -25,19 +26,21 @@ def data_scrape_from_wirtek():
for job in job_grid:
link_job = job.a['href']
title_job = job.find('div', class_='careers-grid__job-name').text.strip()
+location = job['data-location'].split("'")[1].title()

lst_with_jobs_data.append({
"id": str(uuid.uuid4()),
"job_title": title_job,
"job_link": link_job,
"company": "wirtek",
"country": "Romania",
"city": "Romania"
"city": location
})

return lst_with_jobs_data


# update data on peviitor!
@update_peviitor_api
def scrape_and_update_peviitor(company_name, data_list):
"""
Expand All @@ -50,3 +53,6 @@ def scrape_and_update_peviitor(company_name, data_list):
company_name = 'wirtek'
data_list = data_scrape_from_wirtek()
scrape_and_update_peviitor(company_name, data_list)
+
+print(update_logo('wirtek',
+'https://www.wirtek.com/hs-fs/hubfs/Wirtek_logo_22_years_v01_132x52px.gif?width=132&height=52&name=Wirtek_logo_22_years_v01_132x52px.gif'))

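The new city field relies on the job card's data-location attribute carrying a single-quoted value. A quick sketch of how that split-and-title step behaves on a hypothetical sample value (the real attribute format on wirtek.com may differ):

# Hypothetical sample value for the data-location attribute.
sample = "['cluj-napoca']"

# Splitting on single quotes gives ['[', 'cluj-napoca', ']']; index 1 is the
# raw city name, and .title() capitalises it for the API payload.
location = sample.split("'")[1].title()
print(location)  # Cluj-Napoca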