Skip to content

Commit

Permalink
Merge pull request #260 from techeer-sv/BE/#253
Browse files Browse the repository at this point in the history
Be/#253 마감기한지난 채용공고 삭제 로직 구현
  • Loading branch information
baekhangyeol authored Oct 5, 2023
2 parents 98c1db9 + 5a1324d commit 17182de
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 3 deletions.
2 changes: 1 addition & 1 deletion crawling_python/global_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,4 +31,4 @@ def get_driver():
service = webdriver.ChromeService(executable_path=path)
driver = webdriver.Chrome(options=chrome_options, service=service)

return driver
return driver
18 changes: 16 additions & 2 deletions crawling_python/scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,21 @@
import schedule
import subprocess

from crawling_python.global_utils import get_database_connect

kst = pytz.timezone('Asia/Seoul')

def delete_expired_data(today=None):
    """Delete job postings whose expiration_date is earlier than *today*.

    Args:
        today: Cutoff date for the DELETE. Defaults to the current date in
            Asia/Seoul (KST), computed at call time. The default is required
            because ``schedule`` invokes registered jobs with no arguments;
            the previous mandatory parameter made the daily job raise
            ``TypeError`` when triggered.
    """
    if today is None:
        # Import locally so the module import list is unchanged; compute the
        # date at execution time (not at scheduler-registration time).
        from datetime import datetime
        today = datetime.now(pytz.timezone('Asia/Seoul')).date()

    connect = get_database_connect()
    try:
        cursor = connect.cursor()
        try:
            # Parameterized query — never interpolate the date into the SQL.
            query = "DELETE FROM job WHERE expiration_date < %s"
            cursor.execute(query, (today,))
            connect.commit()
        finally:
            cursor.close()
    finally:
        # try/finally guarantees the connection is released even when the
        # DELETE or commit raises.
        connect.close()

def crawl_jobkorea():
    """Run the JobKorea crawler script in a child process."""
    import sys
    # sys.executable launches the same interpreter that runs this scheduler,
    # instead of whatever "python" happens to resolve to on PATH (which can
    # be missing or a different version inside containers/venvs).
    subprocess.run([sys.executable, "crawling_jobkorea.py"])
def crawl_saramin():
    """Run the Saramin crawler script in a child process."""
    import sys
    # sys.executable launches the same interpreter that runs this scheduler,
    # instead of whatever "python" happens to resolve to on PATH (which can
    # be missing or a different version inside containers/venvs).
    subprocess.run([sys.executable, "crawling_saramin.py"])


schedule.every(3).days.at("06:00").do(crawl_saramin)
schedule.every(3).days.at("06:03").do(crawl_jobkorea)
# Crawlers run every 3 days shortly after midnight, staggered by 3 minutes.
# NOTE(review): the schedule library compares times against the system
# clock, not the module-level `kst` — presumably the host runs in
# Asia/Seoul; confirm.
schedule.every(3).days.at("00:00").do(crawl_saramin)
schedule.every(3).days.at("00:03").do(crawl_jobkorea)
# NOTE(review): delete_expired_data(today) takes a required positional
# argument, but schedule calls jobs with no arguments — as registered here
# the daily job will raise TypeError. Give `today` a default or register
# it as .do(delete_expired_data, <today>); verify.
schedule.every().day.at("00:05").do(delete_expired_data)

if __name__ == "__main__":
while True:
Expand Down

0 comments on commit 17182de

Please sign in to comment.