From b25f04d21868395c65f9b669da999ddc07ef388e Mon Sep 17 00:00:00 2001
From: paprykdev <58005447+paprykdev@users.noreply.github.com>
Date: Tue, 12 Nov 2024 05:16:21 +0100
Subject: [PATCH] feat: initial commit

---
 .gitignore           | 32 ++++++++++++++++++++++++++++++++
 app/main.py          | 13 +++++++++++++
 app/requirements.txt |  8 ++++++++
 app/scraper.py       | 17 +++++++++++++++++
 4 files changed, 70 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 app/main.py
 create mode 100644 app/requirements.txt
 create mode 100644 app/scraper.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..d18c60c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,32 @@
+# Ignore Python bytecode files
+*.pyc
+*.pyo
+__pycache__/
+
+# Ignore virtual environment directories
+.venv/
+venv/
+env/
+
+# Ignore system files
+.DS_Store
+Thumbs.db
+
+# Ignore log files
+*.log
+
+# Ignore temporary files
+*.tmp
+*.swp
+
+# Ignore output files
+dist/
+build/
+*.egg-info/
+
+# Ignore environment files
+.env
+
+# IDE files
+.idea/
+.vscode/
diff --git a/app/main.py b/app/main.py
new file mode 100644
index 0000000..f87ceb4
--- /dev/null
+++ b/app/main.py
@@ -0,0 +1,13 @@
+from scraper import scraper
+import time
+
+
+def main():
+    print("Starting the application...\n\n")
+    scraper()
+    print("\n\nApplication finished!")
+    time.sleep(8)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/app/requirements.txt b/app/requirements.txt
new file mode 100644
index 0000000..bada454
--- /dev/null
+++ b/app/requirements.txt
@@ -0,0 +1,8 @@
+beautifulsoup4==4.12.3
+bs4==0.0.2
+certifi==2024.8.30
+charset-normalizer==3.4.0
+idna==3.10
+requests==2.32.3
+soupsieve==2.6
+urllib3==2.2.3
diff --git a/app/scraper.py b/app/scraper.py
new file mode 100644
index 0000000..d9c4dc0
--- /dev/null
+++ b/app/scraper.py
@@ -0,0 +1,17 @@
+import os
+import json
+
+
+def scraper():
+    directory = "dist"
+    file_path = os.path.join(directory, "data.json")
+
+    data = []
+
+    try:
+        os.mkdir("dist")
+    except FileExistsError:
+        pass
+    with open(file_path, "w", encoding="utf-8") as file:
+        json.dump(data, file)
+    print("Data has been scraped!")