update - students list upload route - add additional handling for potential errors

dominik24c 2022-05-19 18:15:11 +02:00
parent 22bd08c312
commit 8ac8e869c1
6 changed files with 60 additions and 25 deletions

View File

@@ -9,6 +9,7 @@ from .dependencies import db, ma
from .commands.startapp import startapp
from .utils import import_models
from .api import api_bp
from .errors import request_entity_too_large
def create_app(config_name: str = None) -> Flask:
@@ -35,4 +36,7 @@ def create_app(config_name: str = None) -> Flask:
# register commands
app.cli.add_command(startapp)
# register errors
app.register_error_handler(413, request_entity_too_large)
return app
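
Note: below is a minimal, standalone sketch of how the handler registered above interacts with MAX_CONTENT_LENGTH. The /upload/ route, the tiny 1 KiB limit and the inline handler are illustrative assumptions, not the project's actual factory; the exact point where Werkzeug raises RequestEntityTooLarge depends on the Flask/Werkzeug version, but the registered 413 handler produces the JSON body either way.

import io
from flask import Flask, request

app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 1024  # tiny limit so the example trips it

def request_entity_too_large(error):
    # same shape as the real handler in backend/app/errors.py
    return {'error': 'File too large!'}, 413

app.register_error_handler(413, request_entity_too_large)

@app.route('/upload/', methods=['POST'])
def upload():
    request.files.get('file')  # parsing the body enforces MAX_CONTENT_LENGTH
    return {'message': 'ok'}, 200

with app.test_client() as client:
    resp = client.post(
        '/upload/',
        data={'file': (io.BytesIO(b'x' * 2048), 'big.csv')},
        content_type='multipart/form-data',
    )
    print(resp.status_code, resp.get_json())  # 413 {'error': 'File too large!'}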

View File

@@ -1,5 +1,6 @@
from typing import TypedDict, Tuple
from flask import current_app
from flask_sqlalchemy import BaseQuery
from sqlalchemy import desc
@@ -36,3 +37,8 @@ def paginate_models(page: str, query: BaseQuery) -> PaginationResponse or Tuple[
'items': query.items,
'max_pages': query.pages
}
def is_allowed_extensions(filename: str) -> bool:
return '.' in filename and \
filename.rsplit('.', 1)[1].lower() in current_app.config['ALLOWED_EXTENSIONS']
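
Note: a quick standalone sketch of the new helper's behaviour; the throwaway Flask app below exists only so current_app resolves and is not the project's factory.

from flask import Flask, current_app

app = Flask(__name__)
app.config['ALLOWED_EXTENSIONS'] = {'csv'}

def is_allowed_extensions(filename: str) -> bool:
    # same logic as above: take the part after the last dot and lowercase it
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in current_app.config['ALLOWED_EXTENSIONS']

with app.app_context():
    print(is_allowed_extensions('students.CSV'))  # True, extension is lowercased first
    print(is_allowed_extensions('students.txt'))  # False, txt is not allowed
    print(is_allowed_extensions('students'))      # False, no extension at all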

View File

@@ -8,8 +8,12 @@ class Config:
BASE_DIR = Path(__file__).resolve().parent.parent
SRC_DIR = BASE_DIR / "app"
EXCLUDED_DIRS = ["__pycache__", "commands"]
ENABLE_CORS = os.environ.get('ENABLE_CORS') or False
ALLOWED_EXTENSIONS = {'csv'}
MAX_CONTENT_LENGTH = 10 * 1024 * 1024 # 10 MB
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_DATABASE_URI = f'sqlite:///{BASE_DIR / "db.sqlite"}'
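
Note: for illustration only, this is how a Config class like the one above is typically loaded onto an app with from_object; the repo's create_app factory may wire it differently.

from flask import Flask

class Config:
    ALLOWED_EXTENSIONS = {'csv'}
    MAX_CONTENT_LENGTH = 10 * 1024 * 1024  # 10 MB

app = Flask(__name__)
app.config.from_object(Config)
print(app.config['MAX_CONTENT_LENGTH'])  # 10485760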

View File

@@ -1,8 +1,10 @@
from typing import Tuple
from random import randint
from itertools import islice
from flask import Blueprint, request, Response
from marshmallow import ValidationError
from sqlalchemy.exc import IntegrityError
from flask_sqlalchemy import get_debug_queries
from ...students.models import Student
@@ -10,7 +12,7 @@ from ..schemas import StudentSchema, StudentEditSchema, StudentCreateSchema
from ...dependencies import db
from ..utils import parse_csv
from ..exceptions import InvalidNameOrTypeHeaderException
from ...base.utils import paginate_models
from ...base.utils import paginate_models, is_allowed_extensions
bp = Blueprint("students", __name__, url_prefix="/students")
@@ -30,7 +32,7 @@ def list_students() -> Tuple[dict, int]:
response = paginate_models(page, student_query)
if isinstance(response, tuple):
return response
print(get_debug_queries()[0])
# print(get_debug_queries()[0])
return {"students": students_schema.dump(response['items']), "max_pages": response['max_pages']}, 200
@@ -100,15 +102,29 @@ def create_student() -> Tuple[dict, int] or Response:
@bp.route("/upload/", methods=["POST"])
def upload_students() -> Tuple[dict, int]:
"""Maybe in the future move to celery workers"""
if (uploaded_file := request.files.get('file')) is None:
if (uploaded_file := request.files.get('file')) is None or uploaded_file.filename == '':
return {"error": "You didn't attach a csv file!"}, 400
try:
students = parse_csv(uploaded_file)
except InvalidNameOrTypeHeaderException:
return {"error": "Invalid format of csv file!"}, 400
if uploaded_file and is_allowed_extensions(uploaded_file.filename):
try:
students = parse_csv(uploaded_file)
except InvalidNameOrTypeHeaderException:
return {"error": "Invalid format of csv file!"}, 400
try:
while True:
sliced_students = islice(students, 5)
list_of_students = list(sliced_students)
if len(list_of_students) == 0:
break
db.session.add_all(list_of_students)
db.session.commit()
except IntegrityError as e:
# print(e)
# in the future, build a SQL query that checks the index column and adds only those students that don't already exist in the db
return {"error": "These students already exist!"}, 400
else:
return {"error": "Invalid extension of file"}, 400
for student in students:
db.session.add(student)
db.session.commit()
return {"message": "Students was created by uploading csv file!"}, 200

View File

@@ -1,4 +1,4 @@
from typing import List
from typing import Generator, Any
from random import randint
import pandas as pd
@@ -7,31 +7,29 @@ from .exceptions import InvalidNameOrTypeHeaderException
from ..students.models import Student
def check_columns(df: pd.DataFrame, columns: List[str]) -> bool:
def check_columns(df: pd.DataFrame) -> bool:
headers = set(df.keys().values)
columns = ['first_name', 'last_name', 'index', 'mode']
if len(headers - set(columns)) != 0:
return False
flag = True
col_types = ['object', 'object', 'int', 'int']
print(df.dtypes['first_name'])
for name, col_type in zip(columns, col_types):
if name not in df and df.dtypes[name].startswith(col_type):
if not str(df.dtypes[name]).startswith(col_type):
flag = False
break
return flag
def parse_csv(file) -> List[Student]:
def parse_csv(file) -> Generator[Student, Any, None]:
df = pd.read_csv(file)
columns = ['first_name', 'last_name', 'index', 'mode']
if not check_columns(df, columns):
if not check_columns(df):
raise InvalidNameOrTypeHeaderException
students = []
for _, item in df.iterrows():
data = {}
for c in columns:
data[c] = item[c]
data['email'] = f'student{randint(1, 300_000)}@gmail.com'
students.append(Student(**data))
students = (Student(**dict(item.items(), email=f'student{randint(1, 300_000)}@gmail.com'))
for _, item in df.iterrows())
return students
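
Note: a self-contained check of the generator-based parsing shown above, with a plain stand-in Student class and a two-row in-memory CSV; the real SQLAlchemy model and email scheme live in the project.

import io
from random import randint
import pandas as pd

class Student:  # stand-in for the SQLAlchemy model
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

csv_data = io.StringIO(
    "first_name,last_name,index,mode\n"
    "Ada,Lovelace,1001,1\n"
    "Alan,Turing,1002,2\n"
)
df = pd.read_csv(csv_data)
print(df.dtypes)  # first_name/last_name -> object, index/mode -> int64

students = (
    Student(**dict(item.items(), email=f'student{randint(1, 300_000)}@gmail.com'))
    for _, item in df.iterrows()
)
for s in students:
    print(s.first_name, s.last_name, s.index, s.mode, s.email)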

backend/app/errors.py Normal file
View File

@@ -0,0 +1,7 @@
from typing import Tuple
from werkzeug.exceptions import RequestEntityTooLarge
def request_entity_too_large(error: RequestEntityTooLarge) -> Tuple[dict, int]:
return {'error': 'File too large!'}, 413