add unit tests for backend

dominik24c 2023-01-04 22:51:58 +01:00
parent c448f8a642
commit 008fb92583
16 changed files with 212 additions and 17 deletions

View File

@@ -54,8 +54,9 @@ class MessageSchema(ma.Schema):
    message = fields.Str(required=True)
    id = fields.Str(required=False)
class FileSchema(ma.Schema):
-    file = fields.Raw(type='file', required=True)
+    file = fields.Raw(metadata={'type': 'file'}, required=True)
class StudentQuerySchema(ma.Schema):
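The metadata change tracks marshmallow 3's field API, where extra keyword arguments such as type='file' trigger a deprecation warning, which becomes fatal under the warnings-as-errors pytest.ini added in this commit. A minimal sketch, using plain marshmallow 3.x rather than the project's flask-marshmallow ma object:

from marshmallow import Schema, fields

class FileSchema(Schema):
    # extra field info now lives in an explicit metadata dict;
    # fields.Raw(type='file', ...) would warn at class-definition time
    file = fields.Raw(metadata={'type': 'file'}, required=True)

print(FileSchema().fields['file'].metadata)   # {'type': 'file'}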

View File

@@ -10,7 +10,7 @@ def validate_mode(value: str) -> str:
class YearGroupSchema(Schema):
-    name = fields.Str(validate=validate.Regexp('^\d{4}/\d{4}$'), required=True)
+    name = fields.Str(validate=validate.Regexp(r'^\d{4}/\d{4}$'), required=True)
    mode = fields.Str(validate=validate_mode, required=True)
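The raw-string prefix matters for the same reason: '\d' inside a normal string literal is an invalid escape sequence, which current Python versions report as a DeprecationWarning (a SyntaxWarning on newer interpreters) at import time, and which the new pytest configuration would escalate to an error. A small illustration, with a made-up pattern name:

import re

# r'...' keeps the backslash intact, so the regex engine receives \d as intended
YEAR_GROUP_PATTERN = re.compile(r'^\d{4}/\d{4}$')

assert YEAR_GROUP_PATTERN.match('2022/2023')
assert YEAR_GROUP_PATTERN.match('22/23') is None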

View File

@@ -3,7 +3,7 @@ import json
from collections import defaultdict
from datetime import datetime, timedelta
from io import BytesIO
-from typing import Generator, Union, Any, List, Tuple
+from typing import Generator, Union, Any, List, Tuple, TextIO
from pathlib import Path
import pandas as pd
@@ -30,7 +30,7 @@ def check_columns(df: pd.DataFrame) -> bool:
        return False
    flag = True
-    col_types = ['object', 'object', 'int', 'float64', 'object']
+    col_types = ['object', 'object', 'int', 'int64', 'object']
    for name, col_type in zip(columns, col_types):
        if not str(df.dtypes[name]).startswith(col_type):
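The switch to int64 matches how pandas infers the PESEL column from the fixture files added below: an all-digit column with no missing values is read as int64, while float64 only shows up when NaNs force a cast. A rough check, assuming a CSV shaped like those fixtures:

from io import StringIO
import pandas as pd

csv = StringIO("NAZWISKO,IMIE,INDEKS,PESEL,EMAIL\n"
               "Smith,Dominic,452790,11111111111,x@st.amu.edu.pl\n")
df = pd.read_csv(csv)
print(df.dtypes['PESEL'])   # int64, because no value is missing
print(df.dtypes['INDEKS'])  # int64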
@@ -40,9 +40,9 @@ def check_columns(df: pd.DataFrame) -> bool:
    return flag
-def parse_csv(file: FileStorage) -> Generator[Student, Any, None]:
+def parse_csv(file: Union[FileStorage, TextIO]) -> Generator[Student, Any, None]:
    df = pd.read_csv(file)
    # raise Exception(df.to_string())
    if not check_columns(df):
        raise InvalidNameOrTypeHeaderException
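Widening the signature lets the same parser serve both the upload endpoint and the new tests, since pandas.read_csv reads from any file-like object. A usage sketch; the absolute import path follows the app.app... form used in import_models below, and the fixture path is illustrative:

from app.app.coordinator.utils import parse_csv

# in a view, the uploaded file arrives as a werkzeug FileStorage:
#     students = list(parse_csv(request.files['file']))
# in a test, an ordinary open text file works just as well:
with open('tmp_data/students.csv') as f:
    students = list(parse_csv(f))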

View File

@@ -23,6 +23,7 @@ def import_models() -> None:
    models_module = "models"
    for dirname in directories:
        try:
-            importlib.import_module(f"app.{dirname}.{models_module}")
+            importlib.import_module(f"app.app.{dirname}.{models_module}")
        except ModuleNotFoundError:
            print(models_module, dirname)
            warnings.warn(f"Not found module {models_module}.py in package {dirname}")

View File

@ -1,5 +1,5 @@
from dotenv import load_dotenv
from app import create_app
from .app import create_app
load_dotenv()
app = create_app()
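This relative import and the doubled app.app... path in import_models point at the same restructuring: the backend is now imported as a package so that the tests can use from ..app import create_app. A hedged sketch of the equivalent absolute imports, assuming the outer package is importable as app:

import importlib

# wsgi's "from .app import create_app", spelled absolutely
create_app = importlib.import_module("app.app").create_app
# what import_models now resolves for the students blueprint
students_models = importlib.import_module("app.app.students.models")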

backend/pytest.ini Normal file
View File

@@ -0,0 +1,4 @@
[pytest]
filterwarnings =
    error
    ignore::UserWarning
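In effect this makes every warning fail the test run except UserWarning, which import_models emits via warnings.warn for packages without a models module. A hypothetical test illustrating the policy (not part of the commit):

import warnings
import pytest

def test_warning_policy() -> None:
    # "error" escalates other warnings into raised exceptions
    with pytest.raises(DeprecationWarning):
        warnings.warn("old API", DeprecationWarning)
    # "ignore::UserWarning" keeps import_models' warning harmless
    warnings.warn("Not found module models.py in package xyz", UserWarning)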

View File

@@ -3,16 +3,24 @@ from typing import Generator
import pytest
from flask import Flask
from flask.testing import FlaskClient
+from flask.ctx import AppContext
-from app import create_app
+from ..app import create_app
+from ..app.dependencies import db
@pytest.fixture()
-def app() -> Generator[Flask, None, None]:
-    app = create_app("testing")
-    yield app
+def test_app() -> Generator[Flask, None, None]:
+    yield create_app("testing")
@pytest.fixture()
-def client(app: Flask) -> FlaskClient:
-    return app.test_client()
+def test_app_ctx_with_db(test_app) -> Generator[AppContext, None, None]:
+    with test_app.app_context() as ctx:
+        db.create_all()
+        yield ctx
+@pytest.fixture()
+def test_client(test_app: Flask) -> FlaskClient:
+    return test_app.test_client()
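For reference, a hypothetical test module showing how the renamed fixtures are meant to be consumed (the route and the assertions are made up; the real tests in this commit use the fixtures the same way):

from ..app.dependencies import db
from ..app.students.models import Student

def test_smoke(test_client) -> None:
    response = test_client.get('/')               # hypothetical route
    assert response.status_code in (200, 404)

def test_database_is_available(test_app_ctx_with_db) -> None:
    with test_app_ctx_with_db:
        # tables exist because the fixture already ran db.create_all()
        assert db.session.query(Student).count() == 0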

View File

@@ -1,2 +0,0 @@
def test_comparing_two_number():
    assert 1 == 1

View File

@@ -0,0 +1,142 @@
import datetime
import pandas as pd
import pytest
from flask import current_app
from ...app.dependencies import db
from ...app.base.utils import is_allowed_extensions, order_by_column_name, paginate_models
from ...app.coordinator.utils import check_columns, parse_csv, generate_range_dates, generate_csv
from ...app.coordinator.exceptions import InvalidNameOrTypeHeaderException
from ...app.students.models import Student, Group
def test_is_allowed_extensions(test_app) -> None:
    with test_app.app_context():
        for ext in current_app.config.get('ALLOWED_EXTENSIONS'):
            assert is_allowed_extensions(f'file.{ext}') is True

def test_is_allowed_extensions_with_invalid_extensions(test_app) -> None:
    with test_app.app_context():
        assert is_allowed_extensions('file.invalid_ext') is False
        assert is_allowed_extensions('file') is False

def test_order_by_column_name_ascending_mode(test_app) -> None:
    with test_app.app_context():
        query = order_by_column_name(Student.query, 'index', 'desc')
        assert 'ORDER BY students."index"' in str(query)

def test_order_by_column_name_descending_mode(test_app) -> None:
    with test_app.app_context():
        query = order_by_column_name(Student.query, 'index', 'desc')
        assert 'ORDER BY students."index" DESC' in str(query)

def test_paginate_models(test_app_ctx_with_db) -> None:
    with test_app_ctx_with_db:
        st = Student(index=123456, first_name='Dominic', last_name='Smith', pesel='99010109876', email='xxx@gmail.com')
        st1 = Student(index=123457, first_name='John', last_name='Newton', pesel='99010109871', email='zzz@gmail.com')
        db.session.add_all([st, st1])
        db.session.commit()
        result = paginate_models(1, Student.query, 1)
        items = result.get('items', [])
        max_pages = result.get('max_pages', 0)
        assert len(items) == 1
        assert max_pages == 2
def test_check_columns() -> None:
    dummy_data = {'NAZWISKO': ['Smith'], 'IMIE': ['Dominic'], 'INDEKS': [343433], 'PESEL': [90020178654],
                  'EMAIL': ['domsmi@gmail.com']}
    df = pd.DataFrame(data=dummy_data)
    assert check_columns(df) is True

def test_check_columns_with_invalid_column_names() -> None:
    dummy_data = {'col1': [1, 2], 'col2': [2, 3]}
    df = pd.DataFrame(data=dummy_data)
    assert check_columns(df) is False

def test_check_columns_with_invalid_column_types() -> None:
    dummy_data = {'NAZWISKO': [999], 'IMIE': ['Dominic'], 'INDEKS': [343433], 'PESEL': [90020178654],
                  'EMAIL': ['domsmi@gmail.com']}
    df = pd.DataFrame(data=dummy_data)
    assert check_columns(df) is False

def get_path_to_fake_data(filename: str) -> str:
    base_dir = current_app.config.get('BASE_DIR', '/')
    return base_dir / 'tmp_data' / filename
def test_parse_csv(test_app) -> None:
    with test_app.app_context():
        with open(get_path_to_fake_data('students.csv')) as f:
            students = sorted(list(parse_csv(f)), key=lambda s: s.index)
            indexes = [452790 + i for i in range(3)]
            assert len(students) == len(indexes)
            for st, idx in zip(students, indexes):
                assert st.index == idx

def test_parse_csv_with_invalid_column_header_name_in_csv_file(test_app) -> None:
    with test_app.app_context():
        with open(get_path_to_fake_data('students_column_name.csv')) as f:
            with pytest.raises(InvalidNameOrTypeHeaderException):
                parse_csv(f)

def test_parse_csv_with_invalid_column_type_in_csv_file(test_app) -> None:
    with test_app.app_context():
        with open(get_path_to_fake_data('students_column_type.csv')) as f:
            with pytest.raises(InvalidNameOrTypeHeaderException):
                parse_csv(f)

def test_generate_range_dates() -> None:
    start_date = datetime.datetime(2022, 2, 2, 8, 0, 0, 0)
    end_date = datetime.datetime(2022, 2, 2, 12, 0, 0, 0)
    step = 30
    expected_dates_amount = (end_date - start_date).total_seconds() / 60.0 / step
    dates = list(generate_range_dates(start_date, end_date, step))
    assert expected_dates_amount == len(dates)
    for date in dates:
        assert start_date <= date < end_date
def test_generate_csv(test_app_ctx_with_db) -> None:
    students_data = [
        {'first_name': 'Dominic', 'last_name': 'Smith', 'email': 'xxe@gmail.com', 'index': 123456,
         'pesel': '98070234293'},
        {'first_name': 'Matthew', 'last_name': 'Cash', 'email': 'zze@gmail.com', 'index': 123455,
         'pesel': '98070234291'},
        {'first_name': 'Martin', 'last_name': 'Rose', 'email': 'nne@gmail.com', 'index': 123446,
         'pesel': '98070234223'},
    ]
    with test_app_ctx_with_db:
        students = [Student(**data) for data in students_data]
        db.session.add_all(students)
        db.session.commit()
        gr1 = Group(name="new-project")
        gr2 = Group(name="system-pri")
        gr1.students.append(students[0])
        gr1.students.append(students[1])
        gr2.students.append(students[2])
        db.session.add_all([gr1, gr2])
        db.session.commit()
        students_and_groups = [(students[0], gr1), (students[1], gr1), (students[2], gr2)]
        generated_csv = generate_csv(students_and_groups)
        for data in students_data:
            for value in data.values():
                assert str(value) in generated_csv

View File

@@ -0,0 +1,29 @@
import datetime
import pytest
from marshmallow import ValidationError
from ...app.coordinator.validators import validate_index, validate_datetime_greater_than_now
def test_validate_index() -> None:
    assert validate_index(123456) is None

def test_validate_index_with_invalid_value() -> None:
    with pytest.raises(ValidationError):
        validate_index(12345)
    with pytest.raises(ValidationError):
        validate_index(1234567)

def test_validate_datetime_greater_than_now() -> None:
    d = datetime.datetime.now() + datetime.timedelta(days=2)
    assert validate_datetime_greater_than_now(d) is None

def test_validate_datetime_greater_than_now_with_invalid_data() -> None:
    d = datetime.datetime.now() - datetime.timedelta(days=2)
    with pytest.raises(ValidationError):
        validate_datetime_greater_than_now(d)

View File

@@ -0,0 +1,4 @@
NAZWISKO,IMIE,INDEKS,PESEL,EMAIL
Drzewiński,Patryk,452790,11111111111,patdrz1@st.amu.edu.pl
Skowronek,Adam,452791,22222222222,adasko8@st.amu.edu.pl
Kuzmenko,Mariia,452792,33333333333,markuz5@st.amu.edu.pl

View File

@@ -0,0 +1,4 @@
NAZWISKO,NOTHING,INDEKS,PESEL,EMAIL
Drzewiński,Patryk,452790,11111111111,patdrz1@st.amu.edu.pl
Skowronek,Adam,452791,22222222222,adasko8@st.amu.edu.pl
Kuzmenko,Mariia,452792,33333333333,markuz5@st.amu.edu.pl

View File

@@ -0,0 +1,4 @@
NAZWISKO,IMIE,INDEKS,PESEL,EMAIL
0030,Patryk,452790,11111111111,patdrz1@st.amu.edu.pl
4939,Adam,452791,22222222222,adasko8@st.amu.edu.pl
3232,Mariia,452792,33333333333,markuz5@st.amu.edu.pl