Merge branch 'development' of https://git.wmi.amu.edu.pl/s459309/system-pri into development
commit e7bc1f3dae

@@ -10,10 +10,11 @@ from ..schemas.groups import (
     GroupCreateSchema,
     GroupEditSchema,
     GroupQuerySchema,
+    GroupSetGradeSchema,
     GroupsPaginationSchema,
 )
 from ..schemas.students import DetailGroupSchema
-from ..utils import attach_points_for_first_and_second_term_to_group_models
+from ..utils import attach_grade_to_group_models
 
 bp = APIBlueprint("groups", __name__, url_prefix="/groups")
 
@@ -28,10 +29,8 @@ def list_groups(year_group_id: int, query: dict) -> dict:
 
     groups_query = Group.search_by_name(year_group_id, search_name)
     data = paginate_models(page, groups_query, per_page)
 
     items = data["items"]
-    attach_points_for_first_and_second_term_to_group_models(items)
+    attach_grade_to_group_models(items)
 
     return {"groups": items, "max_pages": data["max_pages"]}
-
-
@@ -93,6 +92,7 @@ def detail_group(group_id: int) -> Group:
     group = Group.query.filter_by(id=group_id).first()
     if group is None:
         abort(404, "Not found group!")
+    attach_grade_to_group_models([group])
     return group
 
 
@@ -145,3 +145,22 @@ def edit_group(group_id: int, data: dict) -> dict:
 
     db.session.commit()
     return {"message": "Group was updated!"}
+
+
+@bp.put("/<int:group_id>/set-grades/")
+@bp.input(GroupSetGradeSchema)
+@bp.output(MessageSchema)
+def set_grade_for_group(group_id: int, data: dict) -> dict:
+    if not data:
+        abort(400, "You have passed empty data!")
+
+    group_query = Group.query.filter_by(id=group_id)
+    group = group_query.first()
+
+    if group is None:
+        abort(404, "Not found group!")
+
+    group_query.update(data)
+    db.session.commit()
+
+    return {"message": "Grade was updated!"}
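
Note: the new PUT /groups/<int:group_id>/set-grades/ route simply writes the fields validated by GroupSetGradeSchema (next hunk) onto the group row. A minimal client-side sketch, assuming a hypothetical http://localhost:5000 root (the real prefix depends on how the app registers its blueprints):

import requests

BASE_URL = "http://localhost:5000"  # assumption: adjust to the actual mount point

# Set both term grades for group 1 through the new endpoint.
response = requests.put(
    f"{BASE_URL}/groups/1/set-grades/",
    json={"grade_for_first_term": 4.5, "grade_for_second_term": 5.0},
)
print(response.status_code, response.json())  # expected: {"message": "Grade was updated!"}
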
@@ -28,3 +28,8 @@ class GroupEditSchema(Schema):
 
 class GroupIdSchema(Schema):
     group_id = fields.Integer(required=True)
+
+
+class GroupSetGradeSchema(Schema):
+    grade_for_first_term = fields.Float()
+    grade_for_second_term = fields.Float()
@@ -56,6 +56,16 @@ def parse_csv(
     return students
 
 
+def map_project_supervisors(groups: List[Group]) -> dict:
+    i = 1
+    mapped_project_supervisors = {}
+    for group in groups:
+        if group.project_supervisor_id not in mapped_project_supervisors.keys():
+            mapped_project_supervisors[group.project_supervisor_id] = i
+            i += 1
+    return mapped_project_supervisors
+
+
 def generate_csv(students_and_groups: List[Tuple[Student, Group]]) -> str:
     headers = [
         "INDEKS",
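
Note: map_project_supervisors numbers each distinct project_supervisor_id sequentially, in order of first appearance, so the exported CSV (see the generate_csv hunks below) can print a small per-export group number instead of the raw database id. A standalone sketch of the same idea with made-up supervisor ids:

from types import SimpleNamespace

# Stand-ins for Group rows; only project_supervisor_id matters for the mapping.
groups = [SimpleNamespace(project_supervisor_id=pid) for pid in (7, 7, 3, 9, 3)]

mapping = {}
for group in groups:
    if group.project_supervisor_id not in mapping:
        # First occurrence of this supervisor: assign the next sequential number.
        mapping[group.project_supervisor_id] = len(mapping) + 1

print(mapping)  # {7: 1, 3: 2, 9: 3}
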
@@ -68,6 +78,9 @@ def generate_csv(students_and_groups: List[Tuple[Student, Group]]) -> str:
         "GR_NR",
         "PRG_KOD",
     ]
+    mapped_project_supervisors_id = map_project_supervisors(
+        [group for _, group in students_and_groups]
+    )
     data = [
         (
             student.index,
@@ -77,7 +90,7 @@ def generate_csv(students_and_groups: List[Tuple[Student, Group]]) -> str:
             group.cdyd_kod,
             group.prz_kod,
             group.tzaj_kod,
-            group.project_supervisor_id,
+            mapped_project_supervisors_id[group.project_supervisor_id],
             None,
         )
         for student, group in students_and_groups
@@ -152,18 +165,17 @@ def generate_examination_schedule_pdf_file(
             ps = td.group.project_supervisor
             project_supervisor_fullname = f"{ps.first_name[0]}. {ps.last_name}"
             students = td.group.students
-            # print(students)
             team = ", ".join([f"{s.first_name} {s.last_name}" for s in students])
         else:
             project_supervisor_fullname = ""
             team = ""
 
         members = td.members_of_committee
-        # print(members)
+
         if len(members) == 0:
             committee = ""
         else:
-            members_iter = (f"{m.first_name[0]} {m.last_name}" for m in members)
+            members_iter = (f"{m.first_name[0]}. {m.last_name}" for m in members)
             committee = ", ".join(members_iter)
 
         data.append(
@@ -226,58 +238,97 @@ def load_weight_for_project_grade_sheet() -> Union[dict, None]:
     return data
 
 
-def calculate_points_for_one_term(
-    weights: dict, project_grade_sheets: List[ProjectGradeSheet]
-) -> list:
-    terms = []
-    for pgs in project_grade_sheets:
-        if pgs is None:
-            terms.append((0, 0))
-            continue
-
-        first_term_points = {
-            "nominator": 0,
-            "denominator": 0,
-        }
-        second_term_points = {
-            "nominator": 0,
-            "denominator": 0,
-        }
-        for weight_key, weight_value in weights.items():
-            points = (
-                first_term_points if weight_key.endswith("1") else second_term_points
-            )
-            try:
-                attribute_value = getattr(pgs, weight_key)
-            except AttributeError:
-                attribute_value = 0
-            points["nominator"] += attribute_value * weight_value * 1 / 4
-            points["denominator"] += weight_value
-
-        try:
-            fp = first_term_points["nominator"] / first_term_points["denominator"]
-        except ZeroDivisionError:
-            fp = 0
-        try:
-            sp = second_term_points["nominator"] / second_term_points["denominator"]
-        except ZeroDivisionError:
-            sp = 0
-
-        terms.append((round(fp, 2) * 100, round(sp, 2) * 100))
-
-    return terms
-
-
-def attach_points_for_first_and_second_term_to_group_models(items: List[Group]) -> None:
-    weights = load_weight_for_project_grade_sheet()
-    pgs = []
-    for g in items:
-        if len(g.project_grade_sheet) == 0:
-            pgs.append(None)
-        else:
-            pgs.append(g.project_grade_sheet[0])
-    calculated_points = calculate_points_for_one_term(weights, pgs)
-
-    for group, points in zip(items, calculated_points):
-        group.points_for_first_term = points[0]
-        group.points_for_second_term = points[1]
+def get_criterion_by_weight_key(weight_key: str) -> str:
+    if weight_key.startswith("presentation"):
+        return "presentation"
+    if weight_key.startswith("documentation"):
+        return "documentation"
+    if weight_key.startswith("group_work"):
+        return "group_work"
+    return "product_project"
+
+
+def grade_in_percentage(term_key: str, term_points: dict) -> str:
+    try:
+        criterions = {
+            "presentation": current_app.config.get(f"PRESENTATION_WEIGHT_{term_key}"),
+            "group_work": current_app.config.get(f"GROUP_WORK_WEIGHT_{term_key}"),
+            "documentation": current_app.config.get(f"DOCUMENTATION_WEIGHT_{term_key}"),
+            "product_project": current_app.config.get(
+                f"PRODUCT_PROJECT_WEIGHT_{term_key}"
+            ),
+        }
+        result = 0
+        for criterion_key, criterion_weight in criterions.items():
+            result += (
+                term_points[criterion_key]["gained_points"]
+                / term_points[criterion_key]["all_points"]
+                * criterion_weight
+            )
+        result /= sum(criterions.values())
+    except ZeroDivisionError:
+        result = 0
+    return result
+
+
+def calculate_points_for_both_terms(
+    weights: dict, project_grade_sheet: ProjectGradeSheet
+) -> Tuple[float, float]:
+    if project_grade_sheet is None:
+        return 0.0, 0.0
+    first_term_points = {
+        "presentation": {"gained_points": 0, "all_points": 0},
+        "documentation": {"gained_points": 0, "all_points": 0},
+        "group_work": {"gained_points": 0, "all_points": 0},
+        "product_project": {"gained_points": 0, "all_points": 0},
+    }
+
+    second_term_points = copy.deepcopy(first_term_points)
+
+    for weight_key, weight_value in weights.items():
+        points = first_term_points if weight_key.endswith("1") else second_term_points
+        criterion = get_criterion_by_weight_key(weight_key)
+        try:
+            attribute_value = getattr(project_grade_sheet, weight_key)
+        except AttributeError:
+            attribute_value = 0
+        points[criterion]["gained_points"] += attribute_value / 4 * weight_value
+        points[criterion]["all_points"] += weight_value
+
+    points_1 = round(grade_in_percentage("FIRST_TERM", first_term_points) * 100, 1)
+    points_2 = round(grade_in_percentage("SECOND_TERM", second_term_points) * 100, 1)
+    return points_1, points_2
+
+
+def attach_points_for_first_and_second_term_to_group(group: Group) -> None:
+    weights = load_weight_for_project_grade_sheet()
+    pgs = group.project_grade_sheet
+    if len(pgs) == 0:
+        pgs = None
+    else:
+        pgs = pgs[0]
+    points = calculate_points_for_both_terms(weights, pgs)
+    group.points_for_first_term = points[0]
+    group.points_for_second_term = points[1]
+
+
+def get_term_grade(point: float) -> float:
+    if point >= 91.0:
+        return 5
+    if point >= 81.0:
+        return 4.5
+    if point >= 71.0:
+        return 4
+    if point >= 61.0:
+        return 3.5
+    if point >= 51.0:
+        return 3
+    return 2
+
+
+def attach_grade_to_group_models(groups: List[Group]) -> None:
+    for group in groups:
+        if group.grade_for_first_term == 0:
+            group.grade_for_first_term = get_term_grade(group.points_for_first_term)
+        if group.grade_for_second_term == 0:
+            group.grade_for_second_term = get_term_grade(group.points_for_second_term)
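
Note: taken together, calculate_points_for_both_terms and grade_in_percentage turn a project grade sheet into a weighted percentage per term: each criterion contributes gained_points / all_points scaled by a per-criterion weight read from the app config (PRESENTATION_WEIGHT_*, DOCUMENTATION_WEIGHT_*, GROUP_WORK_WEIGHT_*, PRODUCT_PROJECT_WEIGHT_*), and the sum is divided by the sum of those weights before being rounded to one decimal. get_term_grade then maps the percentage onto the 2-5 grade scale. A self-contained sketch of the arithmetic with made-up weights and points:

# Assumed example weights; the application reads the real values from its config.
weights = {"presentation": 2, "documentation": 3, "group_work": 1, "product_project": 4}

# Example points collected for one term: gained vs. maximum per criterion.
term_points = {
    "presentation": {"gained_points": 3.0, "all_points": 4.0},
    "documentation": {"gained_points": 2.0, "all_points": 4.0},
    "group_work": {"gained_points": 4.0, "all_points": 4.0},
    "product_project": {"gained_points": 3.5, "all_points": 4.0},
}

# Weighted average of the per-criterion ratios, as in grade_in_percentage.
result = sum(
    term_points[key]["gained_points"] / term_points[key]["all_points"] * weight
    for key, weight in weights.items()
) / sum(weights.values())

percentage = round(result * 100, 1)
print(percentage)  # 75.0 -> get_term_grade would map this to 4 (>= 71.0)
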
@@ -1,11 +1,14 @@
 from flask import abort
 
+from ...coordinator.utils import attach_points_for_first_and_second_term_to_group
 from ...dependencies import db
 from ...students.models import Group, ProjectGradeSheet
 from ..models import ProjectSupervisor
 
 
-def update_project_grade_sheet(group_id: int, query: dict, data: dict) -> None:
+def update_project_grade_sheet(
+    group_id: int, query: dict, data: dict
+) -> ProjectGradeSheet:
     project_supervisor_id = query.get("id")
     project_supervisor = ProjectSupervisor.query.filter(
         ProjectSupervisor.id == project_supervisor_id
@@ -28,4 +31,5 @@ def update_project_grade_sheet(group_id: int, query: dict, data: dict) -> None:
         abort(404, "Not found project grade sheet!")
 
     pgs_query.update(data)
+    attach_points_for_first_and_second_term_to_group(group)
     db.session.commit()
@@ -38,8 +38,10 @@ class Group(Base):
     project_supervisor = db.relationship("ProjectSupervisor", backref="groups")
     year_group_id = db.Column(db.Integer, db.ForeignKey("year_groups.id"))
     year_group = db.relationship("YearGroup", backref="groups", lazy="joined")
-    points_for_first_term = db.Column(db.Integer, default=0, nullable=False)
-    points_for_second_term = db.Column(db.Integer, default=0, nullable=False)
+    points_for_first_term = db.Column(db.Float, default=0, nullable=False)
+    points_for_second_term = db.Column(db.Float, default=0, nullable=False)
+    grade_for_first_term = db.Column(db.Float, default=0, nullable=False)
+    grade_for_second_term = db.Column(db.Float, default=0, nullable=False)
     students = db.relationship(
         "Student", secondary=students_groups, back_populates="groups"
     )
@@ -1,15 +1,15 @@
 """empty message
 
-Revision ID: 559c8f18a125
+Revision ID: 5f2f440d05e2
 Revises:
-Create Date: 2023-01-14 15:25:59.137169
+Create Date: 2023-01-15 23:52:36.927007
 
 """
 import sqlalchemy as sa
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = "559c8f18a125"
+revision = "5f2f440d05e2"
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -104,8 +104,10 @@ def upgrade():
         sa.Column("tzaj_kod", sa.String(length=60), nullable=True),
         sa.Column("project_supervisor_id", sa.Integer(), nullable=True),
         sa.Column("year_group_id", sa.Integer(), nullable=True),
-        sa.Column("points_for_first_term", sa.Integer(), nullable=False),
-        sa.Column("points_for_second_term", sa.Integer(), nullable=False),
+        sa.Column("points_for_first_term", sa.Float(), nullable=False),
+        sa.Column("points_for_second_term", sa.Float(), nullable=False),
+        sa.Column("grade_for_first_term", sa.Float(), nullable=False),
+        sa.Column("grade_for_second_term", sa.Float(), nullable=False),
         sa.ForeignKeyConstraint(
             ["project_supervisor_id"],
             ["project_supervisors.id"],
@@ -4,12 +4,15 @@ import pandas as pd
 import pytest
 from flask import current_app
 
+from app.base.mode import ModeGroups
 from app.base.utils import is_allowed_extensions, order_by_column_name, paginate_models
 from app.coordinator.exceptions import InvalidNameOrTypeHeaderException
 from app.coordinator.utils import (
     check_columns,
     generate_csv,
     generate_range_dates,
+    get_duration_time,
+    map_project_supervisors,
     parse_csv,
 )
 from app.dependencies import db
@@ -46,14 +49,12 @@ def test_paginate_models(test_app_ctx_with_db) -> None:
             index=123456,
             first_name="Dominic",
             last_name="Smith",
-            pesel="99010109876",
             email="xxx@gmail.com",
         )
         st1 = Student(
             index=123457,
             first_name="John",
             last_name="Newton",
-            pesel="99010109871",
             email="zzz@gmail.com",
         )
         db.session.add_all([st, st1])
@@ -72,7 +73,6 @@ def test_check_columns() -> None:
         "NAZWISKO": ["Smith"],
         "IMIE": ["Dominic"],
         "INDEKS": [343433],
-        "PESEL": [90020178654],
         "EMAIL": ["domsmi@gmail.com"],
     }
     df = pd.DataFrame(data=dummy_data)
@@ -90,7 +90,6 @@ def test_check_columns_with_invalid_column_types() -> None:
         "NAZWISKO": [999],
         "IMIE": ["Dominic"],
         "INDEKS": [343433],
-        "PESEL": [90020178654],
         "EMAIL": ["domsmi@gmail.com"],
     }
     df = pd.DataFrame(data=dummy_data)
@@ -99,13 +98,13 @@ def test_check_columns_with_invalid_column_types() -> None:
 
 def get_path_to_fake_data(filename: str) -> str:
     base_dir = current_app.config.get("BASE_DIR", "/")
-    return base_dir / "tmp_data" / filename
+    return base_dir / "tests" / "data" / filename
 
 
 def test_parse_csv(test_app) -> None:
     with test_app.app_context():
         with open(get_path_to_fake_data("students.csv")) as f:
-            students = sorted(list(parse_csv(f)), key=lambda s: s.index)
+            students = sorted(list(parse_csv(f, 1)), key=lambda s: s.index)
             indexes = [452790 + i for i in range(3)]
             assert len(students) == len(indexes)
             for st, idx in zip(students, indexes):
@@ -116,14 +115,14 @@ def test_parse_csv_with_invalid_column_header_name_in_csv_file(test_app) -> None
     with test_app.app_context():
         with open(get_path_to_fake_data("students_column_name.csv")) as f:
            with pytest.raises(InvalidNameOrTypeHeaderException):
-                parse_csv(f)
+                parse_csv(f, 1)
 
 
 def test_parse_csv_with_invalid_column_type_in_csv_file(test_app) -> None:
     with test_app.app_context():
         with open(get_path_to_fake_data("students_column_type.csv")) as f:
             with pytest.raises(InvalidNameOrTypeHeaderException):
-                parse_csv(f)
+                parse_csv(f, 1)
 
 
 def test_generate_range_dates() -> None:
@@ -139,43 +138,34 @@ def test_generate_range_dates() -> None:
         assert start_date <= date < end_date
 
 
-def test_generate_csv(test_app_ctx_with_db) -> None:
+def test_generate_csv() -> None:
     students_data = [
         {
             "first_name": "Dominic",
             "last_name": "Smith",
             "email": "xxe@gmail.com",
             "index": 123456,
-            "pesel": "98070234293",
         },
         {
             "first_name": "Matthew",
             "last_name": "Cash",
             "email": "zze@gmail.com",
             "index": 123455,
-            "pesel": "98070234291",
         },
         {
             "first_name": "Martin",
             "last_name": "Rose",
             "email": "nne@gmail.com",
             "index": 123446,
-            "pesel": "98070234223",
         },
     ]
 
-    with test_app_ctx_with_db:
     students = [Student(**data) for data in students_data]
-    db.session.add_all(students)
-    db.session.commit()
-
     gr1 = Group(name="new-project")
     gr2 = Group(name="system-pri")
     gr1.students.append(students[0])
     gr1.students.append(students[1])
     gr2.students.append(students[2])
-    db.session.add_all([gr1, gr2])
-    db.session.commit()
 
     students_and_groups = [
         (students[0], gr1),
@@ -186,3 +176,24 @@ def test_generate_csv(test_app_ctx_with_db) -> None:
     for data in students_data:
         for value in data.values():
             assert str(value) in generated_csv
+
+
+def test_map_project_supervisors() -> None:
+    project_supervisors_id = [(1, 2), (2, 3), (3, 7)]
+    groups = []
+    for i in range(3):
+        for _, ps_id in project_supervisors_id:
+            groups.append(Group(project_supervisor_id=ps_id))
+
+    mapped_ps = map_project_supervisors(
+        sorted(groups, key=lambda g: g.project_supervisor_id)
+    )
+    for expected_id, ps_id in project_supervisors_id:
+        assert mapped_ps[ps_id] == expected_id
+
+
+def test_get_duration_time() -> None:
+    assert get_duration_time(ModeGroups.STATIONARY.value) == 30
+    assert get_duration_time(ModeGroups.NON_STATIONARY.value) == 20
+    assert get_duration_time(ModeGroups.ENGLISH_SPEAKING_STATIONARY.value) == 30
+    assert get_duration_time("invalid value") is None