Skip to content

Commit 2a274af

Browse files
committed
Partway through new data import
1 parent 2cb368e commit 2a274af

5 files changed

Lines changed: 400 additions & 159 deletions

File tree

.idea/physics-workload.iml

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

physics_workload/app/models/assignment.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,9 @@ class Assignment(ModelCommon):
1515
Pairs a Staff member up with the task they're performing.
1616
"""
1717

18+
    def get_epoch_count(self) -> int:
        """
        Count the Epochs related to this Assignment.

        :return: the number of rows in the reverse `epoch_set` relation.
        """
        return self.epoch_set.count()
20+
1821
icon = "clipboard"
1922
url_root = "assignment"
2023

scripts/import_nonunit_tasks_from_csv.py

Lines changed: 72 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -9,60 +9,88 @@
99
"""
1010
from datetime import datetime
1111
from zoneinfo import ZoneInfo
12-
from logging import getLogger, Logger, INFO
12+
from logging import getLogger, Logger
1313
from os import getcwd
1414
from pathlib import Path
1515

16-
from pandas import DataFrame, read_csv, isnull
16+
from pandas import DataFrame, isnull
1717

1818
from django.conf import settings
1919

2020
from app.models import Task, AcademicGroup
21+
from load_tasks import load_nonunit_tasks_from_load_master_csv
2122

2223

2324
# Set up logging
2425
logger: Logger = getLogger(__name__)
2526

2627
# Hardcoded for ease of dealing with the manage.py shell.
27-
CSV_PATH: Path = Path(getcwd()) / "spreadsheet_tasks_nonunit.csv"
28-
logger.info(f"Importing tasks from: {CSV_PATH}")
29-
30-
# Track the history of creation
31-
settings.SIMPLE_HISTORY_ENABLED = True
32-
33-
# Read the CSV
34-
load_df: DataFrame = read_csv(CSV_PATH, header=0, index_col=False)
35-
36-
tasks_created: int = 0
37-
history_date: datetime = datetime(
38-
year=2024, month=9, day=20, hour=0, minute=0, second=0,
39-
tzinfo=ZoneInfo("GMT"),
40-
)
41-
42-
for idx, row in load_df.iterrows():
43-
# Iterate through the dataframe, and for each row create a new task and save the details.
44-
try:
45-
task, created = Task.objects.get_or_create(
46-
pk=100+idx,
47-
academic_group=AcademicGroup.objects.get(code=row.academic_group) if not isnull(row.academic_group) else None,
48-
title=row.task_name,
49-
description=row.description,
50-
notes=row.notes,
51-
load_fixed=row.load_fixed if row.load_fixed != 912 else 0,
52-
is_full_time=True if row.load_fixed == 912 else False,
53-
)
54-
task._history_date = history_date
55-
task.save()
56-
tasks_created += created
57-
58-
except Exception as e:
59-
logger.warning(
60-
f"Row {idx}: Failed to import: {e} - {row}"
61-
)
62-
63-
# Stop tracking history changes.
64-
settings.SIMPLE_HISTORY_ENABLED = False
65-
66-
logger.info(
67-
f"Import complete, created {tasks_created} tasks."
68-
)
28+
# Hardcoded for ease of dealing with the manage.py shell.
# Maps academic year (2024 means 2024/2025) to the CSV file to import.
# NOTE: use the Path `/` operator to join — `getcwd() + name` has no
# separator and yields paths like ".../cwdspreadsheet_tasks_nonunit_2024.csv".
CSV_FILES: dict[int, Path] = {
    2024: Path(getcwd()) / "spreadsheet_tasks_nonunit_2024.csv",
    2025: Path(getcwd()) / "spreadsheet_tasks_nonunit_2025.csv",
}
32+
33+
def import_nonunit_tasks(path: Path, year: int, initial_pk: int) -> int:
    """
    Imports tasks not associated with a unit from a cut-down version of the load_master, and adds them to the DB.

    :param path: The path to the spreadsheet file.
    :param year: The year the file is for (2024 for 2024/2025).
    :param initial_pk: The first PK to assign.
    :return: The first free PK after this import (initial_pk + number of rows
        read), i.e. the value to pass as `initial_pk` for the next file.
        Note this is NOT the PK of the last task created (that is one lower).
    """
    logger.info(f"Importing tasks for: {year}, path: {path}")

    # Track the history of creation
    settings.SIMPLE_HISTORY_ENABLED = True

    try:
        # Read the CSV via the shared loader helper.
        load_df: DataFrame = load_nonunit_tasks_from_load_master_csv(path)

        tasks_created: int = 0
        # Backdate the created history records to a fixed point in the
        # academic year rather than "now".
        history_date: datetime = datetime(
            year=year, month=9, day=20, hour=0, minute=0, second=0,
            tzinfo=ZoneInfo("GMT"),
        )

        for idx, row in load_df.iterrows():
            # Iterate through the dataframe, and for each row create a new task and save the details.
            try:
                task, created = Task.objects.get_or_create(
                    pk=initial_pk + idx,
                    academic_group=AcademicGroup.objects.get(code=row.academic_group) if not isnull(row.academic_group) else None,
                    title=row.task_name,
                    description=row.description,
                    notes=row.notes,
                    # A load_fixed of -1 is a sentinel meaning "full-time
                    # role" — store no fixed load and flag is_full_time.
                    load_fixed=row.load_fixed if row.load_fixed != -1 else None,
                    load_fixed_first=row.load_fixed_first,
                    is_full_time=(row.load_fixed == -1),
                )
                task._history_date = history_date
                task.save()
                # `created` is a bool; summing counts only genuinely new rows.
                tasks_created += created

            except Exception as e:
                # Best-effort import: log the bad row and carry on.
                logger.warning(
                    f"Row {idx}: Failed to import: {e} - {row}"
                )

        logger.info(f"Imported tasks for: {year}, created: {tasks_created}")

        return len(load_df) + initial_pk

    finally:
        # Stop tracking history changes — in a finally so the flag is reset
        # even if the CSV load (or anything else) raises part-way through.
        settings.SIMPLE_HISTORY_ENABLED = False
84+
85+
86+
def main():
    """
    Runs the import for the hard-coded files.
    """
    logger.info(f"Importing non-unit tasks: {CSV_FILES}")

    # Each file's import returns the next free PK; feeding it into the next
    # call keeps task PKs unique across years.
    next_pk: int = 0
    for import_year, csv_path in CSV_FILES.items():
        next_pk = import_nonunit_tasks(csv_path, import_year, next_pk)

0 commit comments

Comments (0)