|
9 | 9 | """ |
10 | 10 | from datetime import datetime |
11 | 11 | from zoneinfo import ZoneInfo |
12 | | -from logging import getLogger, Logger, INFO |
| 12 | +from logging import getLogger, Logger |
13 | 13 | from os import getcwd |
14 | 14 | from pathlib import Path |
15 | 15 |
|
16 | | -from pandas import DataFrame, read_csv, isnull |
| 16 | +from pandas import DataFrame, isnull |
17 | 17 |
|
18 | 18 | from django.conf import settings |
19 | 19 |
|
20 | 20 | from app.models import Task, AcademicGroup |
| 21 | +from load_tasks import load_nonunit_tasks_from_load_master_csv |
21 | 22 |
|
22 | 23 |
|
23 | 24 | # Set up logging |
24 | 25 | logger: Logger = getLogger(__name__) |
25 | 26 |
|
# Hardcoded for ease of dealing with the manage.py shell.
# NOTE: join with the `/` operator so a path separator is inserted between
# the working directory and the filename — plain string concatenation
# (getcwd() + "file.csv") would yield e.g. "/home/userspreadsheet_...csv".
CSV_FILES: dict[int, Path] = {
    2024: Path(getcwd()) / "spreadsheet_tasks_nonunit_2024.csv",
    2025: Path(getcwd()) / "spreadsheet_tasks_nonunit_2025.csv",
}
| 32 | + |
def import_nonunit_tasks(path: Path, year: int, initial_pk: int) -> int:
    """
    Imports tasks not associated with a unit from a cut-down version of the load_master, and adds them to the DB.

    :param path: The path to the spreadsheet file
    :param year: The year the file is for (2024 for 2024/2025).
    :param initial_pk: The first PK to assign.
    :return: The next free PK, i.e. initial_pk plus the number of rows processed
             (suitable to pass straight back in as initial_pk for the next file).
    """
    logger.info(f"Importing tasks for: {year}, path: {path}")

    # Track the history of creation
    settings.SIMPLE_HISTORY_ENABLED = True

    try:
        # Read the CSV
        load_df: DataFrame = load_nonunit_tasks_from_load_master_csv(path)

        tasks_created: int = 0
        # Back-date every history entry for this import to the start of the
        # academic year the file covers.
        history_date: datetime = datetime(
            year=year, month=9, day=20, hour=0, minute=0, second=0,
            tzinfo=ZoneInfo("GMT"),
        )

        for idx, row in load_df.iterrows():
            # Iterate through the dataframe, and for each row create a new task and save the details.
            try:
                task, created = Task.objects.get_or_create(
                    pk=initial_pk+idx,
                    academic_group=AcademicGroup.objects.get(code=row.academic_group) if not isnull(row.academic_group) else None,
                    title=row.task_name,
                    description=row.description,
                    notes=row.notes,
                    # A load_fixed of -1 is the sentinel for a full-time task,
                    # which has no fixed load value of its own.
                    load_fixed=row.load_fixed if row.load_fixed != -1 else None,
                    load_fixed_first=row.load_fixed_first,
                    is_full_time=(row.load_fixed == -1),
                )
                task._history_date = history_date
                task.save()
                # `created` is a bool, so this only counts newly-created rows.
                tasks_created += created

            except Exception as e:
                # Best-effort import: log the failing row and carry on.
                logger.warning(
                    f"Row {idx}: Failed to import: {e} - {row}"
                )

        logger.info(f"Imported tasks for: {year}, created: {tasks_created}")

    finally:
        # Stop tracking history changes, even if the load or loop raised,
        # so the history flag is never left enabled for later DB work.
        settings.SIMPLE_HISTORY_ENABLED = False

    return len(load_df) + initial_pk
| 85 | + |
def main():
    """
    Runs the import for the hard-coded files.
    """
    logger.info(
        f"Importing non-unit tasks: {CSV_FILES}"
    )

    # Each file's import returns the next free PK, which seeds the next file
    # so task PKs never collide across years.
    next_pk: int = 0
    for import_year, csv_path in CSV_FILES.items():
        next_pk = import_nonunit_tasks(csv_path, import_year, next_pk)
0 commit comments