diff --git a/client/src/components/learningPath/DocumentUploadForm.vue b/client/src/components/learningPath/DocumentUploadForm.vue
index e56da889..4b042c4e 100644
--- a/client/src/components/learningPath/DocumentUploadForm.vue
+++ b/client/src/components/learningPath/DocumentUploadForm.vue
@@ -12,16 +12,16 @@ interface Props {
 const { t } = useI18n();
 
 const props = withDefaults(defineProps<Props>(), {
-  learningSequences: [],
+  learningSequences: () => [],
   showUploadErrorMessage: false,
 });
 
 const emit = defineEmits<{
-  (e: "formSubmit", data: object): void;
+  (e: "formSubmit", data: DocumentUploadData): void;
 }>();
 
 const formData = reactive({
-  file: null | File,
+  file: null,
   name: "",
   learningSequence: {
     id: -1,
@@ -36,8 +36,12 @@ const formErrors = reactive({
 });
 
 function fileChange(e: Event) {
-  const keys = Object.keys(e.target.files);
-  formData.file = keys.length > 0 ? e.target.files[keys[0]] : null;
+  const target = e.target as HTMLInputElement;
+  if (target === null || target.files === null) {
+    return;
+  }
+
+  formData.file = target.files.length > 0 ? target.files[0] : null;
 }
 
 function submitForm() {
@@ -49,7 +53,7 @@ function submitForm() {
 }
 
 function validateForm() {
-  formErrors.file = formData.file === 0;
+  formErrors.file = formData.file === null;
   formErrors.learningSequence = formData.learningSequence.id === -1;
   formErrors.name = formData.name === "";
 
@@ -62,7 +66,7 @@ function validateForm() {
 }
 
 function resetFormErrors() {
-  for (const [_name, value] of Object.entries(formErrors)) {
+  for (let [_name, value] of Object.entries(formErrors)) {
     value = false;
   }
 }
diff --git a/client/src/fetchHelpers.ts b/client/src/fetchHelpers.ts
index 518c1be7..abfe7d6a 100644
--- a/client/src/fetchHelpers.ts
+++ b/client/src/fetchHelpers.ts
@@ -39,6 +39,10 @@ export const itPost = (url: RequestInfo, data: unknown, options: RequestInit = {
     options
   );
 
+  if (options.method === undefined) {
+    options.method = "POST";
+  }
+
   // @ts-ignore
   options.headers["X-CSRFToken"] = getCookieValue("csrftoken");
 
diff --git a/client/src/pages/learningPath/CirclePage.vue b/client/src/pages/learningPath/CirclePage.vue
index 4330f7b1..6a497e56 100644
--- a/client/src/pages/learningPath/CirclePage.vue
+++ b/client/src/pages/learningPath/CirclePage.vue
@@ -110,9 +110,12 @@ onMounted(async () => {
 async function uploadDocument(data: DocumentUploadData) {
   showUploadErrorMessage.value = false;
   try {
+    if (!courseSessionsStore.courseSessionForRoute) {
+      throw new Error("No course session found");
+    }
     const newDocument = await uploadCircleDocument(
       data,
-      courseSessionsStore.courseSessionForRoute?.id
+      courseSessionsStore.courseSessionForRoute.id
     );
     const courseSessionStore = useCourseSessionsStore();
     courseSessionStore.addDocument(newDocument);
@@ -215,14 +218,23 @@ async function uploadDocument(data: DocumentUploadData) {

{{ $t("circlePage.documents.title") }}

-
    +
    1. {{ learningSequence.title }}

        -
      • +
      • {{ document.name }}
diff --git a/client/src/services/files.ts b/client/src/services/files.ts
index 1de8b101..df7c9263 100644
--- a/client/src/services/files.ts
+++ b/client/src/services/files.ts
@@ -2,7 +2,15 @@ import { itDelete, itFetch, itPost } from "@/fetchHelpers";
 import { getCookieValue } from "@/router/guards";
 import type { CircleDocument, DocumentUploadData } from "@/types";
 
+type FileData = {
+  fields: Record<string, string>;
+  url: string;
+};
+
 async function startFileUpload(fileData: DocumentUploadData, courseSessionId: number) {
+  if (fileData === null || fileData.file === null) {
+    return null;
+  }
   return await itPost(`/api/core/document/start/`, {
     file_type: fileData.file.type,
     file_name: fileData.file.name,
@@ -12,7 +20,7 @@ async function startFileUpload(fileData: DocumentUploadData, courseSessionId: nu
   });
 }
 
-function uploadFile(fileData, file: File) {
+function uploadFile(fileData: FileData, file: File) {
   if (fileData.fields) {
     return s3Upload(fileData, file);
   } else {
@@ -20,7 +28,7 @@
-function directUpload(fileData, file: File) {
+function directUpload(fileData: FileData, file: File) {
   const formData = new FormData();
 
   formData.append("file", file);
@@ -39,7 +47,7 @@
   handleUpload(fileData.url, options);
 }
 
-function s3Upload(fileData, file: File) {
+function s3Upload(fileData: FileData, file: File) {
   const formData = new FormData();
   for (const [name, value] of Object.entries(fileData.fields)) {
     formData.append(name, value);
@@ -55,7 +63,7 @@
   return handleUpload(fileData.url, options);
 }
 
-function handleUpload(url: string, options) {
+function handleUpload(url: string, options: RequestInit) {
   return itFetch(url, options).then((response) => {
     return response.json().catch(() => {
       return Promise.resolve(null);
@@ -67,6 +75,10 @@ export async function uploadCircleDocument(
   data: DocumentUploadData,
   courseSessionId: number
 ): Promise<CircleDocument> {
+  if (data.file === null) {
+    throw new Error("No file selected");
+  }
+
   const startData = await startFileUpload(data, courseSessionId);
   await uploadFile(startData, data.file);
 
diff --git a/client/src/stores/courseSessions.ts b/client/src/stores/courseSessions.ts
index 57133070..e3936d22 100644
--- a/client/src/stores/courseSessions.ts
+++ b/client/src/stores/courseSessions.ts
@@ -1,6 +1,6 @@
 import { itGetCached, itPost } from "@/fetchHelpers";
 import { deleteCircleDocument } from "@/services/files";
-import type { CircleExpert, CourseSession, CircleDocument } from "@/types";
+import type { CircleDocument, CircleExpert, CourseSession } from "@/types";
 import _ from "lodash";
 import log from "loglevel";
 
@@ -10,6 +10,16 @@ import { useRoute } from "vue-router";
 import { useCircleStore } from "./circle";
 import { useUserStore } from "./user";
 
+export type CourseSessionsStoreState = {
+  courseSessions: CourseSession[] | undefined;
+};
+
+export type LearningSequenceCircleDocument = {
+  id: number;
+  title: string;
+  documents: CircleDocument[];
+};
+
 function loadCourseSessionsData(reload = false) {
   log.debug("loadCourseSessionsData called");
   const courseSessions = ref([]);
@@ -17,7 +27,7 @@
 function userExpertCircles(
   userId: number,
-  courseSessionForRoute: CourseSession
+  courseSessionForRoute: CourseSession | undefined
 ): CircleExpert[] {
   if (!courseSessionForRoute) {
     return [];
   }
   return courseSessionForRoute.experts.filter((expert) => expert.user_id === userId);
 }
 
-export type CourseSessionsStoreState = {
-  courseSessions: CourseSession[] | undefined;
-};
-
 async function loadAndUpdate() {
   courseSessions.value = await itGetCached(`/api/course/sessions/`, {
     reload: reload,
diff --git a/client/src/types.ts b/client/src/types.ts
index c88e017d..742cdd4a 100644
--- a/client/src/types.ts
+++ b/client/src/types.ts
@@ -369,7 +369,7 @@ export interface ExpertSessionUser extends CourseSessionUser {
 
 // document upload
 export interface DocumentUploadData {
-  file: File;
+  file: File | null;
   name: string;
   learningSequence: {
     id: number;
diff --git a/server/config/settings/base.py b/server/config/settings/base.py
index fdf9c92a..cced5b15 100644
--- a/server/config/settings/base.py
+++ b/server/config/settings/base.py
@@ -573,6 +573,29 @@ GRAPPLE = {
     "APPS": ["core", "course", "learnpath", "competence", "media_library"],
 }
 
+# S3 BUCKET CONFIGURATION
+FILE_UPLOAD_STORAGE = env("FILE_UPLOAD_STORAGE", default="local")  # local | s3
+
+if FILE_UPLOAD_STORAGE == "local":
+    FILE_MAX_SIZE = env.int("FILE_MAX_SIZE", default=5242880)
+
+if FILE_UPLOAD_STORAGE == "s3":
+    # Using django-storages
+    # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html
+    DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
+
+    AWS_S3_ACCESS_KEY_ID = env("AWS_S3_ACCESS_KEY_ID")
+    AWS_S3_SECRET_ACCESS_KEY = env("AWS_S3_SECRET_ACCESS_KEY")
+    AWS_STORAGE_BUCKET_NAME = env("AWS_STORAGE_BUCKET_NAME")
+    AWS_S3_REGION_NAME = env("AWS_S3_REGION_NAME")
+    AWS_S3_SIGNATURE_VERSION = env("AWS_S3_SIGNATURE_VERSION", default="s3v4")
+    FILE_MAX_SIZE = env.int("FILE_MAX_SIZE", default=5242880)  # 5MB
+
+    # https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl
+    AWS_DEFAULT_ACL = env("AWS_DEFAULT_ACL", default="private")
+
+    AWS_PRESIGNED_EXPIRY = env.int("AWS_PRESIGNED_EXPIRY", default=60)  # seconds
+
 if APP_ENVIRONMENT == "development":
     # http://whitenoise.evans.io/en/latest/django.html#using-whitenoise-in-development
     INSTALLED_APPS = ["whitenoise.runserver_nostatic"] + INSTALLED_APPS  # noqa F405
@@ -607,28 +630,6 @@ if APP_ENVIRONMENT == "development":
     # https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration
     INSTALLED_APPS += ["django_extensions", "django_watchfiles"]  # noqa F405
 
-    # S3 BUCKET CONFIGURATION
-    FILE_UPLOAD_STORAGE = env("FILE_UPLOAD_STORAGE", default="local")  # local | s3
-
-    if FILE_UPLOAD_STORAGE == "local":
-        FILE_MAX_SIZE = env.int("FILE_MAX_SIZE", default=5242880)
-
-    if FILE_UPLOAD_STORAGE == "s3":
-        # Using django-storages
-        # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html
-        DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
-
-        AWS_S3_ACCESS_KEY_ID = env("AWS_S3_ACCESS_KEY_ID")
-        AWS_S3_SECRET_ACCESS_KEY = env("AWS_S3_SECRET_ACCESS_KEY")
-        AWS_STORAGE_BUCKET_NAME = env("AWS_STORAGE_BUCKET_NAME")
-        AWS_S3_REGION_NAME = env("AWS_S3_REGION_NAME")
-        AWS_S3_SIGNATURE_VERSION = env("AWS_S3_SIGNATURE_VERSION", default="s3v4")
-        FILE_MAX_SIZE = env.int("FILE_MAX_SIZE", default=5242880)  # 5MB
-
-        # https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl
-        AWS_DEFAULT_ACL = env("AWS_DEFAULT_ACL", default="private")
-
-        AWS_PRESIGNED_EXPIRY = env.int("AWS_PRESIGNED_EXPIRY", default=60)  # seconds
 if APP_ENVIRONMENT in ["production", "caprover"] or APP_ENVIRONMENT.startswith(
     "caprover"
 ):
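For context on how the settings above get consumed: with django-storages configured like this, a presigned S3 POST can be produced with boto3 roughly as sketched below. This is an illustrative sketch only; the function name and the exact Fields/Conditions are assumptions, and the project's real integration code lives in server/vbv_lernwelt/files/integrations.py, which this diff also touches further down.

# Illustrative sketch, not the project's actual implementation.
import boto3
from django.conf import settings


def sketch_generate_presigned_post(file_path: str, file_type: str, file_name: str) -> dict:
    s3_client = boto3.client(
        "s3",
        region_name=settings.AWS_S3_REGION_NAME,
        aws_access_key_id=settings.AWS_S3_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_S3_SECRET_ACCESS_KEY,
    )
    # generate_presigned_post returns {"url": ..., "fields": {...}} -- the same shape
    # that the client-side FileData type and s3Upload() in files.ts consume.
    return s3_client.generate_presigned_post(
        Bucket=settings.AWS_STORAGE_BUCKET_NAME,
        Key=file_path,
        Fields={
            "Content-Type": file_type,
            "Content-Disposition": f"attachment; filename={file_name}",
        },
        Conditions=[
            {"Content-Type": file_type},
            {"Content-Disposition": f"attachment; filename={file_name}"},
            ["content-length-range", 1, settings.FILE_MAX_SIZE],
        ],
        ExpiresIn=settings.AWS_PRESIGNED_EXPIRY,
    )

The client then POSTs the returned fields plus the file to the returned url, which is what s3Upload() in files.ts does; when no fields are returned, directUpload() takes the local-storage path instead.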
diff --git a/server/vbv_lernwelt/course/tests/test_document_uploads.py b/server/vbv_lernwelt/course/tests/test_document_uploads.py
index 06514bf1..526667e3 100644
--- a/server/vbv_lernwelt/course/tests/test_document_uploads.py
+++ b/server/vbv_lernwelt/course/tests/test_document_uploads.py
@@ -1,3 +1,4 @@
+from django.conf import settings
 from rest_framework.test import APITestCase
 
 from vbv_lernwelt.core.create_default_users import create_default_users
@@ -52,13 +53,17 @@ class DocumentUploadApiTestCase(APITestCase):
 
         self.assertEqual(response.status_code, 200)
         self.assertNotEqual(response.data["url"], "")
-        self.assertEqual(
-            response.data["fields"]["Content-Type"], self.test_data["file_type"]
-        )
-        self.assertEqual(
-            response.data["fields"]["Content-Disposition"],
-            f"attachment; filename={self.test_data['file_name']}",
-        )
+
+        if settings.FILE_UPLOAD_STORAGE == "s3":
+            self.assertTrue(response.data["url"].startswith("https://"))
+            self.assertEqual(
+                response.data["fields"]["Content-Type"], self.test_data["file_type"]
+            )
+
+            self.assertEqual(
+                response.data["fields"]["Content-Disposition"],
+                f"attachment; filename={self.test_data['file_name']}",
+            )
 
         file_id = response.data["file_id"]
         file = File.objects.get(id=file_id)
diff --git a/server/vbv_lernwelt/files/integrations.py b/server/vbv_lernwelt/files/integrations.py
index 99180a0d..11585466 100644
--- a/server/vbv_lernwelt/files/integrations.py
+++ b/server/vbv_lernwelt/files/integrations.py
@@ -113,3 +113,13 @@ def s3_generate_presigned_url(*, file_path: str) -> str:
         Params={"Bucket": credentials.bucket_name, "Key": file_path},
         ExpiresIn=credentials.presigned_expiry,
     )
+
+
+def s3_delete_file(*, file_path: str):
+    credentials = s3_get_credentials()
+    s3_client = s3_get_client()
+
+    return s3_client.delete_object(
+        Bucket=credentials.bucket_name,
+        Key=file_path,
+    )
diff --git a/server/vbv_lernwelt/files/management/__init__.py b/server/vbv_lernwelt/files/management/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/server/vbv_lernwelt/files/management/commands/__init__.py b/server/vbv_lernwelt/files/management/commands/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/server/vbv_lernwelt/files/management/commands/delete_unreferenced_files.py b/server/vbv_lernwelt/files/management/commands/delete_unreferenced_files.py
new file mode 100644
index 00000000..90f6ac21
--- /dev/null
+++ b/server/vbv_lernwelt/files/management/commands/delete_unreferenced_files.py
@@ -0,0 +1,47 @@
+from django.core.management.base import BaseCommand
+
+from vbv_lernwelt.files.models import File
+
+
+class Command(BaseCommand):
+    help = "Delete unreferenced uploads and delete their files"
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            "--dry-run",
+            action="store_true",
+            dest="dry_run",
+            default=False,
+            help="Dry run",
+        )
+
+    def handle(self, *args, **options):
+        dry_run = options["dry_run"]
+
+        num_deleted = 0
+
+        unreferenced_uploads = File.objects.filter(upload_finished_at__isnull=True)
+
+        if dry_run:
+            print("------ DRY RUN -------")
+
+        print(
+            "Going to delete {} unreferenced uploads".format(
+                unreferenced_uploads.count()
+            )
+        )
+        for upload in unreferenced_uploads:
+            try:
+                if not dry_run:
+                    upload.delete_file()
+                    file_id = upload.id
+                    upload.delete()
+                    print("Deleted file with id {}".format(file_id))
+                else:
+                    print("Would delete file with id {}".format(upload.id))
+                num_deleted += 1
+            except Exception as e:
+                print(e)
+                pass
+
+        print("Deleted {:d} uploads".format(num_deleted))
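The new delete_unreferenced_files command is run like any Django management command (python manage.py delete_unreferenced_files --dry-run). Scheduling it via cron or a CI job is a deployment assumption, not something this diff sets up; a minimal programmatic invocation looks like this:

# Sketch: --dry-run maps to the dest="dry_run" option defined in add_arguments above.
from django.core.management import call_command

call_command("delete_unreferenced_files", dry_run=True)  # report only
call_command("delete_unreferenced_files")  # actually delete unreferenced uploads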
diff --git a/server/vbv_lernwelt/files/models.py b/server/vbv_lernwelt/files/models.py
index 2a3950f5..30271730 100644
--- a/server/vbv_lernwelt/files/models.py
+++ b/server/vbv_lernwelt/files/models.py
@@ -3,7 +3,7 @@ from django.db import models
 
 from vbv_lernwelt.core.models import User
 from vbv_lernwelt.files.enums import FileUploadStorage
-from vbv_lernwelt.files.integrations import s3_generate_presigned_url
+from vbv_lernwelt.files.integrations import s3_delete_file, s3_generate_presigned_url
 from vbv_lernwelt.files.utils import file_generate_upload_path
 
 
@@ -39,3 +39,9 @@ class File(models.Model):
             return s3_generate_presigned_url(file_path=str(self.file))
 
         return f"{self.file.url}"
+
+    def delete_file(self):
+        if settings.FILE_UPLOAD_STORAGE == FileUploadStorage.S3.value:
+            return s3_delete_file(file_path=str(self.file))
+        else:
+            return self.file.delete()