From dff69f5fdd53ff7b998b6da8a4d899a451591a8a Mon Sep 17 00:00:00 2001 From: daniel Date: Thu, 7 Dec 2023 16:57:28 -0500 Subject: [PATCH 01/29] build: inlcude bsoup library --- apiserver/requirements/base.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt index 969ab3c8931..249b29d48c2 100644 --- a/apiserver/requirements/base.txt +++ b/apiserver/requirements/base.txt @@ -33,4 +33,5 @@ django_celery_beat==2.5.0 psycopg-binary==3.1.10 psycopg-c==3.1.10 scout-apm==2.26.1 -openpyxl==3.1.2 \ No newline at end of file +openpyxl==3.1.2 +beautifulsoup4==4.12.2 \ No newline at end of file From 3de418c2f5f0e14169df4947328f509ae399a219 Mon Sep 17 00:00:00 2001 From: daniel Date: Thu, 7 Dec 2023 16:58:14 -0500 Subject: [PATCH 02/29] feat(utils): create class for aws s3 management --- apiserver/plane/utils/s3.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 apiserver/plane/utils/s3.py diff --git a/apiserver/plane/utils/s3.py b/apiserver/plane/utils/s3.py new file mode 100644 index 00000000000..53f23d836f2 --- /dev/null +++ b/apiserver/plane/utils/s3.py @@ -0,0 +1,32 @@ +import re +import boto3 +from botocore.client import Config +from urllib.parse import urlparse + +from django.conf import settings + + +class S3: + def __init__(self): + self.client = boto3.client( + "s3", + region_name=settings.AWS_REGION, + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + config=Config(signature_version=settings.AWS_S3_SIGNATURE_VERSION), + ) + + def refresh_url(self, old_url, time=settings.AWS_S3_MAX_AGE_SECONDS): + path = urlparse(str(old_url)).path.lstrip("/") + url = self.client.generate_presigned_url( + ClientMethod="get_object", + ExpiresIn=time, + Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": path}, + ) + + return url + + @staticmethod + def verify_s3_url(url): + pattern = re.compile(r"amazonaws\.com") + return pattern.search(url) From 1c03ab22715c282c7f150324b9d13e3d96f40dad Mon Sep 17 00:00:00 2001 From: daniel Date: Thu, 7 Dec 2023 16:58:39 -0500 Subject: [PATCH 03/29] feat(utils): create functions to parse texto to html and update s3 url --- apiserver/plane/utils/parse_html.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 apiserver/plane/utils/parse_html.py diff --git a/apiserver/plane/utils/parse_html.py b/apiserver/plane/utils/parse_html.py new file mode 100644 index 00000000000..d473fbb4994 --- /dev/null +++ b/apiserver/plane/utils/parse_html.py @@ -0,0 +1,17 @@ +from bs4 import BeautifulSoup + +from plane.utils.s3 import S3 + + +def parse_text_to_html(html, features="html.parser"): + return BeautifulSoup(html, features) + + +def refresh_url_content(html): + s3 = S3() + for img_tag in html.find_all("img"): + old_src = img_tag["src"] + new_url = s3.refresh_url(old_src) + img_tag["src"] = new_url + + return html From 1c272c2cd1a3b42afb9655a71a3f1b01b5806ae2 Mon Sep 17 00:00:00 2001 From: daniel Date: Thu, 7 Dec 2023 17:00:06 -0500 Subject: [PATCH 04/29] feat(project): refresh cover_image s3 url in project serializers --- apiserver/plane/api/serializers/project.py | 26 ++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py index 49d986cae0b..49173074ad3 100644 --- a/apiserver/plane/api/serializers/project.py +++ b/apiserver/plane/api/serializers/project.py @@ -17,6 +17,7 
@@ ProjectDeployBoard, ProjectPublicMember, ) +from plane.utils.s3 import S3 class ProjectSerializer(BaseSerializer): @@ -78,6 +79,13 @@ def update(self, instance, validated_data): # If not same fail update raise serializers.ValidationError(detail="Project Identifier is already taken") + def to_representation(self, instance): + data = super().to_representation(instance) + if S3.verify_s3_url(instance.cover_image): + s3 = S3() + data["cover_image"] = s3.refresh_url(instance.cover_image) + return data + class ProjectLiteSerializer(BaseSerializer): class Meta: @@ -93,6 +101,13 @@ class Meta: ] read_only_fields = fields + def to_representation(self, instance): + data = super().to_representation(instance) + if S3.verify_s3_url(instance.cover_image): + s3 = S3() + data["cover_image"] = s3.refresh_url(instance.cover_image) + return data + class ProjectDetailSerializer(BaseSerializer): workspace = WorkSpaceSerializer(read_only=True) @@ -111,6 +126,13 @@ class Meta: model = Project fields = "__all__" + def to_representation(self, instance): + data = super().to_representation(instance) + if S3.verify_s3_url(instance.cover_image): + s3 = S3() + data["cover_image"] = s3.refresh_url(instance.cover_image) + return data + class ProjectMemberSerializer(BaseSerializer): workspace = WorkspaceLiteSerializer(read_only=True) @@ -178,12 +200,12 @@ class Meta: fields = "__all__" read_only_fields = [ "workspace", - "project", "anchor", + "project", + "anchor", ] class ProjectPublicMemberSerializer(BaseSerializer): - class Meta: model = ProjectPublicMember fields = "__all__" From ed4a0e756282348fa46ba735ea16dfe3748c8a4f Mon Sep 17 00:00:00 2001 From: daniel Date: Thu, 7 Dec 2023 17:01:24 -0500 Subject: [PATCH 05/29] feat(issue): refresh content url in issue serializers --- apiserver/plane/api/serializers/issue.py | 51 ++++++++++++++---------- 1 file changed, 30 insertions(+), 21 deletions(-) diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py index 57539f24c47..2810afa5d91 100644 --- a/apiserver/plane/api/serializers/issue.py +++ b/apiserver/plane/api/serializers/issue.py @@ -32,6 +32,7 @@ IssueVote, IssueRelation, ) +from plane.utils.parse_html import parse_text_to_html, refresh_url_content class IssueFlatSerializer(BaseSerializer): @@ -297,30 +298,19 @@ class IssueRelationSerializer(BaseSerializer): class Meta: model = IssueRelation - fields = [ - "issue_detail", - "relation_type", - "related_issue", - "issue", - "id" - ] + fields = ["issue_detail", "relation_type", "related_issue", "issue", "id"] read_only_fields = [ "workspace", "project", ] + class RelatedIssueSerializer(BaseSerializer): issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue") class Meta: model = IssueRelation - fields = [ - "issue_detail", - "relation_type", - "related_issue", - "issue", - "id" - ] + fields = ["issue_detail", "relation_type", "related_issue", "issue", "id"] read_only_fields = [ "workspace", "project", @@ -438,9 +428,8 @@ class Meta: class IssueReactionSerializer(BaseSerializer): - actor_detail = UserLiteSerializer(read_only=True, source="actor") - + class Meta: model = IssueReaction fields = "__all__" @@ -473,7 +462,6 @@ class Meta: class IssueVoteSerializer(BaseSerializer): - actor_detail = UserLiteSerializer(read_only=True, source="actor") class Meta: @@ -541,8 +529,12 @@ class IssueSerializer(BaseSerializer): parent_detail = IssueStateFlatSerializer(read_only=True, source="parent") label_details = LabelSerializer(read_only=True, source="labels", many=True) 
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) - related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True) - issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True) + related_issues = IssueRelationSerializer( + read_only=True, source="issue_relation", many=True + ) + issue_relations = RelatedIssueSerializer( + read_only=True, source="issue_related", many=True + ) issue_cycle = IssueCycleDetailSerializer(read_only=True) issue_module = IssueModuleDetailSerializer(read_only=True) issue_link = IssueLinkSerializer(read_only=True, many=True) @@ -562,6 +554,14 @@ class Meta: "updated_at", ] + def to_representation(self, instance): + data = super().to_representation(instance) + + html = parse_text_to_html(instance.description_html) + data["description_html"] = str(refresh_url_content(html)) + + return data + class IssueLiteSerializer(BaseSerializer): workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") @@ -591,11 +591,21 @@ class Meta: "updated_at", ] + def to_representation(self, instance): + data = super().to_representation(instance) + + html = parse_text_to_html(instance.description_html) + data["description_html"] = str(refresh_url_content(html)) + + return data + class IssuePublicSerializer(BaseSerializer): project_detail = ProjectLiteSerializer(read_only=True, source="project") state_detail = StateLiteSerializer(read_only=True, source="state") - reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions") + reactions = IssueReactionSerializer( + read_only=True, many=True, source="issue_reactions" + ) votes = IssueVoteSerializer(read_only=True, many=True) class Meta: @@ -618,7 +628,6 @@ class Meta: read_only_fields = fields - class IssueSubscriberSerializer(BaseSerializer): class Meta: model = IssueSubscriber From dfca75b9c49bf6ebdea15b24327dad7dc36b2e34 Mon Sep 17 00:00:00 2001 From: daniel Date: Mon, 11 Dec 2023 22:27:38 -0500 Subject: [PATCH 06/29] feat(utils): include validation if minio is true --- apiserver/plane/utils/s3.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/apiserver/plane/utils/s3.py b/apiserver/plane/utils/s3.py index 53f23d836f2..2a274fc3a8d 100644 --- a/apiserver/plane/utils/s3.py +++ b/apiserver/plane/utils/s3.py @@ -8,13 +8,22 @@ class S3: def __init__(self): - self.client = boto3.client( - "s3", - region_name=settings.AWS_REGION, - aws_access_key_id=settings.AWS_ACCESS_KEY_ID, - aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, - config=Config(signature_version=settings.AWS_S3_SIGNATURE_VERSION), - ) + if settings.USE_MINIO: + self.client = boto3.client( + "s3", + endpoint_url=settings.AWS_S3_ENDPOINT_URL, + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + config=Config(signature_version="s3v4"), + ) + else: + self.client = boto3.client( + "s3", + region_name=settings.AWS_REGION, + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + config=Config(signature_version=settings.AWS_S3_SIGNATURE_VERSION), + ) def refresh_url(self, old_url, time=settings.AWS_S3_MAX_AGE_SECONDS): path = urlparse(str(old_url)).path.lstrip("/") From 5319dfcc82d4801d8002c240a9ecfa6b058c7543 Mon Sep 17 00:00:00 2001 From: daniel Date: Mon, 11 Dec 2023 22:28:39 -0500 Subject: [PATCH 07/29] feat(issue): create class for refresh description_html s3 links --- 
apiserver/plane/api/serializers/issue.py | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py index 2810afa5d91..ffa65d8f38c 100644 --- a/apiserver/plane/api/serializers/issue.py +++ b/apiserver/plane/api/serializers/issue.py @@ -35,6 +35,16 @@ from plane.utils.parse_html import parse_text_to_html, refresh_url_content +class BaseIssueSerializerMixin: + def refresh_html_content(self, instance): + html = parse_text_to_html(instance.description_html) + refreshed, html = refresh_url_content(html) + + if refreshed: + instance.description_html = html + instance.save() + + class IssueFlatSerializer(BaseSerializer): ## Contain only flat fields @@ -523,7 +533,7 @@ class Meta: fields = "__all__" -class IssueSerializer(BaseSerializer): +class IssueSerializer(BaseSerializer, BaseIssueSerializerMixin): project_detail = ProjectLiteSerializer(read_only=True, source="project") state_detail = StateSerializer(read_only=True, source="state") parent_detail = IssueStateFlatSerializer(read_only=True, source="parent") @@ -555,15 +565,12 @@ class Meta: ] def to_representation(self, instance): + self.refresh_html_content(instance) data = super().to_representation(instance) - - html = parse_text_to_html(instance.description_html) - data["description_html"] = str(refresh_url_content(html)) - return data -class IssueLiteSerializer(BaseSerializer): +class IssueLiteSerializer(BaseSerializer, BaseIssueSerializerMixin): workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") project_detail = ProjectLiteSerializer(read_only=True, source="project") state_detail = StateLiteSerializer(read_only=True, source="state") @@ -592,11 +599,8 @@ class Meta: ] def to_representation(self, instance): + self.refresh_html_content(instance) data = super().to_representation(instance) - - html = parse_text_to_html(instance.description_html) - data["description_html"] = str(refresh_url_content(html)) - return data From f8bd404f4b373729d515fafab095a1c6cd3aed99 Mon Sep 17 00:00:00 2001 From: daniel Date: Mon, 11 Dec 2023 22:32:44 -0500 Subject: [PATCH 08/29] feat(utils): add validation if s3 obj link is expired --- apiserver/plane/utils/parse_html.py | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/apiserver/plane/utils/parse_html.py b/apiserver/plane/utils/parse_html.py index d473fbb4994..bd6622a30f4 100644 --- a/apiserver/plane/utils/parse_html.py +++ b/apiserver/plane/utils/parse_html.py @@ -1,4 +1,8 @@ from bs4 import BeautifulSoup +from datetime import datetime, timezone +from urllib.parse import urlparse, parse_qs + +from django.conf import settings from plane.utils.s3 import S3 @@ -8,10 +12,25 @@ def parse_text_to_html(html, features="html.parser"): def refresh_url_content(html): + date_format = "%Y%m%dT%H%M%SZ" + refreshed = False + s3 = S3() for img_tag in html.find_all("img"): old_src = img_tag["src"] - new_url = s3.refresh_url(old_src) - img_tag["src"] = new_url + parsed_url = urlparse(old_src) + query_params = parse_qs(parsed_url.query) + x_amz_date = query_params.get("X-Amz-Date", [None])[0] + + x_amz_date_to_date = datetime.strptime(x_amz_date, date_format).replace( + tzinfo=timezone.utc + ) + actual_date = datetime.now(timezone.utc) + seconds_difference = (actual_date - x_amz_date_to_date).total_seconds() + + if seconds_difference >= (settings.AWS_S3_MAX_AGE_SECONDS - 20): + new_url = s3.refresh_url(old_src) + img_tag["src"] = new_url + 
refreshed = True - return html + return refreshed, str(html) From 6f57763465b660233cce8f1a985642887a488cc9 Mon Sep 17 00:00:00 2001 From: daniel Date: Tue, 12 Dec 2023 15:24:45 -0500 Subject: [PATCH 09/29] feat(s3): create static method to verify if url file has expired --- apiserver/plane/utils/s3.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/apiserver/plane/utils/s3.py b/apiserver/plane/utils/s3.py index 2a274fc3a8d..8a751079341 100644 --- a/apiserver/plane/utils/s3.py +++ b/apiserver/plane/utils/s3.py @@ -1,7 +1,8 @@ import re import boto3 from botocore.client import Config -from urllib.parse import urlparse +from urllib.parse import urlparse, parse_qs +from datetime import datetime, timezone from django.conf import settings @@ -39,3 +40,17 @@ def refresh_url(self, old_url, time=settings.AWS_S3_MAX_AGE_SECONDS): def verify_s3_url(url): pattern = re.compile(r"amazonaws\.com") return pattern.search(url) + + @staticmethod + def url_file_has_experid(url, date_format="%Y%m%dT%H%M%SZ"): + parsed_url = urlparse(url) + query_params = parse_qs(parsed_url.query) + x_amz_date = query_params.get("X-Amz-Date", [None])[0] + + x_amz_date_to_date = datetime.strptime(x_amz_date, date_format).replace( + tzinfo=timezone.utc + ) + actual_date = datetime.now(timezone.utc) + seconds_difference = (actual_date - x_amz_date_to_date).total_seconds() + + return seconds_difference >= (settings.AWS_S3_MAX_AGE_SECONDS - 20) From b3eceb8377875df4836696fbd1c4cc8deda104c5 Mon Sep 17 00:00:00 2001 From: daniel Date: Tue, 12 Dec 2023 15:25:02 -0500 Subject: [PATCH 10/29] feat(s3): create static method to verify if url file has expired --- apiserver/plane/utils/parse_html.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/apiserver/plane/utils/parse_html.py b/apiserver/plane/utils/parse_html.py index bd6622a30f4..f4a8472dfe3 100644 --- a/apiserver/plane/utils/parse_html.py +++ b/apiserver/plane/utils/parse_html.py @@ -12,23 +12,13 @@ def parse_text_to_html(html, features="html.parser"): def refresh_url_content(html): - date_format = "%Y%m%dT%H%M%SZ" refreshed = False s3 = S3() for img_tag in html.find_all("img"): old_src = img_tag["src"] - parsed_url = urlparse(old_src) - query_params = parse_qs(parsed_url.query) - x_amz_date = query_params.get("X-Amz-Date", [None])[0] - x_amz_date_to_date = datetime.strptime(x_amz_date, date_format).replace( - tzinfo=timezone.utc - ) - actual_date = datetime.now(timezone.utc) - seconds_difference = (actual_date - x_amz_date_to_date).total_seconds() - - if seconds_difference >= (settings.AWS_S3_MAX_AGE_SECONDS - 20): + if S3.url_file_has_experid(old_src): new_url = s3.refresh_url(old_src) img_tag["src"] = new_url refreshed = True From 268c56343eb14c59acc5adfe6d4cdc3bcc046940 Mon Sep 17 00:00:00 2001 From: daniel Date: Tue, 12 Dec 2023 15:25:49 -0500 Subject: [PATCH 11/29] feat(project): create class to refresh project cover image --- apiserver/plane/api/serializers/project.py | 37 +++++++++++----------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py index 49173074ad3..bb663788cb7 100644 --- a/apiserver/plane/api/serializers/project.py +++ b/apiserver/plane/api/serializers/project.py @@ -20,7 +20,17 @@ from plane.utils.s3 import S3 -class ProjectSerializer(BaseSerializer): +class BaseProjectSerializerMixin: + def refresh_cover_image(self, instance): + cover_image = instance.cover_image + + if 
S3.verify_s3_url(cover_image) and S3.url_file_has_experid(cover_image): + s3 = S3() + instance.cover_image = s3.refresh_url(cover_image) + instance.save() + + +class ProjectSerializer(BaseSerializer, BaseProjectSerializerMixin): workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) class Meta: @@ -80,14 +90,11 @@ def update(self, instance, validated_data): raise serializers.ValidationError(detail="Project Identifier is already taken") def to_representation(self, instance): - data = super().to_representation(instance) - if S3.verify_s3_url(instance.cover_image): - s3 = S3() - data["cover_image"] = s3.refresh_url(instance.cover_image) - return data + self.refresh_cover_image(instance) + return super().to_representation(instance) -class ProjectLiteSerializer(BaseSerializer): +class ProjectLiteSerializer(BaseSerializer, BaseProjectSerializerMixin): class Meta: model = Project fields = [ @@ -102,14 +109,11 @@ class Meta: read_only_fields = fields def to_representation(self, instance): - data = super().to_representation(instance) - if S3.verify_s3_url(instance.cover_image): - s3 = S3() - data["cover_image"] = s3.refresh_url(instance.cover_image) - return data + self.refresh_cover_image(instance) + return super().to_representation(instance) -class ProjectDetailSerializer(BaseSerializer): +class ProjectDetailSerializer(BaseSerializer, BaseProjectSerializerMixin): workspace = WorkSpaceSerializer(read_only=True) default_assignee = UserLiteSerializer(read_only=True) project_lead = UserLiteSerializer(read_only=True) @@ -127,11 +131,8 @@ class Meta: fields = "__all__" def to_representation(self, instance): - data = super().to_representation(instance) - if S3.verify_s3_url(instance.cover_image): - s3 = S3() - data["cover_image"] = s3.refresh_url(instance.cover_image) - return data + self.refresh_cover_image(instance) + return super().to_representation(instance) class ProjectMemberSerializer(BaseSerializer): From ec9fcbc932f456e23b6d9e17f369da9a61d7cca6 Mon Sep 17 00:00:00 2001 From: daniel Date: Tue, 12 Dec 2023 15:26:52 -0500 Subject: [PATCH 12/29] fix: fix typo --- apiserver/plane/api/serializers/project.py | 2 +- apiserver/plane/utils/parse_html.py | 2 +- apiserver/plane/utils/s3.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py index bb663788cb7..666d7d1d9b7 100644 --- a/apiserver/plane/api/serializers/project.py +++ b/apiserver/plane/api/serializers/project.py @@ -24,7 +24,7 @@ class BaseProjectSerializerMixin: def refresh_cover_image(self, instance): cover_image = instance.cover_image - if S3.verify_s3_url(cover_image) and S3.url_file_has_experid(cover_image): + if S3.verify_s3_url(cover_image) and S3.url_file_has_expired(cover_image): s3 = S3() instance.cover_image = s3.refresh_url(cover_image) instance.save() diff --git a/apiserver/plane/utils/parse_html.py b/apiserver/plane/utils/parse_html.py index f4a8472dfe3..1c266b6880e 100644 --- a/apiserver/plane/utils/parse_html.py +++ b/apiserver/plane/utils/parse_html.py @@ -18,7 +18,7 @@ def refresh_url_content(html): for img_tag in html.find_all("img"): old_src = img_tag["src"] - if S3.url_file_has_experid(old_src): + if S3.url_file_has_expired(old_src): new_url = s3.refresh_url(old_src) img_tag["src"] = new_url refreshed = True diff --git a/apiserver/plane/utils/s3.py b/apiserver/plane/utils/s3.py index 8a751079341..800661c1621 100644 --- a/apiserver/plane/utils/s3.py +++ b/apiserver/plane/utils/s3.py @@ -42,7 +42,7 
@@ def verify_s3_url(url): return pattern.search(url) @staticmethod - def url_file_has_experid(url, date_format="%Y%m%dT%H%M%SZ"): + def url_file_has_expired(url, date_format="%Y%m%dT%H%M%SZ"): parsed_url = urlparse(url) query_params = parse_qs(parsed_url.query) x_amz_date = query_params.get("X-Amz-Date", [None])[0] From 7e38796410a99ab1a708d6d748d20e9e07c9261d Mon Sep 17 00:00:00 2001 From: daniel Date: Wed, 13 Dec 2023 13:59:05 -0500 Subject: [PATCH 13/29] feat(s3): create upload, delete and edit refresh url methods --- apiserver/plane/utils/s3.py | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/apiserver/plane/utils/s3.py b/apiserver/plane/utils/s3.py index 800661c1621..a7e8483d095 100644 --- a/apiserver/plane/utils/s3.py +++ b/apiserver/plane/utils/s3.py @@ -28,14 +28,37 @@ def __init__(self): def refresh_url(self, old_url, time=settings.AWS_S3_MAX_AGE_SECONDS): path = urlparse(str(old_url)).path.lstrip("/") + bucket_name = ( + settings.AWS_STORAGE_BUCKET_NAME + if settings.USE_MINIO + else settings.AWS_S3_BUCKET_NAME + ) + url = self.client.generate_presigned_url( ClientMethod="get_object", ExpiresIn=time, - Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": path}, + Params={"Bucket": bucket_name, "Key": path}, ) + if settings.USE_MINIO: + url = url.replace( + "http://plane-minio:9000/uploads/", + f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/", + ) + return url + def upload_file(self, file, bucket_name, file_name, acl, content_type): + self.client.upload_fileobj( + file, + bucket_name, + file_name, + ExtraArgs={"ACL": acl, "ContentType": content_type}, + ) + + def delete_file(self, bucket_name, path): + self.client.delete_object(Bucket=bucket_name, Key=path) + @staticmethod def verify_s3_url(url): pattern = re.compile(r"amazonaws\.com") From f34901027d07a3b7e8ff30c07a8b51bab129ea04 Mon Sep 17 00:00:00 2001 From: daniel Date: Wed, 13 Dec 2023 14:02:31 -0500 Subject: [PATCH 14/29] feat(s3): use s3 for all operations --- apiserver/plane/bgtasks/export_task.py | 58 +++++-------------- .../plane/bgtasks/exporter_expired_task.py | 24 ++------ 2 files changed, 17 insertions(+), 65 deletions(-) diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py index a45120eb5dd..a45554af64a 100644 --- a/apiserver/plane/bgtasks/export_task.py +++ b/apiserver/plane/bgtasks/export_task.py @@ -18,6 +18,7 @@ # Module imports from plane.db.models import Issue, ExporterHistory +from plane.utils.s3 import S3 def dateTimeConverter(time): @@ -71,51 +72,18 @@ def create_zip_file(files): def upload_to_s3(zip_file, workspace_id, token_id, slug): file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip" expires_in = 7 * 24 * 60 * 60 - - if settings.DOCKERIZED and settings.USE_MINIO: - s3 = boto3.client( - "s3", - endpoint_url=settings.AWS_S3_ENDPOINT_URL, - aws_access_key_id=settings.AWS_ACCESS_KEY_ID, - aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, - config=Config(signature_version="s3v4"), - ) - s3.upload_fileobj( - zip_file, - settings.AWS_STORAGE_BUCKET_NAME, - file_name, - ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"}, - ) - presigned_url = s3.generate_presigned_url( - "get_object", - Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name}, - ExpiresIn=expires_in, - ) - # Create the new url with updated domain and protocol - presigned_url = presigned_url.replace( - "http://plane-minio:9000/uploads/", - 
f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/", - ) - else: - s3 = boto3.client( - "s3", - region_name=settings.AWS_REGION, - aws_access_key_id=settings.AWS_ACCESS_KEY_ID, - aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, - config=Config(signature_version="s3v4"), - ) - s3.upload_fileobj( - zip_file, - settings.AWS_S3_BUCKET_NAME, - file_name, - ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"}, - ) - - presigned_url = s3.generate_presigned_url( - "get_object", - Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name}, - ExpiresIn=expires_in, - ) + s3 = S3() + + s3.upload_file( + zip_file, + settings.AWS_STORAGE_BUCKET_NAME + if settings.USE_MINIO + else settings.AWS_S3_BUCKET_NAME, + file_name, + "public-read", + "application/zip", + ) + presigned_url = s3.refresh_url(file_name, expires_in) exporter_instance = ExporterHistory.objects.get(token=token_id) diff --git a/apiserver/plane/bgtasks/exporter_expired_task.py b/apiserver/plane/bgtasks/exporter_expired_task.py index 45c53eaca05..8f5db2c43f1 100644 --- a/apiserver/plane/bgtasks/exporter_expired_task.py +++ b/apiserver/plane/bgtasks/exporter_expired_task.py @@ -1,5 +1,4 @@ # Python imports -import boto3 from datetime import timedelta # Django imports @@ -9,10 +8,10 @@ # Third party imports from celery import shared_task -from botocore.client import Config # Module imports from plane.db.models import ExporterHistory +from plane.utils.s3 import S3 @shared_task @@ -21,29 +20,14 @@ def delete_old_s3_link(): expired_exporter_history = ExporterHistory.objects.filter( Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8)) ).values_list("key", "id") - if settings.DOCKERIZED and settings.USE_MINIO: - s3 = boto3.client( - "s3", - endpoint_url=settings.AWS_S3_ENDPOINT_URL, - aws_access_key_id=settings.AWS_ACCESS_KEY_ID, - aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, - config=Config(signature_version="s3v4"), - ) - else: - s3 = boto3.client( - "s3", - region_name=settings.AWS_REGION, - aws_access_key_id=settings.AWS_ACCESS_KEY_ID, - aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, - config=Config(signature_version="s3v4"), - ) + s3 = S3() for file_name, exporter_id in expired_exporter_history: # Delete object from S3 if file_name: if settings.DOCKERIZED and settings.USE_MINIO: - s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name) + s3.delete_file(settings.AWS_STORAGE_BUCKET_NAME, file_name) else: - s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name) + s3.delete_file(settings.AWS_S3_BUCKET_NAME, file_name) ExporterHistory.objects.filter(id=exporter_id).update(url=None) From ded65df078ba8abc59bc6798b6af3ab8f11813c1 Mon Sep 17 00:00:00 2001 From: daniel Date: Wed, 13 Dec 2023 14:02:56 -0500 Subject: [PATCH 15/29] build: inlcude AWS_S3_MAX_AGE_SECONDS variable if use minio is true --- apiserver/plane/settings/production.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py index 9c6bd95a92b..11e5bda911a 100644 --- a/apiserver/plane/settings/production.py +++ b/apiserver/plane/settings/production.py @@ -107,6 +107,7 @@ AWS_DEFAULT_ACL = "public-read" AWS_QUERYSTRING_AUTH = False AWS_S3_FILE_OVERWRITE = False + AWS_S3_MAX_AGE_SECONDS = 60 * 60 # Custom Domain settings parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) @@ -278,4 +279,3 @@ # Unsplash Access key UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY") - 
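For orientation before later patches adjust the helper further, here is a minimal usage sketch of the S3 wrapper introduced in patches 02, 06 and 13, mirroring what upload_to_s3() in export_task.py now does. It assumes the Django settings referenced in the diffs (AWS_STORAGE_BUCKET_NAME, AWS_S3_MAX_AGE_SECONDS) are configured; the key name and payload are illustrative and not taken from the patches.

    import io

    from django.conf import settings

    from plane.utils.s3 import S3


    def publish_archive(payload: bytes, key: str) -> str:
        """Upload a zip archive and hand back a presigned download link."""
        s3 = S3()  # picks MinIO or AWS credentials from settings (patch 06)
        s3.upload_file(
            io.BytesIO(payload),  # upload_file() passes a file-like object to upload_fileobj
            settings.AWS_STORAGE_BUCKET_NAME,
            key,
            "public-read",
            "application/zip",
        )
        # refresh_url() accepts a bare key or a full URL; it signs a GET for the
        # object, defaulting to AWS_S3_MAX_AGE_SECONDS but overridable, as the
        # export task does with its 7-day window.
        return s3.refresh_url(key, 7 * 24 * 60 * 60)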
From 0f9331ad248de3ed2963f28e9338246d4ba5b590 Mon Sep 17 00:00:00 2001 From: daniel Date: Fri, 15 Dec 2023 21:35:07 -0500 Subject: [PATCH 16/29] feat(s3): change variable name --- apiserver/plane/utils/s3.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/apiserver/plane/utils/s3.py b/apiserver/plane/utils/s3.py index a7e8483d095..82742855f98 100644 --- a/apiserver/plane/utils/s3.py +++ b/apiserver/plane/utils/s3.py @@ -15,7 +15,7 @@ def __init__(self): endpoint_url=settings.AWS_S3_ENDPOINT_URL, aws_access_key_id=settings.AWS_ACCESS_KEY_ID, aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, - config=Config(signature_version="s3v4"), + config=Config(signature_version=settings.AWS_S3_SIGNATURE_VERSION), ) else: self.client = boto3.client( @@ -28,16 +28,11 @@ def __init__(self): def refresh_url(self, old_url, time=settings.AWS_S3_MAX_AGE_SECONDS): path = urlparse(str(old_url)).path.lstrip("/") - bucket_name = ( - settings.AWS_STORAGE_BUCKET_NAME - if settings.USE_MINIO - else settings.AWS_S3_BUCKET_NAME - ) url = self.client.generate_presigned_url( ClientMethod="get_object", ExpiresIn=time, - Params={"Bucket": bucket_name, "Key": path}, + Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": path}, ) if settings.USE_MINIO: From 60791478d21c73900ba2665f831860b1b80b7021 Mon Sep 17 00:00:00 2001 From: daniel Date: Fri, 15 Dec 2023 21:36:39 -0500 Subject: [PATCH 17/29] feat(project): change code for project serializer --- apiserver/plane/api/serializers/project.py | 25 ++------------- apiserver/plane/app/serializers/project.py | 37 +++++++++++++++++++--- 2 files changed, 35 insertions(+), 27 deletions(-) diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py index 5d12d930362..14eafb52e16 100644 --- a/apiserver/plane/api/serializers/project.py +++ b/apiserver/plane/api/serializers/project.py @@ -2,22 +2,11 @@ from rest_framework import serializers # Module imports -from plane.db.models import Project, ProjectIdentifier, WorkspaceMember, State, Estimate +from plane.db.models import Project, ProjectIdentifier, WorkspaceMember from .base import BaseSerializer -from plane.utils.s3 import S3 -class BaseProjectSerializerMixin: - def refresh_cover_image(self, instance): - cover_image = instance.cover_image - - if S3.verify_s3_url(cover_image) and S3.url_file_has_expired(cover_image): - s3 = S3() - instance.cover_image = s3.refresh_url(cover_image) - instance.save() - - -class ProjectSerializer(BaseSerializer, BaseProjectSerializerMixin): +class ProjectSerializer(BaseSerializer): total_members = serializers.IntegerField(read_only=True) total_cycles = serializers.IntegerField(read_only=True) total_modules = serializers.IntegerField(read_only=True) @@ -86,12 +75,8 @@ def create(self, validated_data): ) return project - def to_representation(self, instance): - self.refresh_cover_image(instance) - return super().to_representation(instance) - -class ProjectLiteSerializer(BaseSerializer, BaseProjectSerializerMixin): +class ProjectLiteSerializer(BaseSerializer): class Meta: model = Project fields = [ @@ -104,7 +89,3 @@ class Meta: "description", ] read_only_fields = fields - - def to_representation(self, instance): - self.refresh_cover_image(instance) - return super().to_representation(instance) diff --git a/apiserver/plane/app/serializers/project.py b/apiserver/plane/app/serializers/project.py index aef715e33a8..66e94e712d0 100644 --- a/apiserver/plane/app/serializers/project.py +++ b/apiserver/plane/app/serializers/project.py @@ 
-14,9 +14,20 @@ ProjectDeployBoard, ProjectPublicMember, ) +from plane.utils.s3 import S3 -class ProjectSerializer(BaseSerializer): +class BaseProjectSerializerMixin: + def refresh_cover_image(self, instance): + cover_image = instance.cover_image + + if S3.verify_s3_url(cover_image) and S3.url_file_has_expired(cover_image): + s3 = S3() + instance.cover_image = s3.refresh_url(cover_image) + instance.save() + + +class ProjectSerializer(BaseSerializer, BaseProjectSerializerMixin): workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) class Meta: @@ -75,8 +86,12 @@ def update(self, instance, validated_data): # If not same fail update raise serializers.ValidationError(detail="Project Identifier is already taken") + def to_representation(self, instance): + self.refresh_cover_image(instance) + return super().to_representation(instance) + -class ProjectLiteSerializer(BaseSerializer): +class ProjectLiteSerializer(BaseSerializer, BaseProjectSerializerMixin): class Meta: model = Project fields = [ @@ -90,8 +105,12 @@ class Meta: ] read_only_fields = fields + def to_representation(self, instance): + self.refresh_cover_image(instance) + return super().to_representation(instance) -class ProjectListSerializer(DynamicBaseSerializer): + +class ProjectListSerializer(DynamicBaseSerializer, BaseProjectSerializerMixin): is_favorite = serializers.BooleanField(read_only=True) total_members = serializers.IntegerField(read_only=True) total_cycles = serializers.IntegerField(read_only=True) @@ -121,8 +140,12 @@ class Meta: model = Project fields = "__all__" + def to_representation(self, instance): + self.refresh_cover_image(instance) + return super().to_representation(instance) + -class ProjectDetailSerializer(BaseSerializer): +class ProjectDetailSerializer(BaseSerializer, BaseProjectSerializerMixin): # workspace = WorkSpaceSerializer(read_only=True) default_assignee = UserLiteSerializer(read_only=True) project_lead = UserLiteSerializer(read_only=True) @@ -139,6 +162,10 @@ class Meta: model = Project fields = "__all__" + def to_representation(self, instance): + self.refresh_cover_image(instance) + return super().to_representation(instance) + class ProjectMemberSerializer(BaseSerializer): workspace = WorkspaceLiteSerializer(read_only=True) @@ -217,4 +244,4 @@ class Meta: "workspace", "project", "member", - ] \ No newline at end of file + ] From fe3e6e30c70060305bb162b820f8ccbcbe6e423d Mon Sep 17 00:00:00 2001 From: daniel Date: Fri, 15 Dec 2023 21:38:07 -0500 Subject: [PATCH 18/29] feat(issue): change code for issue serializer --- apiserver/plane/api/serializers/issue.py | 15 +---- apiserver/plane/app/serializers/issue.py | 79 +++++++++++++++--------- 2 files changed, 51 insertions(+), 43 deletions(-) diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py index 62d09335912..ae56c198669 100644 --- a/apiserver/plane/api/serializers/issue.py +++ b/apiserver/plane/api/serializers/issue.py @@ -21,8 +21,6 @@ IssueActivity, ProjectMember, ) -from plane.utils.parse_html import parse_text_to_html, refresh_url_content - from .base import BaseSerializer from .cycle import CycleSerializer, CycleLiteSerializer @@ -31,17 +29,7 @@ from .state import StateLiteSerializer -class BaseIssueSerializerMixin: - def refresh_html_content(self, instance): - html = parse_text_to_html(instance.description_html) - refreshed, html = refresh_url_content(html) - - if refreshed: - instance.description_html = html - instance.save() - - -class IssueSerializer(BaseSerializer, 
BaseIssueSerializerMixin): +class IssueSerializer(BaseSerializer): assignees = serializers.ListField( child=serializers.PrimaryKeyRelatedField( queryset=User.objects.values_list("id", flat=True) @@ -238,7 +226,6 @@ def update(self, instance, validated_data): return super().update(instance, validated_data) def to_representation(self, instance): - self.refresh_html_content(instance) data = super().to_representation(instance) if "assignees" in self.fields: if "assignees" in self.expand: diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py index b13d03e35a4..c040f62819a 100644 --- a/apiserver/plane/app/serializers/issue.py +++ b/apiserver/plane/app/serializers/issue.py @@ -31,9 +31,20 @@ IssueVote, IssueRelation, ) +from plane.utils.parse_html import parse_text_to_html, refresh_url_content -class IssueFlatSerializer(BaseSerializer): +class BaseIssueSerializerMixin: + def refresh_html_content(self, instance): + html = parse_text_to_html(instance.description_html) + refreshed, html = refresh_url_content(html) + + if refreshed: + instance.description_html = html + instance.save() + + +class IssueFlatSerializer(BaseSerializer, BaseIssueSerializerMixin): ## Contain only flat fields class Meta: @@ -51,6 +62,10 @@ class Meta: "is_draft", ] + def to_representation(self, instance): + self.refresh_html_content(instance) + return super().to_representation(instance) + class IssueProjectLiteSerializer(BaseSerializer): project_detail = ProjectLiteSerializer(source="project", read_only=True) @@ -100,8 +115,8 @@ class Meta: def to_representation(self, instance): data = super().to_representation(instance) - data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()] - data['labels'] = [str(label.id) for label in instance.labels.all()] + data["assignees"] = [str(assignee.id) for assignee in instance.assignees.all()] + data["labels"] = [str(label.id) for label in instance.labels.all()] return data def validate(self, data): @@ -232,7 +247,6 @@ class Meta: fields = "__all__" - class IssuePropertySerializer(BaseSerializer): class Meta: model = IssueProperty @@ -268,7 +282,6 @@ class Meta: class IssueLabelSerializer(BaseSerializer): - class Meta: model = IssueLabel fields = "__all__" @@ -283,30 +296,19 @@ class IssueRelationSerializer(BaseSerializer): class Meta: model = IssueRelation - fields = [ - "issue_detail", - "relation_type", - "related_issue", - "issue", - "id" - ] + fields = ["issue_detail", "relation_type", "related_issue", "issue", "id"] read_only_fields = [ "workspace", "project", ] + class RelatedIssueSerializer(BaseSerializer): issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue") class Meta: model = IssueRelation - fields = [ - "issue_detail", - "relation_type", - "related_issue", - "issue", - "id" - ] + fields = ["issue_detail", "relation_type", "related_issue", "issue", "id"] read_only_fields = [ "workspace", "project", @@ -424,9 +426,8 @@ class Meta: class IssueReactionSerializer(BaseSerializer): - actor_detail = UserLiteSerializer(read_only=True, source="actor") - + class Meta: model = IssueReaction fields = "__all__" @@ -459,7 +460,6 @@ class Meta: class IssueVoteSerializer(BaseSerializer): - actor_detail = UserLiteSerializer(read_only=True, source="actor") class Meta: @@ -506,7 +506,7 @@ class Meta: # Issue Serializer with state details -class IssueStateSerializer(DynamicBaseSerializer): +class IssueStateSerializer(DynamicBaseSerializer, BaseIssueSerializerMixin): label_details = 
LabelLiteSerializer(read_only=True, source="labels", many=True) state_detail = StateLiteSerializer(read_only=True, source="state") project_detail = ProjectLiteSerializer(read_only=True, source="project") @@ -520,15 +520,23 @@ class Meta: model = Issue fields = "__all__" + def to_representation(self, instance): + self.refresh_html_content(instance) + return super().to_representation(instance) -class IssueSerializer(BaseSerializer): + +class IssueSerializer(BaseSerializer, BaseIssueSerializerMixin): project_detail = ProjectLiteSerializer(read_only=True, source="project") state_detail = StateSerializer(read_only=True, source="state") parent_detail = IssueStateFlatSerializer(read_only=True, source="parent") label_details = LabelSerializer(read_only=True, source="labels", many=True) assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) - related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True) - issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True) + related_issues = IssueRelationSerializer( + read_only=True, source="issue_relation", many=True + ) + issue_relations = RelatedIssueSerializer( + read_only=True, source="issue_related", many=True + ) issue_cycle = IssueCycleDetailSerializer(read_only=True) issue_module = IssueModuleDetailSerializer(read_only=True) issue_link = IssueLinkSerializer(read_only=True, many=True) @@ -548,8 +556,12 @@ class Meta: "updated_at", ] + def to_representation(self, instance): + self.refresh_html_content(instance) + return super().to_representation(instance) + -class IssueLiteSerializer(DynamicBaseSerializer): +class IssueLiteSerializer(DynamicBaseSerializer, BaseIssueSerializerMixin): workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") project_detail = ProjectLiteSerializer(read_only=True, source="project") state_detail = StateLiteSerializer(read_only=True, source="state") @@ -577,11 +589,17 @@ class Meta: "updated_at", ] + def to_representation(self, instance): + self.refresh_html_content(instance) + return super().to_representation(instance) + -class IssuePublicSerializer(BaseSerializer): +class IssuePublicSerializer(BaseSerializer, BaseIssueSerializerMixin): project_detail = ProjectLiteSerializer(read_only=True, source="project") state_detail = StateLiteSerializer(read_only=True, source="state") - reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions") + reactions = IssueReactionSerializer( + read_only=True, many=True, source="issue_reactions" + ) votes = IssueVoteSerializer(read_only=True, many=True) class Meta: @@ -603,6 +621,9 @@ class Meta: ] read_only_fields = fields + def to_representation(self, instance): + self.refresh_html_content(instance) + return super().to_representation(instance) class IssueSubscriberSerializer(BaseSerializer): From b7c3d4c49439d7d0bf0dab9421d85dff570ec94b Mon Sep 17 00:00:00 2001 From: daniel Date: Fri, 15 Dec 2023 21:54:16 -0500 Subject: [PATCH 19/29] build(settings): include AWS_S3_SIGNATURE_VERSION, AWS_S3_BUCKET_AUTH and AWS_S3_MAX_AGE_SECONDS to config file --- apiserver/plane/settings/common.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py index 76528176b16..4384500c6b1 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -225,13 +225,17 @@ AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") 
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") -AWS_REGION = os.environ.get("AWS_REGION", "") -AWS_DEFAULT_ACL = "public-read" -AWS_QUERYSTRING_AUTH = False +AWS_REGION = os.environ.get("AWS_REGION", "us-east-2") +AWS_S3_SIGNATURE_VERSION = "s3v4" +AWS_S3_BUCKET_AUTH = True +AWS_QUERYSTRING_AUTH = True +AWS_DEFAULT_ACL = "private" if AWS_S3_BUCKET_AUTH else "public-read" AWS_S3_FILE_OVERWRITE = False +AWS_S3_MAX_AGE_SECONDS = 60 * 60 AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", None) or os.environ.get( "MINIO_ENDPOINT_URL", None ) + if AWS_S3_ENDPOINT_URL: parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" @@ -291,7 +295,9 @@ # Sentry Settings # Enable Sentry Settings -if bool(os.environ.get("SENTRY_DSN", False)) and os.environ.get("SENTRY_DSN").startswith("https://"): +if bool(os.environ.get("SENTRY_DSN", False)) and os.environ.get( + "SENTRY_DSN" +).startswith("https://"): sentry_sdk.init( dsn=os.environ.get("SENTRY_DSN", ""), integrations=[ @@ -321,7 +327,7 @@ ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) # Use Minio settings -USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 +USE_MINIO = 0 # Posthog settings POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY", False) From f9b0053958afeefd5722dc65e4607674aea60240 Mon Sep 17 00:00:00 2001 From: daniel Date: Fri, 15 Dec 2023 21:54:42 -0500 Subject: [PATCH 20/29] feat(issue): include validation if bucket is private --- apiserver/plane/app/serializers/issue.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py index c040f62819a..c7390b1ba66 100644 --- a/apiserver/plane/app/serializers/issue.py +++ b/apiserver/plane/app/serializers/issue.py @@ -1,5 +1,6 @@ # Django imports from django.utils import timezone +from django.conf import settings # Third Party imports from rest_framework import serializers @@ -36,12 +37,13 @@ class BaseIssueSerializerMixin: def refresh_html_content(self, instance): - html = parse_text_to_html(instance.description_html) - refreshed, html = refresh_url_content(html) + if settings.AWS_S3_BUCKET_AUTH: + html = parse_text_to_html(instance.description_html) + refreshed, html = refresh_url_content(html) - if refreshed: - instance.description_html = html - instance.save() + if refreshed: + instance.description_html = html + instance.save() class IssueFlatSerializer(BaseSerializer, BaseIssueSerializerMixin): From c09ca64f45e369e1543363e0343840fff44bc989 Mon Sep 17 00:00:00 2001 From: daniel Date: Fri, 15 Dec 2023 21:54:49 -0500 Subject: [PATCH 21/29] feat(project): include validation if bucket is private --- apiserver/plane/app/serializers/project.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/apiserver/plane/app/serializers/project.py b/apiserver/plane/app/serializers/project.py index 66e94e712d0..6eb32c566ae 100644 --- a/apiserver/plane/app/serializers/project.py +++ b/apiserver/plane/app/serializers/project.py @@ -1,3 +1,6 @@ +# Django imports +from django.conf import settings + # Third party imports from rest_framework import serializers @@ -19,12 +22,13 @@ class BaseProjectSerializerMixin: def refresh_cover_image(self, instance): - cover_image = instance.cover_image + if settings.AWS_S3_BUCKET_AUTH: + cover_image = instance.cover_image - if 
S3.verify_s3_url(cover_image) and S3.url_file_has_expired(cover_image): - s3 = S3() - instance.cover_image = s3.refresh_url(cover_image) - instance.save() + if S3.verify_s3_url(cover_image) and S3.url_file_has_expired(cover_image): + s3 = S3() + instance.cover_image = s3.refresh_url(cover_image) + instance.save() class ProjectSerializer(BaseSerializer, BaseProjectSerializerMixin): From 64904f6563ee6ed6a76acad9b33d6eeffb78b217 Mon Sep 17 00:00:00 2001 From: daniel Date: Fri, 15 Dec 2023 22:02:01 -0500 Subject: [PATCH 22/29] feat: delete unused imports --- apiserver/plane/bgtasks/export_task.py | 2 -- apiserver/plane/utils/parse_html.py | 4 ---- 2 files changed, 6 deletions(-) diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py index 8ef406627b2..a7ddd41c60f 100644 --- a/apiserver/plane/bgtasks/export_task.py +++ b/apiserver/plane/bgtasks/export_task.py @@ -2,7 +2,6 @@ import csv import io import json -import boto3 import zipfile # Django imports @@ -12,7 +11,6 @@ # Third party imports from celery import shared_task from sentry_sdk import capture_exception -from botocore.client import Config from openpyxl import Workbook # Module imports diff --git a/apiserver/plane/utils/parse_html.py b/apiserver/plane/utils/parse_html.py index 1c266b6880e..b6fcda11fd9 100644 --- a/apiserver/plane/utils/parse_html.py +++ b/apiserver/plane/utils/parse_html.py @@ -1,8 +1,4 @@ from bs4 import BeautifulSoup -from datetime import datetime, timezone -from urllib.parse import urlparse, parse_qs - -from django.conf import settings from plane.utils.s3 import S3 From ebdde98296940bdea26a587f035583cd28cbb81d Mon Sep 17 00:00:00 2001 From: daniel Date: Fri, 15 Dec 2023 22:10:30 -0500 Subject: [PATCH 23/29] docs: update docs --- apiserver/plane/app/serializers/issue.py | 2 ++ apiserver/plane/app/serializers/project.py | 2 ++ apiserver/plane/utils/s3.py | 2 ++ 3 files changed, 6 insertions(+) diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py index c7390b1ba66..0753bbf68cc 100644 --- a/apiserver/plane/app/serializers/issue.py +++ b/apiserver/plane/app/serializers/issue.py @@ -36,6 +36,8 @@ class BaseIssueSerializerMixin: + """abstract class for refresh s3 link in description htlm images""" + def refresh_html_content(self, instance): if settings.AWS_S3_BUCKET_AUTH: html = parse_text_to_html(instance.description_html) diff --git a/apiserver/plane/app/serializers/project.py b/apiserver/plane/app/serializers/project.py index 6eb32c566ae..e4cfff79791 100644 --- a/apiserver/plane/app/serializers/project.py +++ b/apiserver/plane/app/serializers/project.py @@ -21,6 +21,8 @@ class BaseProjectSerializerMixin: + """abstract class for refresh cover image s3 link""" + def refresh_cover_image(self, instance): if settings.AWS_S3_BUCKET_AUTH: cover_image = instance.cover_image diff --git a/apiserver/plane/utils/s3.py b/apiserver/plane/utils/s3.py index 82742855f98..985b4606e6e 100644 --- a/apiserver/plane/utils/s3.py +++ b/apiserver/plane/utils/s3.py @@ -8,6 +8,8 @@ class S3: + """class for manage s3 operations (upload, delete, refresh url file)""" + def __init__(self): if settings.USE_MINIO: self.client = boto3.client( From 5c62db3506c2845d56bad501408b35feb2ac7bfd Mon Sep 17 00:00:00 2001 From: daniel Date: Fri, 15 Dec 2023 22:16:53 -0500 Subject: [PATCH 24/29] build: set AWS_S3_BUCKET_AUTH default to false --- apiserver/plane/settings/common.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apiserver/plane/settings/common.py 
b/apiserver/plane/settings/common.py index 4384500c6b1..9fb0f3d774e 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -227,8 +227,8 @@ AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") AWS_REGION = os.environ.get("AWS_REGION", "us-east-2") AWS_S3_SIGNATURE_VERSION = "s3v4" -AWS_S3_BUCKET_AUTH = True -AWS_QUERYSTRING_AUTH = True +AWS_S3_BUCKET_AUTH = False +AWS_QUERYSTRING_AUTH = False AWS_DEFAULT_ACL = "private" if AWS_S3_BUCKET_AUTH else "public-read" AWS_S3_FILE_OVERWRITE = False AWS_S3_MAX_AGE_SECONDS = 60 * 60 From 583fd14553bdcf115a8a9f150bedc42185a63ad3 Mon Sep 17 00:00:00 2001 From: daniel Date: Fri, 15 Dec 2023 23:36:52 -0500 Subject: [PATCH 25/29] feat: include minio info --- apiserver/plane/bgtasks/export_task.py | 4 +--- apiserver/plane/bgtasks/exporter_expired_task.py | 5 +---- apiserver/plane/settings/common.py | 2 +- 3 files changed, 3 insertions(+), 8 deletions(-) diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py index a7ddd41c60f..995e0445159 100644 --- a/apiserver/plane/bgtasks/export_task.py +++ b/apiserver/plane/bgtasks/export_task.py @@ -73,9 +73,7 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug): s3.upload_file( zip_file, - settings.AWS_STORAGE_BUCKET_NAME - if settings.USE_MINIO - else settings.AWS_S3_BUCKET_NAME, + settings.AWS_STORAGE_BUCKET_NAME, file_name, "public-read", "application/zip", diff --git a/apiserver/plane/bgtasks/exporter_expired_task.py b/apiserver/plane/bgtasks/exporter_expired_task.py index d8fe27d6f5d..360ecb7cdd7 100644 --- a/apiserver/plane/bgtasks/exporter_expired_task.py +++ b/apiserver/plane/bgtasks/exporter_expired_task.py @@ -25,9 +25,6 @@ def delete_old_s3_link(): for file_name, exporter_id in expired_exporter_history: # Delete object from S3 if file_name: - if settings.USE_MINIO: - s3.delete_file(settings.AWS_STORAGE_BUCKET_NAME, file_name) - else: - s3.delete_file(settings.AWS_S3_BUCKET_NAME, file_name) + s3.delete_file(settings.AWS_STORAGE_BUCKET_NAME, file_name) ExporterHistory.objects.filter(id=exporter_id).update(url=None) diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py index 9fb0f3d774e..86a291839f9 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -327,7 +327,7 @@ ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) # Use Minio settings -USE_MINIO = 0 +USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 # Posthog settings POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY", False) From 69d031653fbfeb20b381aa18bffee16d3db5ecf6 Mon Sep 17 00:00:00 2001 From: Daniel Alba Date: Wed, 17 Jan 2024 23:40:14 -0500 Subject: [PATCH 26/29] build: include s3 info variables in .env file --- ENV_SETUP.md | 4 ++++ apiserver/.env.example | 2 ++ apiserver/plane/settings/common.py | 9 +++++---- deploy/coolify/coolify-docker-compose.yml | 6 ++++++ deploy/selfhost/variables.env | 2 ++ 5 files changed, 19 insertions(+), 4 deletions(-) diff --git a/ENV_SETUP.md b/ENV_SETUP.md index bfc30019624..8695583c12e 100644 --- a/ENV_SETUP.md +++ b/ENV_SETUP.md @@ -27,6 +27,8 @@ AWS_SECRET_ACCESS_KEY="secret-key" AWS_S3_ENDPOINT_URL="http://plane-minio:9000" # Changing this requires change in the nginx.conf for uploads if using minio setup AWS_S3_BUCKET_NAME="uploads" +AWS_S3_BUCKET_AUTH=False +AWS_QUERYSTRING_AUTH=False # Maximum file upload limit FILE_SIZE_LIMIT=5242880 ​ @@ -94,6 +96,8 @@ AWS_SECRET_ACCESS_KEY="secret-key" 
AWS_S3_ENDPOINT_URL="http://plane-minio:9000" # Changing this requires change in the nginx.conf for uploads if using minio setup AWS_S3_BUCKET_NAME="uploads" +AWS_S3_BUCKET_AUTH=False +AWS_QUERYSTRING_AUTH=False # Maximum file upload limit FILE_SIZE_LIMIT=5242880 ​ diff --git a/apiserver/.env.example b/apiserver/.env.example index 37178b39809..949802e83e7 100644 --- a/apiserver/.env.example +++ b/apiserver/.env.example @@ -31,6 +31,8 @@ AWS_SECRET_ACCESS_KEY="secret-key" AWS_S3_ENDPOINT_URL="http://plane-minio:9000" # Changing this requires change in the nginx.conf for uploads if using minio setup AWS_S3_BUCKET_NAME="uploads" +AWS_S3_BUCKET_AUTH=False +AWS_QUERYSTRING_AUTH=False # Maximum file upload limit FILE_SIZE_LIMIT=5242880 diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py index 86a291839f9..cbf3316c71b 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -225,12 +225,13 @@ AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") -AWS_REGION = os.environ.get("AWS_REGION", "us-east-2") +AWS_REGION = os.environ.get("AWS_REGION") +AWS_S3_REGION_NAME = os.environ.get("AWS_REGION") AWS_S3_SIGNATURE_VERSION = "s3v4" -AWS_S3_BUCKET_AUTH = False -AWS_QUERYSTRING_AUTH = False +AWS_S3_BUCKET_AUTH = os.environ.get("AWS_S3_BUCKET_AUTH", False) +AWS_QUERYSTRING_AUTH = os.environ.get("AWS_QUERYSTRING_AUTH", False) AWS_DEFAULT_ACL = "private" if AWS_S3_BUCKET_AUTH else "public-read" -AWS_S3_FILE_OVERWRITE = False +AWS_S3_FILE_OVERWRITE = os.environ.get("AWS_S3_FILE_OVERWRITE", False) AWS_S3_MAX_AGE_SECONDS = 60 * 60 AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", None) or os.environ.get( "MINIO_ENDPOINT_URL", None diff --git a/deploy/coolify/coolify-docker-compose.yml b/deploy/coolify/coolify-docker-compose.yml index 58e00a7a715..fd517b78bc9 100644 --- a/deploy/coolify/coolify-docker-compose.yml +++ b/deploy/coolify/coolify-docker-compose.yml @@ -54,6 +54,8 @@ services: - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} + - AWS_S3_BUCKET_AUTH=${AWS_S3_BUCKET_AUTH:-False} + - AWS_QUERYSTRING_AUTH=${AWS_QUERYSTRING_AUTH:-False} - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-} @@ -102,6 +104,8 @@ services: - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} + - AWS_S3_BUCKET_AUTH=${AWS_S3_BUCKET_AUTH:-False} + - AWS_QUERYSTRING_AUTH=${AWS_QUERYSTRING_AUTH:-False} - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-} @@ -148,6 +152,8 @@ services: - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} + - AWS_S3_BUCKET_AUTH=${AWS_S3_BUCKET_AUTH:-False} + - AWS_QUERYSTRING_AUTH=${AWS_QUERYSTRING_AUTH:-False} - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-} diff --git 
a/deploy/selfhost/variables.env b/deploy/selfhost/variables.env index 4a378181154..7df39979ffe 100644 --- a/deploy/selfhost/variables.env +++ b/deploy/selfhost/variables.env @@ -57,6 +57,8 @@ AWS_ACCESS_KEY_ID="access-key" AWS_SECRET_ACCESS_KEY="secret-key" AWS_S3_ENDPOINT_URL=http://plane-minio:9000 AWS_S3_BUCKET_NAME=uploads +AWS_S3_BUCKET_AUTH=False +AWS_QUERYSTRING_AUTH=False MINIO_ROOT_USER="access-key" MINIO_ROOT_PASSWORD="secret-key" BUCKET_NAME=uploads From f3c2314b6a97085926654714db6b9590f34634cf Mon Sep 17 00:00:00 2001 From: Daniel Alba Date: Wed, 17 Jan 2024 23:40:38 -0500 Subject: [PATCH 27/29] build: include s3 info variables in .env file --- deploy/selfhost/docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/deploy/selfhost/docker-compose.yml b/deploy/selfhost/docker-compose.yml index 8b4ff77ef02..e2fcec17481 100644 --- a/deploy/selfhost/docker-compose.yml +++ b/deploy/selfhost/docker-compose.yml @@ -55,6 +55,8 @@ x-app-env : &app-env - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-"secret-key"} - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} + - AWS_S3_BUCKET_AUTH=${AWS_S3_BUCKET_AUTH:-False} + - AWS_QUERYSTRING_AUTH=${AWS_QUERYSTRING_AUTH:-False} - MINIO_ROOT_USER=${MINIO_ROOT_USER:-"access-key"} - MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-"secret-key"} - BUCKET_NAME=${BUCKET_NAME:-uploads} From 4ff86818407a6f9806bc0163cb2a7701c9e20870 Mon Sep 17 00:00:00 2001 From: Daniel Alba Date: Thu, 18 Jan 2024 00:40:34 -0500 Subject: [PATCH 28/29] feat(s3): include refresh_url function in comment serializer --- apiserver/plane/app/serializers/issue.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py index 0753bbf68cc..357f5baca83 100644 --- a/apiserver/plane/app/serializers/issue.py +++ b/apiserver/plane/app/serializers/issue.py @@ -38,13 +38,13 @@ class BaseIssueSerializerMixin: """abstract class for refresh s3 link in description htlm images""" - def refresh_html_content(self, instance): + def refresh_html_content(self, instance, html, html_field_name="description_html"): if settings.AWS_S3_BUCKET_AUTH: - html = parse_text_to_html(instance.description_html) + html = parse_text_to_html(html) refreshed, html = refresh_url_content(html) if refreshed: - instance.description_html = html + setattr(instance, html_field_name, html) instance.save() @@ -67,7 +67,7 @@ class Meta: ] def to_representation(self, instance): - self.refresh_html_content(instance) + self.refresh_html_content(instance, instance.description_html) return super().to_representation(instance) @@ -472,7 +472,7 @@ class Meta: read_only_fields = fields -class IssueCommentSerializer(BaseSerializer): +class IssueCommentSerializer(BaseSerializer, BaseIssueSerializerMixin): actor_detail = UserLiteSerializer(read_only=True, source="actor") issue_detail = IssueFlatSerializer(read_only=True, source="issue") project_detail = ProjectLiteSerializer(read_only=True, source="project") @@ -493,6 +493,10 @@ class Meta: "updated_at", ] + def to_representation(self, instance): + self.refresh_html_content(instance, instance.comment_html, "comment_html") + return super().to_representation(instance) + class IssueStateFlatSerializer(BaseSerializer): state_detail = StateLiteSerializer(read_only=True, source="state") @@ -525,7 +529,7 @@ class Meta: fields = "__all__" def to_representation(self, instance): - 
self.refresh_html_content(instance) + self.refresh_html_content(instance, instance.description_html) return super().to_representation(instance) @@ -561,7 +565,7 @@ class Meta: ] def to_representation(self, instance): - self.refresh_html_content(instance) + self.refresh_html_content(instance, instance.description_html) return super().to_representation(instance) @@ -594,7 +598,7 @@ class Meta: ] def to_representation(self, instance): - self.refresh_html_content(instance) + self.refresh_html_content(instance, instance.description_html) return super().to_representation(instance) @@ -626,7 +630,7 @@ class Meta: read_only_fields = fields def to_representation(self, instance): - self.refresh_html_content(instance) + self.refresh_html_content(instance, instance.description_html) return super().to_representation(instance) From c12acd3a176ac11fc06f9f7de5422e57f3557f5d Mon Sep 17 00:00:00 2001 From: Daniel Alba Date: Thu, 18 Jan 2024 00:43:42 -0500 Subject: [PATCH 29/29] feat(s3): add verify for s3 url --- apiserver/plane/utils/parse_html.py | 2 +- apiserver/plane/utils/s3.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/apiserver/plane/utils/parse_html.py b/apiserver/plane/utils/parse_html.py index b6fcda11fd9..43af0cc3f09 100644 --- a/apiserver/plane/utils/parse_html.py +++ b/apiserver/plane/utils/parse_html.py @@ -14,7 +14,7 @@ def refresh_url_content(html): for img_tag in html.find_all("img"): old_src = img_tag["src"] - if S3.url_file_has_expired(old_src): + if S3.verify_s3_url(old_src) and S3.url_file_has_expired(old_src): new_url = s3.refresh_url(old_src) img_tag["src"] = new_url refreshed = True diff --git a/apiserver/plane/utils/s3.py b/apiserver/plane/utils/s3.py index 985b4606e6e..733885aaf09 100644 --- a/apiserver/plane/utils/s3.py +++ b/apiserver/plane/utils/s3.py @@ -58,8 +58,10 @@ def delete_file(self, bucket_name, path): @staticmethod def verify_s3_url(url): - pattern = re.compile(r"amazonaws\.com") - return pattern.search(url) + if url: + pattern = re.compile(r"amazonaws\.com") + return pattern.search(url) + return False @staticmethod def url_file_has_expired(url, date_format="%Y%m%dT%H%M%SZ"):
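With the last two patches in place, the refresh path is driven lazily from to_representation(): verify_s3_url() filters for amazonaws.com links, url_file_has_expired() compares the X-Amz-Date signing timestamp against AWS_S3_MAX_AGE_SECONDS minus a 20-second margin (with the default one-hour window, a link signed at 10:00:00 UTC counts as expired from 10:59:40), and only then is a new presigned URL generated and written back. Below is a condensed sketch of that flow outside the serializers; the sample markup in the trailing comment is a placeholder, not a real object.

    from django.conf import settings

    from plane.utils.parse_html import parse_text_to_html, refresh_url_content


    def refresh_embedded_images(description_html):
        """Re-sign any expired S3 image links embedded in stored HTML."""
        if not settings.AWS_S3_BUCKET_AUTH:
            # Public buckets serve permanent URLs, so there is nothing to rewrite.
            return False, description_html

        soup = parse_text_to_html(description_html)  # BeautifulSoup wrapper from patch 03
        # refresh_url_content() walks every <img>, re-signs the src only when both
        # verify_s3_url() and url_file_has_expired() pass, and reports whether
        # anything actually changed (patch 29).
        return refresh_url_content(soup)


    # refreshed, html = refresh_embedded_images(
    #     '<p><img src="https://uploads.s3.amazonaws.com/a.png?X-Amz-Date=20240101T000000Z"/></p>'
    # )
    # if refreshed:
    #     issue.description_html = html
    #     issue.save()

Note that url_file_has_expired() assumes the matched URL still carries its X-Amz-Date query parameter; an unsigned amazonaws.com link would need to be guarded before reaching it.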
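The BaseIssueSerializerMixin from patch 28 is not tied to issues: any serializer whose model stores rich HTML can opt in by overriding to_representation(). A hypothetical example follows, written as if it lived next to the modules above and using the package's relative imports; the Page model and its description_html field are assumptions for illustration, not part of this series.

    # e.g. apiserver/plane/app/serializers/page.py
    from .base import BaseSerializer
    from .issue import BaseIssueSerializerMixin
    from plane.db.models import Page


    class PageSerializer(BaseSerializer, BaseIssueSerializerMixin):
        class Meta:
            model = Page
            fields = "__all__"

        def to_representation(self, instance):
            # Re-sign expired S3 image links in the stored HTML before serializing,
            # exactly as the issue and comment serializers do above.
            self.refresh_html_content(instance, instance.description_html)
            return super().to_representation(instance)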
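One configuration detail to keep in mind: os.environ.get() returns strings, so with the .env entries added in patches 26 and 27 (AWS_S3_BUCKET_AUTH=False) the setting would arrive as the truthy string "False" rather than a boolean. A small sketch of how common.py could coerce these flags; the env_bool helper is illustrative and not part of the patches.

    import os


    def env_bool(name, default=False):
        """Read an environment flag as a real boolean ("1", "true", "yes", "on" count as enabled)."""
        raw = os.environ.get(name)
        if raw is None:
            return default
        return raw.strip().lower() in ("1", "true", "yes", "on")


    AWS_S3_BUCKET_AUTH = env_bool("AWS_S3_BUCKET_AUTH", False)
    AWS_QUERYSTRING_AUTH = env_bool("AWS_QUERYSTRING_AUTH", False)
    AWS_S3_FILE_OVERWRITE = env_bool("AWS_S3_FILE_OVERWRITE", False)
    AWS_DEFAULT_ACL = "private" if AWS_S3_BUCKET_AUTH else "public-read"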