Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
2c6aa52
chore: added issue relations in issue listing
NarayanBavisetti Sep 20, 2024
e098017
Merge branch 'preview' of github.com:makeplane/plane into chore/issue…
NarayanBavisetti Sep 25, 2024
04fb798
chore: added pagination for issue detail endpoint
NarayanBavisetti Sep 25, 2024
dc5b548
Merge branch 'preview' of github.com:makeplane/plane into chore/issue…
NarayanBavisetti Oct 14, 2024
1d79941
chore: bulk date update endpoint
NarayanBavisetti Oct 14, 2024
dadb2bd
chore: appended the target date
NarayanBavisetti Oct 16, 2024
720c546
Merge branch 'preview' of github.com:makeplane/plane into chore/issue…
NarayanBavisetti Oct 20, 2024
a765b95
chore: issue relation new types defined
NarayanBavisetti Oct 20, 2024
0895430
Merge branch 'preview' of github.com:makeplane/plane into chore/issue…
NarayanBavisetti Oct 23, 2024
47b7604
fix: order by and issue filters
NarayanBavisetti Oct 23, 2024
6dad58f
fix: passed order by in pagination
NarayanBavisetti Oct 23, 2024
ce773cb
Merge branch 'preview' of github.com:makeplane/plane into chore/issue…
rahulramesha Oct 24, 2024
5a33cae
Merge branch 'preview' of github.com:makeplane/plane into chore/issue…
rahulramesha Oct 25, 2024
07de992
Merge branch 'preview' of github.com:makeplane/plane into chore/issue…
NarayanBavisetti Oct 25, 2024
c2e8e1b
chore: changed the key for issue dates
NarayanBavisetti Oct 25, 2024
4ca5af4
Merge branch 'chore/issue-listing-relations' of github.com:makeplane/…
rahulramesha Oct 25, 2024
8597a00
Revamp Timeline Layout
rahulramesha Oct 25, 2024
6edb3e2
fix block dragging
rahulramesha Oct 25, 2024
096bdab
minor ui fixes
rahulramesha Oct 25, 2024
c24f422
improve auto scroll UX
rahulramesha Oct 25, 2024
24db506
remove unused import
rahulramesha Oct 25, 2024
795e380
fix timeline layout heights
rahulramesha Oct 27, 2024
351a2a8
modify base timeline store
rahulramesha Oct 27, 2024
bea7602
Segregate issue relation types
rahulramesha Oct 27, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
82 changes: 44 additions & 38 deletions apiserver/plane/app/serializers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,44 +49,47 @@ def _filter_fields(self, fields):
allowed.append(list(item.keys())[0])

for field in allowed:
from . import (
WorkspaceLiteSerializer,
ProjectLiteSerializer,
UserLiteSerializer,
StateLiteSerializer,
IssueSerializer,
LabelSerializer,
CycleIssueSerializer,
IssueLiteSerializer,
IssueRelationSerializer,
InboxIssueLiteSerializer,
IssueReactionLiteSerializer,
IssueLinkLiteSerializer,
)

# Expansion mapper
expansion = {
"user": UserLiteSerializer,
"workspace": WorkspaceLiteSerializer,
"project": ProjectLiteSerializer,
"default_assignee": UserLiteSerializer,
"project_lead": UserLiteSerializer,
"state": StateLiteSerializer,
"created_by": UserLiteSerializer,
"issue": IssueSerializer,
"actor": UserLiteSerializer,
"owned_by": UserLiteSerializer,
"members": UserLiteSerializer,
"assignees": UserLiteSerializer,
"labels": LabelSerializer,
"issue_cycle": CycleIssueSerializer,
"parent": IssueLiteSerializer,
"issue_relation": IssueRelationSerializer,
"issue_inbox": InboxIssueLiteSerializer,
"issue_reactions": IssueReactionLiteSerializer,
"issue_link": IssueLinkLiteSerializer,
"sub_issues": IssueLiteSerializer,
}
if field not in self.fields:
from . import (
WorkspaceLiteSerializer,
ProjectLiteSerializer,
UserLiteSerializer,
StateLiteSerializer,
IssueSerializer,
LabelSerializer,
CycleIssueSerializer,
IssueLiteSerializer,
IssueRelationSerializer,
InboxIssueLiteSerializer,
IssueReactionLiteSerializer,
IssueLinkLiteSerializer,
RelatedIssueSerializer,
)
Comment on lines +52 to +67
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Consider refactoring imports for better maintainability.

The current implementation uses lazy loading to prevent circular dependencies, which is good. However, consider extracting these serializer imports into a separate module (e.g., serializer_registry.py) to improve maintainability and reduce duplication, as these imports are repeated in the to_representation method.

Example refactor:

# serializer_registry.py
from functools import lru_cache

@lru_cache(maxsize=None)
def get_serializer_map():
    from . import (
        WorkspaceLiteSerializer,
        ProjectLiteSerializer,
        # ... other imports
    )
    return {
        "workspace": WorkspaceLiteSerializer,
        # ... other mappings
    }

Then in this file:

from .serializer_registry import get_serializer_map

# Use get_serializer_map() when needed


# Expansion mapper
expansion = {
"user": UserLiteSerializer,
"workspace": WorkspaceLiteSerializer,
"project": ProjectLiteSerializer,
"default_assignee": UserLiteSerializer,
"project_lead": UserLiteSerializer,
"state": StateLiteSerializer,
"created_by": UserLiteSerializer,
"issue": IssueSerializer,
"actor": UserLiteSerializer,
"owned_by": UserLiteSerializer,
"members": UserLiteSerializer,
"assignees": UserLiteSerializer,
"labels": LabelSerializer,
"issue_cycle": CycleIssueSerializer,
"parent": IssueLiteSerializer,
"issue_relation": IssueRelationSerializer,
"issue_related": RelatedIssueSerializer,
"issue_inbox": InboxIssueLiteSerializer,
"issue_reactions": IssueReactionLiteSerializer,
"issue_link": IssueLinkLiteSerializer,
"sub_issues": IssueLiteSerializer,
}
Comment on lines +69 to +92
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Remove duplication of expansion mapper dictionary.

The expansion mapper dictionary is duplicated in both _filter_fields and to_representation methods. This violates the DRY principle and could lead to maintenance issues.

Consider making it a class attribute or using the previously suggested serializer_registry.py:

class DynamicBaseSerializer(BaseSerializer):
    EXPANSION_MAPPER = {
        "user": "UserLiteSerializer",
        "workspace": "WorkspaceLiteSerializer",
        # ... other mappings
    }

    def _get_expansion_serializer(self, key):
        serializer_map = get_serializer_map()
        return serializer_map.get(key)


if field not in self.fields and field in expansion:
self.fields[field] = expansion[field](
Expand All @@ -104,6 +107,7 @@ def _filter_fields(self, fields):
"issue_attachment",
"issue_link",
"sub_issues",
"issue_related",
]
else False
)
Expand Down Expand Up @@ -133,6 +137,7 @@ def to_representation(self, instance):
IssueReactionLiteSerializer,
IssueAttachmentLiteSerializer,
IssueLinkLiteSerializer,
RelatedIssueSerializer,
)

# Expansion mapper
Expand All @@ -153,6 +158,7 @@ def to_representation(self, instance):
"issue_cycle": CycleIssueSerializer,
"parent": IssueLiteSerializer,
"issue_relation": IssueRelationSerializer,
"issue_related": RelatedIssueSerializer,
"issue_inbox": InboxIssueLiteSerializer,
"issue_reactions": IssueReactionLiteSerializer,
"issue_attachment": IssueAttachmentLiteSerializer,
Expand Down
13 changes: 13 additions & 0 deletions apiserver/plane/app/urls/issue.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,9 @@
BulkArchiveIssuesEndpoint,
DeletedIssuesListViewSet,
IssuePaginatedViewSet,
IssueDetailEndpoint,
IssueAttachmentV2Endpoint,
IssueBulkUpdateDateEndpoint,
)

urlpatterns = [
Expand All @@ -40,6 +42,12 @@
),
name="project-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues-detail/",
IssueDetailEndpoint.as_view(),
name="project-issue-detail",
),
# updated v1 paginated issues
# updated v2 paginated issues
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/v2/issues/",
Expand Down Expand Up @@ -307,4 +315,9 @@
DeletedIssuesListViewSet.as_view(),
name="deleted-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-dates/",
IssueBulkUpdateDateEndpoint.as_view(),
name="project-issue-dates",
),
]
2 changes: 2 additions & 0 deletions apiserver/plane/app/views/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,8 @@
BulkDeleteIssuesEndpoint,
DeletedIssuesListViewSet,
IssuePaginatedViewSet,
IssueDetailEndpoint,
IssueBulkUpdateDateEndpoint,
)

from .issue.activity import (
Expand Down
191 changes: 191 additions & 0 deletions apiserver/plane/app/views/issue/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -976,3 +976,194 @@ def list(self, request, slug, project_id):
)

return Response(paginated_data, status=status.HTTP_200_OK)


class IssueDetailEndpoint(BaseAPIView):
    """Paginated, filterable listing of a project's issues with detail
    annotations (cycle, labels, assignees, modules, and related counts).

    Despite the name, this returns a *list* endpoint response: the
    queryset is filtered, ordered, and handed to ``self.paginate``.
    """

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def get(self, request, slug, project_id):
        # Translate the query-string into ORM filters (project convention
        # via issue_filters); applied after the annotations below.
        filters = issue_filters(request.query_params, "GET")
        issue = (
            Issue.issue_objects.filter(
                workspace__slug=slug, project_id=project_id
            )
            .select_related("workspace", "project", "state", "parent")
            .prefetch_related("assignees", "labels", "issue_module__module")
            # Active (non-deleted) cycle membership, if any — at most one.
            .annotate(
                cycle_id=Subquery(
                    CycleIssue.objects.filter(
                        issue=OuterRef("id"), deleted_at__isnull=True
                    ).values("cycle_id")[:1]
                )
            )
            # Aggregate related ids into arrays; Coalesce guarantees an
            # empty list (never NULL) when there are no related rows.
            .annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=Q(
                            ~Q(labels__id__isnull=True)
                            & Q(label_issue__deleted_at__isnull=True),
                        ),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        # Only active project members with live assignments.
                        filter=Q(
                            ~Q(assignees__id__isnull=True)
                            & Q(assignees__member_project__is_active=True)
                            & Q(issue_assignee__deleted_at__isnull=True)
                        ),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                module_ids=Coalesce(
                    ArrayAgg(
                        "issue_module__module_id",
                        distinct=True,
                        # Exclude archived modules and soft-deleted links.
                        filter=Q(
                            ~Q(issue_module__module_id__isnull=True)
                            & Q(issue_module__module__archived_at__isnull=True)
                            & Q(issue_module__deleted_at__isnull=True)
                        ),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            )
            # Per-issue counts via correlated subqueries (Count as Func).
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=FileAsset.objects.filter(
                    issue_id=OuterRef("id"),
                    entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(
                    parent=OuterRef("id")
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
        )
        issue = issue.filter(**filters)
        order_by_param = request.GET.get("order_by", "-created_at")
        # Issue queryset
        # order_issue_queryset may rewrite both the queryset and the
        # effective ordering key, so capture both return values.
        issue, order_by_param = order_issue_queryset(
            issue_queryset=issue,
            order_by_param=order_by_param,
        )
        return self.paginate(
            request=request,
            order_by=order_by_param,
            queryset=(issue),
            on_results=lambda issue: IssueSerializer(
                issue,
                many=True,
                fields=self.fields,
                expand=self.expand,
            ).data,
        )


class IssueBulkUpdateDateEndpoint(BaseAPIView):
    """Bulk-update start/target dates for several issues in one request.

    Expects ``{"updates": [{"id": ..., "start_date": ..., "target_date": ...}, ...]}``;
    validates every entry before applying anything, logs an activity per
    changed field, and persists all changes with a single bulk_update.
    """

    def validate_dates(
        self, current_start, current_target, new_start, new_target
    ):
        """
        Validate that start date is before target date.

        ``current_*`` come from the model (``datetime.date`` or None) while
        ``new_*`` come from the JSON payload (ISO-8601 strings or None), so
        both sides are normalised to dates before comparing — previously a
        mixed str/date comparison raised TypeError (HTTP 500).
        Returns True when the effective (start, target) pair is valid.
        """
        from datetime import datetime

        def _as_date(value):
            # Coerce ISO strings / datetimes to a plain date; pass through
            # None and date objects unchanged.
            if isinstance(value, str):
                return datetime.fromisoformat(value).date()
            if isinstance(value, datetime):
                return value.date()
            return value

        start = _as_date(new_start or current_start)
        target = _as_date(new_target or current_target)

        if start and target and start > target:
            return False
        return True

    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
    def post(self, request, slug, project_id):

        updates = request.data.get("updates", [])

        # Drop malformed entries up front instead of raising KeyError on
        # the whole request when an update lacks an "id".
        updates = [update for update in updates if "id" in update]
        issue_ids = [update["id"] for update in updates]
        epoch = int(timezone.now().timestamp())

        # Fetch all relevant issues in a single query. Keys are stringified
        # so lookups succeed whether the payload carries str or UUID ids.
        issues = Issue.objects.filter(id__in=issue_ids)
        issues_dict = {str(issue.id): issue for issue in issues}

        # First pass: validate every update before mutating anything or
        # dispatching activity tasks, so one bad entry cannot leave the
        # request half-applied with activities already queued.
        for update in updates:
            issue = issues_dict.get(str(update["id"]))
            if not issue:
                continue
            if not self.validate_dates(
                issue.start_date,
                issue.target_date,
                update.get("start_date"),
                update.get("target_date"),
            ):
                return Response(
                    {
                        "message": "Start date cannot exceed target date",
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

        # Keyed by issue id so an issue updated on both fields is written
        # once by bulk_update instead of being appended twice.
        issues_to_update = {}

        for update in updates:
            issue_id = str(update["id"])
            issue = issues_dict.get(issue_id)

            if not issue:
                continue

            start_date = update.get("start_date")
            target_date = update.get("target_date")

            if start_date:
                issue_activity.delay(
                    type="issue.activity.updated",
                    requested_data=json.dumps(
                        {"start_date": update.get("start_date")}
                    ),
                    current_instance=json.dumps(
                        {"start_date": str(issue.start_date)}
                    ),
                    issue_id=str(issue_id),
                    actor_id=str(request.user.id),
                    project_id=str(project_id),
                    epoch=epoch,
                )
                issue.start_date = start_date
                issues_to_update[issue_id] = issue

            if target_date:
                issue_activity.delay(
                    type="issue.activity.updated",
                    requested_data=json.dumps(
                        {"target_date": update.get("target_date")}
                    ),
                    current_instance=json.dumps(
                        {"target_date": str(issue.target_date)}
                    ),
                    issue_id=str(issue_id),
                    actor_id=str(request.user.id),
                    project_id=str(project_id),
                    epoch=epoch,
                )
                issue.target_date = target_date
                issues_to_update[issue_id] = issue

        # Bulk update issues; skip the query entirely when nothing changed.
        if issues_to_update:
            Issue.objects.bulk_update(
                list(issues_to_update.values()),
                ["start_date", "target_date"],
            )

        return Response(
            {"message": "Issues updated successfully"},
            status=status.HTTP_200_OK,
        )
Loading