-
Notifications
You must be signed in to change notification settings - Fork 3.6k
[WEB-2442] feat: Revamp Timeline Layout #5915
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
2c6aa52
e098017
04fb798
dc5b548
1d79941
dadb2bd
720c546
a765b95
0895430
47b7604
6dad58f
ce773cb
5a33cae
07de992
c2e8e1b
4ca5af4
8597a00
6edb3e2
096bdab
c24f422
24db506
795e380
351a2a8
bea7602
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -49,44 +49,47 @@ def _filter_fields(self, fields): | |
| allowed.append(list(item.keys())[0]) | ||
|
|
||
| for field in allowed: | ||
| from . import ( | ||
| WorkspaceLiteSerializer, | ||
| ProjectLiteSerializer, | ||
| UserLiteSerializer, | ||
| StateLiteSerializer, | ||
| IssueSerializer, | ||
| LabelSerializer, | ||
| CycleIssueSerializer, | ||
| IssueLiteSerializer, | ||
| IssueRelationSerializer, | ||
| InboxIssueLiteSerializer, | ||
| IssueReactionLiteSerializer, | ||
| IssueLinkLiteSerializer, | ||
| ) | ||
|
|
||
| # Expansion mapper | ||
| expansion = { | ||
| "user": UserLiteSerializer, | ||
| "workspace": WorkspaceLiteSerializer, | ||
| "project": ProjectLiteSerializer, | ||
| "default_assignee": UserLiteSerializer, | ||
| "project_lead": UserLiteSerializer, | ||
| "state": StateLiteSerializer, | ||
| "created_by": UserLiteSerializer, | ||
| "issue": IssueSerializer, | ||
| "actor": UserLiteSerializer, | ||
| "owned_by": UserLiteSerializer, | ||
| "members": UserLiteSerializer, | ||
| "assignees": UserLiteSerializer, | ||
| "labels": LabelSerializer, | ||
| "issue_cycle": CycleIssueSerializer, | ||
| "parent": IssueLiteSerializer, | ||
| "issue_relation": IssueRelationSerializer, | ||
| "issue_inbox": InboxIssueLiteSerializer, | ||
| "issue_reactions": IssueReactionLiteSerializer, | ||
| "issue_link": IssueLinkLiteSerializer, | ||
| "sub_issues": IssueLiteSerializer, | ||
| } | ||
| if field not in self.fields: | ||
| from . import ( | ||
| WorkspaceLiteSerializer, | ||
| ProjectLiteSerializer, | ||
| UserLiteSerializer, | ||
| StateLiteSerializer, | ||
| IssueSerializer, | ||
| LabelSerializer, | ||
| CycleIssueSerializer, | ||
| IssueLiteSerializer, | ||
| IssueRelationSerializer, | ||
| InboxIssueLiteSerializer, | ||
| IssueReactionLiteSerializer, | ||
| IssueLinkLiteSerializer, | ||
| RelatedIssueSerializer, | ||
| ) | ||
|
|
||
| # Expansion mapper | ||
| expansion = { | ||
| "user": UserLiteSerializer, | ||
| "workspace": WorkspaceLiteSerializer, | ||
| "project": ProjectLiteSerializer, | ||
| "default_assignee": UserLiteSerializer, | ||
| "project_lead": UserLiteSerializer, | ||
| "state": StateLiteSerializer, | ||
| "created_by": UserLiteSerializer, | ||
| "issue": IssueSerializer, | ||
| "actor": UserLiteSerializer, | ||
| "owned_by": UserLiteSerializer, | ||
| "members": UserLiteSerializer, | ||
| "assignees": UserLiteSerializer, | ||
| "labels": LabelSerializer, | ||
| "issue_cycle": CycleIssueSerializer, | ||
| "parent": IssueLiteSerializer, | ||
| "issue_relation": IssueRelationSerializer, | ||
| "issue_related": RelatedIssueSerializer, | ||
| "issue_inbox": InboxIssueLiteSerializer, | ||
| "issue_reactions": IssueReactionLiteSerializer, | ||
| "issue_link": IssueLinkLiteSerializer, | ||
| "sub_issues": IssueLiteSerializer, | ||
| } | ||
|
Comment on lines
+69
to
+92
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. 🛠️ Refactor suggestion: Remove duplication of the expansion mapper dictionary. The expansion mapper dictionary is duplicated in both places where serializer expansion is resolved, which makes the mappings easy to let drift out of sync. Consider making it a class attribute or using the previously suggested registry approach, e.g.: class DynamicBaseSerializer(BaseSerializer):
EXPANSION_MAPPER = {
"user": "UserLiteSerializer",
"workspace": "WorkspaceLiteSerializer",
# ... other mappings
}
def _get_expansion_serializer(self, key):
serializer_map = get_serializer_map()
return serializer_map.get(key) |
||
|
|
||
| if field not in self.fields and field in expansion: | ||
| self.fields[field] = expansion[field]( | ||
|
|
@@ -104,6 +107,7 @@ def _filter_fields(self, fields): | |
| "issue_attachment", | ||
| "issue_link", | ||
| "sub_issues", | ||
| "issue_related", | ||
| ] | ||
| else False | ||
| ) | ||
|
|
@@ -133,6 +137,7 @@ def to_representation(self, instance): | |
| IssueReactionLiteSerializer, | ||
| IssueAttachmentLiteSerializer, | ||
| IssueLinkLiteSerializer, | ||
| RelatedIssueSerializer, | ||
| ) | ||
|
|
||
| # Expansion mapper | ||
|
|
@@ -153,6 +158,7 @@ def to_representation(self, instance): | |
| "issue_cycle": CycleIssueSerializer, | ||
| "parent": IssueLiteSerializer, | ||
| "issue_relation": IssueRelationSerializer, | ||
| "issue_related": RelatedIssueSerializer, | ||
| "issue_inbox": InboxIssueLiteSerializer, | ||
| "issue_reactions": IssueReactionLiteSerializer, | ||
| "issue_attachment": IssueAttachmentLiteSerializer, | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -976,3 +976,194 @@ def list(self, request, slug, project_id): | |
| ) | ||
|
|
||
| return Response(paginated_data, status=status.HTTP_200_OK) | ||
|
|
||
|
|
||
| class IssueDetailEndpoint(BaseAPIView): | ||
| @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST]) | ||
| def get(self, request, slug, project_id): | ||
| filters = issue_filters(request.query_params, "GET") | ||
| issue = ( | ||
| Issue.issue_objects.filter( | ||
| workspace__slug=slug, project_id=project_id | ||
| ) | ||
| .select_related("workspace", "project", "state", "parent") | ||
| .prefetch_related("assignees", "labels", "issue_module__module") | ||
| .annotate( | ||
| cycle_id=Subquery( | ||
| CycleIssue.objects.filter( | ||
| issue=OuterRef("id"), deleted_at__isnull=True | ||
| ).values("cycle_id")[:1] | ||
| ) | ||
| ) | ||
| .annotate( | ||
| label_ids=Coalesce( | ||
| ArrayAgg( | ||
| "labels__id", | ||
| distinct=True, | ||
| filter=Q( | ||
| ~Q(labels__id__isnull=True) | ||
| & Q(label_issue__deleted_at__isnull=True), | ||
| ), | ||
| ), | ||
| Value([], output_field=ArrayField(UUIDField())), | ||
| ), | ||
| assignee_ids=Coalesce( | ||
| ArrayAgg( | ||
| "assignees__id", | ||
| distinct=True, | ||
| filter=Q( | ||
| ~Q(assignees__id__isnull=True) | ||
| & Q(assignees__member_project__is_active=True) | ||
| & Q(issue_assignee__deleted_at__isnull=True) | ||
| ), | ||
| ), | ||
| Value([], output_field=ArrayField(UUIDField())), | ||
| ), | ||
| module_ids=Coalesce( | ||
| ArrayAgg( | ||
| "issue_module__module_id", | ||
| distinct=True, | ||
| filter=Q( | ||
| ~Q(issue_module__module_id__isnull=True) | ||
| & Q(issue_module__module__archived_at__isnull=True) | ||
| & Q(issue_module__deleted_at__isnull=True) | ||
| ), | ||
| ), | ||
| Value([], output_field=ArrayField(UUIDField())), | ||
| ), | ||
| ) | ||
| .annotate( | ||
| link_count=IssueLink.objects.filter(issue=OuterRef("id")) | ||
| .order_by() | ||
| .annotate(count=Func(F("id"), function="Count")) | ||
| .values("count") | ||
| ) | ||
| .annotate( | ||
| attachment_count=FileAsset.objects.filter( | ||
| issue_id=OuterRef("id"), | ||
| entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, | ||
| ) | ||
| .order_by() | ||
| .annotate(count=Func(F("id"), function="Count")) | ||
| .values("count") | ||
| ) | ||
| .annotate( | ||
| sub_issues_count=Issue.issue_objects.filter( | ||
| parent=OuterRef("id") | ||
| ) | ||
| .order_by() | ||
| .annotate(count=Func(F("id"), function="Count")) | ||
| .values("count") | ||
| ) | ||
| ) | ||
| issue = issue.filter(**filters) | ||
| order_by_param = request.GET.get("order_by", "-created_at") | ||
| # Issue queryset | ||
| issue, order_by_param = order_issue_queryset( | ||
| issue_queryset=issue, | ||
| order_by_param=order_by_param, | ||
| ) | ||
| return self.paginate( | ||
| request=request, | ||
| order_by=order_by_param, | ||
| queryset=(issue), | ||
| on_results=lambda issue: IssueSerializer( | ||
| issue, | ||
| many=True, | ||
| fields=self.fields, | ||
| expand=self.expand, | ||
| ).data, | ||
| ) | ||
|
|
||
|
|
||
| class IssueBulkUpdateDateEndpoint(BaseAPIView): | ||
|
|
||
| def validate_dates( | ||
| self, current_start, current_target, new_start, new_target | ||
| ): | ||
| """ | ||
| Validate that start date is before target date. | ||
| """ | ||
| start = new_start or current_start | ||
| target = new_target or current_target | ||
|
|
||
| if start and target and start > target: | ||
| return False | ||
| return True | ||
|
|
||
| @allow_permission([ROLE.ADMIN, ROLE.MEMBER]) | ||
| def post(self, request, slug, project_id): | ||
|
|
||
| updates = request.data.get("updates", []) | ||
|
|
||
| issue_ids = [update["id"] for update in updates] | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Add validation for the presence of the `id` key. The code assumes that every update in `updates` contains an `id` key; a payload entry without one will raise a `KeyError`. Apply this diff to fix the issue: - issue_ids = [update["id"] for update in updates]
+ issue_ids = [update["id"] for update in updates if "id" in update]
...
- issue_id = update["id"]
+ issue_id = update.get("id")
+ if not issue_id:
+ continue # Skip or handle the error appropriatelyAlso applies to: 1108-1108 |
||
| epoch = int(timezone.now().timestamp()) | ||
|
|
||
| # Fetch all relevant issues in a single query | ||
| issues = list(Issue.objects.filter(id__in=issue_ids)) | ||
| issues_dict = {str(issue.id): issue for issue in issues} | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Ensure consistent types for issue IDs when accessing `issues_dict`. Currently, the dictionary keys are coerced to strings while the IDs used for lookup may be of a different type, so lookups can silently miss and the corresponding updates would be skipped. Apply this diff to fix the issue: - issues_dict = {str(issue.id): issue for issue in issues}
+ issues_dict = {issue.id: issue for issue in issues}Also applies to: 1110-1110 |
||
| issues_to_update = [] | ||
|
|
||
| for update in updates: | ||
| issue_id = update["id"] | ||
| issue = issues_dict.get(issue_id) | ||
|
|
||
| if not issue: | ||
| continue | ||
|
|
||
| start_date = update.get("start_date") | ||
| target_date = update.get("target_date") | ||
| validate_dates = self.validate_dates( | ||
| issue.start_date, issue.target_date, start_date, target_date | ||
| ) | ||
| if not validate_dates: | ||
| return Response( | ||
| { | ||
| "message": "Start date cannot exceed target date", | ||
| }, | ||
| status=status.HTTP_400_BAD_REQUEST, | ||
| ) | ||
|
|
||
| if start_date: | ||
| issue_activity.delay( | ||
| type="issue.activity.updated", | ||
| requested_data=json.dumps( | ||
| {"start_date": update.get("start_date")} | ||
| ), | ||
| current_instance=json.dumps( | ||
| {"start_date": str(issue.start_date)} | ||
| ), | ||
| issue_id=str(issue_id), | ||
| actor_id=str(request.user.id), | ||
| project_id=str(project_id), | ||
| epoch=epoch, | ||
| ) | ||
| issue.start_date = start_date | ||
| issues_to_update.append(issue) | ||
|
|
||
| if target_date: | ||
| issue_activity.delay( | ||
| type="issue.activity.updated", | ||
| requested_data=json.dumps( | ||
| {"target_date": update.get("target_date")} | ||
| ), | ||
| current_instance=json.dumps( | ||
| {"target_date": str(issue.target_date)} | ||
| ), | ||
| issue_id=str(issue_id), | ||
| actor_id=str(request.user.id), | ||
| project_id=str(project_id), | ||
| epoch=epoch, | ||
| ) | ||
| issue.target_date = target_date | ||
| issues_to_update.append(issue) | ||
|
|
||
| # Bulk update issues | ||
| Issue.objects.bulk_update( | ||
| issues_to_update, ["start_date", "target_date"] | ||
| ) | ||
|
|
||
| return Response( | ||
| {"message": "Issues updated successfully"}, | ||
| status=status.HTTP_200_OK, | ||
| ) | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
🛠️ Refactor suggestion
Consider refactoring imports for better maintainability.
The current implementation uses lazy loading to prevent circular dependencies, which is good. However, consider extracting these serializer imports into a separate module (e.g.,
serializer_registry.py) to improve maintainability and reduce duplication, as these imports are repeated in the to_representation method. Example refactor:
Then in this file: