Merged
52 commits
bbdcf01
hotfix: increase max duration to 300s for validate count
SandipBajracharya Jan 27, 2026
ffa473d
feat(OUT-2914): store to attachment table when attachment is uploaded.
arpandhakal Jan 15, 2026
69387a9
feat(OUT-2914): attachments table create and delete entries progress
arpandhakal Jan 16, 2026
d848ec4
fix(OUT-2914): refactoring + fixed replied/comments attachment creati…
arpandhakal Jan 19, 2026
68a7826
fix(OUT-2914): refactoring + fixed replied/comments attachment creati…
arpandhakal Jan 19, 2026
81b0b88
fix(OUT-2914): applied requested changes, heavy refactoring
arpandhakal Jan 20, 2026
1c0d11c
fix(OUT-2914): added a check for workspace id before uploading an att…
arpandhakal Jan 20, 2026
c3752d6
feat(OUT-2917): public api to list comments of a task
SandipBajracharya Jan 14, 2026
9a6c08b
refactor(OUT-2917): expose comments list route as sub-resource on tasks
SandipBajracharya Jan 14, 2026
820cc3b
fix(OUT-2917): await path params
SandipBajracharya Jan 15, 2026
96b8be2
refactor(OUT-2917): implemented proper typing, validation
SandipBajracharya Jan 15, 2026
1626f0e
perf(OUT-2917): index comment table and get multiple signed urls from…
SandipBajracharya Jan 15, 2026
4927e5c
fix(OUT-2917): sequentially map the attachments
SandipBajracharya Jan 15, 2026
1e0923a
feat(OUT-2919): create public api to read single comment of a task
SandipBajracharya Jan 15, 2026
abd29a9
fix(OUT-2919): await path params
SandipBajracharya Jan 15, 2026
94f9963
refactor(OUT-2919): use object parameter in function
SandipBajracharya Jan 15, 2026
4d43464
feat(OUT-2917): public api to list comments of a task
SandipBajracharya Jan 14, 2026
3dba06c
feat(OUT-2920): create public API to delete a comment
SandipBajracharya Jan 15, 2026
fa35fc3
fix(OUT-2920): remove double file import
SandipBajracharya Jan 21, 2026
5c466c4
fix(OUT-2920): file import error
SandipBajracharya Jan 21, 2026
520515c
feat(OUT-2938): secure public comments api
SandipBajracharya Jan 21, 2026
94809aa
feat(OUT-2920): create public API to delete a comment
SandipBajracharya Jan 15, 2026
6a82963
feat(OUT-2940): delete attachments from bucket when a comment is deleted
SandipBajracharya Jan 16, 2026
c0bf72d
fix(OUT-2940): remove double file import
SandipBajracharya Jan 21, 2026
cafba8b
fix(OUT-2940): not create sign url when attachment is deleted
SandipBajracharya Jan 22, 2026
50ed7d5
feat(OUT-2921): dispatch webhook event when comment added on task
SandipBajracharya Jan 19, 2026
528d70d
refactor(OUT-2921): include attachments in create comment response
SandipBajracharya Jan 19, 2026
e93f7d0
fix(OUT-2921): remove double file import
SandipBajracharya Jan 26, 2026
1162621
feat(OUT-2923): include attachments attribute in public tasks api
SandipBajracharya Jan 19, 2026
84ff623
chore(OUT-2923): change download url to have null value if the attach…
SandipBajracharya Jan 19, 2026
7b1650e
chore(OUT-2923): code cleanup
SandipBajracharya Jan 19, 2026
210d8e7
feat(OUT-2923): delete attachments of task when task is deleted
SandipBajracharya Jan 22, 2026
1bb20ad
fix(OUT-2923): return download url null for deleted attachments
SandipBajracharya Jan 22, 2026
80c22ba
feat(OUT-2923): filter out attachments that are not available in the …
SandipBajracharya Jan 22, 2026
f98308e
feat(OUT-2923): remove commentId null condition
SandipBajracharya Jan 26, 2026
76e12df
refactor(OUT-2923): rename classes, remove functions with same functi…
SandipBajracharya Jan 26, 2026
c2d8e08
feat(OUT-2923): remove attachments from the bucket when a task is del…
SandipBajracharya Jan 26, 2026
1641114
chore(OUT-2923): remove deletedDate attribute from the attachment res…
SandipBajracharya Jan 26, 2026
364b0f5
fix(OUT-2961): include CU to create attachments
SandipBajracharya Jan 26, 2026
f47d980
fix(OUT-2961): dispatch comment.created webhook with signed attachments
SandipBajracharya Jan 26, 2026
68d5adb
feat(OUT-3009): removed the requirement of taskId in comment endpoints
arpandhakal Jan 27, 2026
c861459
fix(OUT-3009): applied requested changes, changed api route from api/…
arpandhakal Jan 27, 2026
3b9a75d
fix(OUT-3009): applied requested changes, changed api route from api/…
arpandhakal Jan 27, 2026
4eb3808
fix(OUT-3009): some cleaning jobs
arpandhakal Jan 28, 2026
81fb0f4
fix(OUT-3002): sanitized the contents and body of tasks and comments …
arpandhakal Jan 27, 2026
291dcdc
fix(OUT-3002): added jsdoc to sanitizeContent util
arpandhakal Jan 27, 2026
5e53c85
fix(OUT-3004): Comment attachment fileName should be clean filename.
arpandhakal Jan 28, 2026
488c819
fix(OUT-3004): applied requested changes
arpandhakal Jan 28, 2026
7a5cba4
fix(OUT-3000): added a backfill script to populate initiatorIds for o…
arpandhakal Jan 28, 2026
cd1b09e
fix(OUT-3000): used object map for quick lookup instead of storing iu…
arpandhakal Jan 28, 2026
db2e39d
fix(OUT-3033): if comment not found, threw a 404 error with proper er…
arpandhakal Jan 30, 2026
a7423e1
chore(out-2917): switch to avatar component from design system
priosshrsth Jan 16, 2026
2 changes: 1 addition & 1 deletion .nvmrc
@@ -1 +1 @@
-v20.19.1
+v20
2 changes: 1 addition & 1 deletion mise.toml
@@ -1,2 +1,2 @@
[tools]
node = "20.19.4"
node = "20"
2 changes: 2 additions & 0 deletions package.json
@@ -107,6 +107,8 @@
"cmd:delete-duplicate-notifications": "tsx ./src/cmd/delete-duplicate-notifications",
"cmd:normalize-filterOptions-assignee": "tsx ./src/cmd/normalize-filterOptions-assignee",
"cmd:post-deploy-m15": "tsx ./src/cmd/post-deploy-m15",
"cmd:backfill-attachments": "tsx ./src/cmd/backfill-attachments",
"cmd:backfill-initiatorType-in-comments": "tsx ./src/cmd/backfill-initiatorType-in-comments",
"db:grant-supabase-privileges": "node src/lib/supabase-privilege",
"deploy": "npx trigger.dev@latest deploy",
"dev": "next dev",
@@ -0,0 +1,2 @@
-- CreateIndex
CREATE INDEX "IX_Comments_taskId_workspaceId_createdAt" ON "Comments"("taskId", "workspaceId", "createdAt" DESC);
1 change: 1 addition & 0 deletions prisma/schema/comment.prisma
@@ -22,4 +22,5 @@ model Comment {
deletedAt DateTime? @db.Timestamptz()

@@map("Comments")
+@@index([taskId, workspaceId, createdAt(sort: Desc)], name: "IX_Comments_taskId_workspaceId_createdAt")
}
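The composite index matches the read path of the public comments list. As a rough sketch only — not code from this PR — a Prisma query of this shape is what the taskId, workspaceId, createdAt DESC index is meant to serve:

// Illustrative only: query shape served by IX_Comments_taskId_workspaceId_createdAt.
import { PrismaClient } from '@prisma/client'

const db = new PrismaClient()

function listTaskComments(taskId: string, workspaceId: string) {
  // Equality filters on taskId and workspaceId plus the createdAt DESC ordering
  // can all be satisfied from the index, avoiding a separate sort step.
  return db.comment.findMany({
    where: { taskId, workspaceId },
    orderBy: { createdAt: 'desc' },
  })
}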
96 changes: 48 additions & 48 deletions sentry.client.config.ts
@@ -2,55 +2,55 @@
// The config you add here will be used whenever a user loads a page in their browser.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/

-import * as Sentry from "@sentry/nextjs";
+import * as Sentry from '@sentry/nextjs'

-const dsn = process.env.NEXT_PUBLIC_SENTRY_DSN || process.env.SENTRY_DSN;
-const vercelEnv = process.env.NEXT_PUBLIC_VERCEL_ENV;
-const isProd = process.env.NEXT_PUBLIC_VERCEL_ENV === "production";
+const dsn = process.env.NEXT_PUBLIC_SENTRY_DSN || process.env.SENTRY_DSN
+const vercelEnv = process.env.NEXT_PUBLIC_VERCEL_ENV
+const isProd = process.env.NEXT_PUBLIC_VERCEL_ENV === 'production'

if (dsn) {
-Sentry.init({
-dsn,
-
-// Adjust this value in production, or use tracesSampler for greater control
-tracesSampleRate: isProd ? 0.2 : 1,
-profilesSampleRate: 0.1,
-// NOTE: reducing sample only 10% of transactions in prod to get general trends instead of detailed and overfitted data
-
-// Setting this option to true will print useful information to the console while you're setting up Sentry.
-debug: false,
-
-// You can remove this option if you're not planning to use the Sentry Session Replay feature:
-// NOTE: Since session replay barely helps us anyways, getting rid of it to reduce some bundle size at least
-// replaysOnErrorSampleRate: 1.0,
-// replaysSessionSampleRate: 0,
-integrations: [
-Sentry.browserTracingIntegration({
-beforeStartSpan: (e) => {
-console.info("SentryBrowserTracingSpan", e.name);
-return e;
-},
-}),
-// Sentry.replayIntegration({
-// Additional Replay configuration goes in here, for example:
-// maskAllText: true,
-// blockAllMedia: true,
-// }),
-],
-
-// ignoreErrors: [/fetch failed/i],
-ignoreErrors: [/fetch failed/i],
-
-beforeSend(event) {
-if (!isProd && event.type === undefined) {
-return null;
-}
-event.tags = {
-...event.tags,
-// Adding additional app_env tag for cross-checking
-app_env: isProd ? "production" : vercelEnv || "development",
-};
-return event;
-},
-});
+Sentry.init({
+dsn,
+
+// Adjust this value in production, or use tracesSampler for greater control
+tracesSampleRate: isProd ? 0.2 : 1,
+profilesSampleRate: 0.1,
+// NOTE: reducing sample only 10% of transactions in prod to get general trends instead of detailed and overfitted data
+
+// Setting this option to true will print useful information to the console while you're setting up Sentry.
+debug: false,
+
+// You can remove this option if you're not planning to use the Sentry Session Replay feature:
+// NOTE: Since session replay barely helps us anyways, getting rid of it to reduce some bundle size at least
+// replaysOnErrorSampleRate: 1.0,
+// replaysSessionSampleRate: 0,
+integrations: [
+Sentry.browserTracingIntegration({
+beforeStartSpan: (e) => {
+console.info('SentryBrowserTracingSpan', e.name)
+return e
+},
+}),
+// Sentry.replayIntegration({
+// Additional Replay configuration goes in here, for example:
+// maskAllText: true,
+// blockAllMedia: true,
+// }),
+],
+
+// ignoreErrors: [/fetch failed/i],
+ignoreErrors: [/fetch failed/i],
+
+beforeSend(event) {
+if (!isProd && event.type === undefined) {
+return null
+}
+event.tags = {
+...event.tags,
+// Adding additional app_env tag for cross-checking
+app_env: isProd ? 'production' : vercelEnv || 'development',
+}
+return event
+},
+})
}
40 changes: 20 additions & 20 deletions sentry.server.config.ts
@@ -2,31 +2,31 @@
// The config you add here will be used whenever the server handles a request.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/

-import * as Sentry from "@sentry/nextjs";
+import * as Sentry from '@sentry/nextjs'

-const dsn = process.env.NEXT_PUBLIC_SENTRY_DSN || process.env.SENTRY_DSN;
-const vercelEnv = process.env.NEXT_PUBLIC_VERCEL_ENV;
-const isProd = process.env.NEXT_PUBLIC_VERCEL_ENV === "production";
+const dsn = process.env.NEXT_PUBLIC_SENTRY_DSN || process.env.SENTRY_DSN
+const vercelEnv = process.env.NEXT_PUBLIC_VERCEL_ENV
+const isProd = process.env.NEXT_PUBLIC_VERCEL_ENV === 'production'

if (dsn) {
-Sentry.init({
-dsn,
+Sentry.init({
+dsn,

-// Adjust this value in production, or use tracesSampler for greater control
-tracesSampleRate: 1,
+// Adjust this value in production, or use tracesSampler for greater control
+tracesSampleRate: 1,

-// Setting this option to true will print useful information to the console while you're setting up Sentry.
-debug: false,
+// Setting this option to true will print useful information to the console while you're setting up Sentry.
+debug: false,

-// Uncomment the line below to enable Spotlight (https://spotlightjs.com)
-// spotlight: process.env.NODE_ENV === 'development',
-ignoreErrors: [/fetch failed/i],
+// Uncomment the line below to enable Spotlight (https://spotlightjs.com)
+// spotlight: process.env.NODE_ENV === 'development',
+ignoreErrors: [/fetch failed/i],

-beforeSend(event) {
-if (!isProd && event.type === undefined) {
-return null;
-}
-return event;
-},
-});
+beforeSend(event) {
+if (!isProd && event.type === undefined) {
+return null
+}
+return event
+},
+})
}
2 changes: 1 addition & 1 deletion src/app/api/activity-logs/services/activity-log.service.ts
@@ -9,7 +9,7 @@ import {
SchemaByActivityType,
} from '@api/activity-logs/const'
import { LogResponse, LogResponseSchema } from '@api/activity-logs/schemas/LogResponseSchema'
-import { CommentService } from '@api/comment/comment.service'
+import { CommentService } from '@/app/api/comments/comment.service'
import APIError from '@api/core/exceptions/api'
import User from '@api/core/models/User.model'
import { BaseService } from '@api/core/services/base.service'
62 changes: 59 additions & 3 deletions src/app/api/attachments/attachments.service.ts
@@ -9,6 +9,7 @@ import APIError from '@api/core/exceptions/api'
import httpStatus from 'http-status'
import { SupabaseService } from '@api/core/services/supabase.service'
import { signedUrlTtl } from '@/constants/attachments'
+import { PrismaClient } from '@prisma/client'

export class AttachmentsService extends BaseService {
async getAttachments(taskId: string) {
@@ -30,7 +31,7 @@
const newAttachment = await this.db.attachment.create({
data: {
...data,
-createdById: z.string().parse(this.user.internalUserId),
+createdById: z.string().parse(this.user.internalUserId || this.user.clientId), // CU are also allowed to create attachments
workspaceId: this.user.workspaceId,
},
})
@@ -40,15 +41,15 @@
async createMultipleAttachments(data: CreateAttachmentRequest[]) {
const policyGate = new PoliciesService(this.user)
policyGate.authorize(UserAction.Create, Resource.Attachments)
-const userId = z.string().parse(this.user.internalUserId)

// TODO: @arpandhakal - $transaction here could consume a lot of sequential db connections, better to use Promise.all
// and reuse active connections instead.
const newAttachments = await this.db.$transaction(async (prisma) => {
const createPromises = data.map((attachmentData) =>
prisma.attachment.create({
data: {
...attachmentData,
-createdById: userId,
+createdById: z.string().parse(this.user.internalUserId || this.user.clientId), // CU are also allowed to create attachments
workspaceId: this.user.workspaceId,
},
}),
@@ -86,4 +87,59 @@
const { data } = await supabase.supabase.storage.from(supabaseBucket).createSignedUrl(filePath, signedUrlTtl)
return data?.signedUrl
}

async deleteAttachmentsOfComment(commentId: string) {
const policyGate = new PoliciesService(this.user)
policyGate.authorize(UserAction.Delete, Resource.Attachments)

const commentAttachment = await this.db.$transaction(async (tx) => {
const commentAttachment = await tx.attachment.findMany({
where: { commentId: commentId, workspaceId: this.user.workspaceId },
select: { filePath: true },
})

await tx.attachment.deleteMany({
where: { commentId: commentId, workspaceId: this.user.workspaceId },
})

return commentAttachment
})

// Delete attachments directly from the bucket when a comment is deleted.
// Postgres transactions do not cover Supabase storage objects, so the bucket cleanup runs after the DB records are deleted.
const filePathArray = commentAttachment.map((el) => el.filePath)
const supabase = new SupabaseService()
await supabase.removeAttachmentsFromBucket(filePathArray)
}

async deleteAttachmentsOfTask(taskIds: string[]) {
const taskAttachment = await this.db.$transaction(async (tx) => {
const taskAttachment = await tx.attachment.findMany({
where: {
taskId: {
in: taskIds,
},
workspaceId: this.user.workspaceId,
},
select: { filePath: true },
})

await tx.attachment.deleteMany({
where: {
taskId: {
in: taskIds,
},
workspaceId: this.user.workspaceId,
},
})

return taskAttachment
})

// Delete attachments directly from the bucket when a task is deleted.
// Postgres transactions do not cover Supabase storage objects, so the bucket cleanup runs after the DB records are deleted.
const filePathArray = taskAttachment.map((el) => el.filePath)
const supabase = new SupabaseService()
await supabase.removeAttachmentsFromBucket(filePathArray)
}
}
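The TODO above createMultipleAttachments notes that an interactive $transaction holds one connection for the whole batch. A minimal sketch of the suggested Promise.all variant follows — the CreateAttachmentRequest field names are assumed from the rest of this PR, and this is not the PR's implementation:

// Sketch of the Promise.all alternative the TODO describes. Each create is an
// independent query drawn from the connection pool instead of running
// sequentially inside one interactive transaction.
import { Attachment, PrismaClient } from '@prisma/client'

type AttachmentInput = {
  taskId: string
  fileName: string
  filePath: string
  fileSize: number
  fileType: string
  commentId?: string
} // assumed shape of CreateAttachmentRequest

function createAttachmentsConcurrently(
  db: PrismaClient,
  rows: AttachmentInput[],
  createdById: string,
  workspaceId: string,
): Promise<Attachment[]> {
  return Promise.all(
    rows.map((row) =>
      db.attachment.create({
        data: { ...row, createdById, workspaceId },
      }),
    ),
  )
}

The trade-off is atomicity: with Promise.all a partial failure leaves some rows created, which the original $transaction avoids.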
16 changes: 16 additions & 0 deletions src/app/api/attachments/public/public.dto.ts
@@ -0,0 +1,16 @@
import { RFC3339DateSchema } from '@/types/common'
import { AssigneeType } from '@prisma/client'
import z from 'zod'

export const PublicAttachmentDtoSchema = z.object({
id: z.string().uuid(),
fileName: z.string(),
fileSize: z.number(),
mimeType: z.string(),
downloadUrl: z.string().url().nullable(),
uploadedBy: z.string().uuid(),
uploadedByUserType: z.nativeEnum(AssigneeType).nullable(),
uploadedDate: RFC3339DateSchema,
})

export type PublicAttachmentDto = z.infer<typeof PublicAttachmentDtoSchema>
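For reference, a hedged usage sketch of the DTO schema at a route boundary. The literal values below are placeholders, and the exact string format accepted by RFC3339DateSchema is assumed rather than taken from this PR:

// Validate an outgoing payload against the public attachment DTO before returning it.
import { PublicAttachmentDtoSchema } from '@/app/api/attachments/public/public.dto'

const result = PublicAttachmentDtoSchema.safeParse({
  id: '5f0d2c2e-9a4b-4c59-9f1e-6d2c8b7a1e3f',
  fileName: 'invoice.pdf',
  fileSize: 52431,
  mimeType: 'application/pdf',
  downloadUrl: null, // null is allowed, e.g. for deleted attachments
  uploadedBy: '2b9c7a1d-3e4f-4a5b-8c6d-7e8f9a0b1c2d',
  uploadedByUserType: null,
  uploadedDate: '2026-01-26T09:30:00Z',
})

if (!result.success) {
  console.error(result.error.flatten())
}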
65 changes: 65 additions & 0 deletions src/app/api/attachments/public/public.serializer.ts
@@ -0,0 +1,65 @@
import { PublicAttachmentDto } from '@/app/api/attachments/public/public.dto'
import { RFC3339DateSchema } from '@/types/common'
import { toRFC3339 } from '@/utils/dateHelper'
import { sanitizeFileName } from '@/utils/sanitizeFileName'
import { createSignedUrls } from '@/utils/signUrl'
import { Attachment, CommentInitiator } from '@prisma/client'
import z from 'zod'

export class PublicAttachmentSerializer {
/**
*
* @param attachments array of Attachment records to serialize
* @param uploadedBy id of the user who posted the comment (falls back to attachment.createdById when omitted)
* @param uploadedByUserType user type of the user who posted the comment
* @param content body of the comment; attachments whose file paths are no longer referenced in it are dropped
* @returns array of PublicAttachmentDto
*/
static async serializeAttachments({
attachments,
uploadedByUserType,
content,
uploadedBy,
}: {
attachments: Attachment[]
uploadedByUserType: CommentInitiator | null
content: string | null
uploadedBy?: string
}): Promise<PublicAttachmentDto[]> {
// Keep only the attachments whose file paths are still referenced in the content.
const attachmentPaths = attachments
.map((attachment) => {
return attachment.filePath
})
.filter((path) => content?.includes(path))

const signedUrls = await PublicAttachmentSerializer.getFormattedSignedUrls(attachmentPaths)

return attachments
.map((attachment) => {
const url = signedUrls.find((item) => item.path === attachment.filePath)?.url
if (!url) return null
return {
id: attachment.id,
fileName: sanitizeFileName(attachment.fileName),
fileSize: attachment.fileSize,
mimeType: attachment.fileType,
downloadUrl: attachment.deletedAt
? null
: z
.string()
.url({ message: `Invalid downloadUrl for attachment with id ${attachment.id}` })
.parse(url),
uploadedBy: uploadedBy || attachment.createdById,
uploadedByUserType: uploadedByUserType,
uploadedDate: RFC3339DateSchema.parse(toRFC3339(attachment.createdAt)),
}
})
.filter((attachment) => attachment !== null)
}

static async getFormattedSignedUrls(attachmentPaths: string[]) {
if (!attachmentPaths.length) return []
const signedUrls = await createSignedUrls(attachmentPaths)
return signedUrls.map((item) => ({ path: item.path, url: item.signedUrl }))
}
}
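A hedged sketch of how the serializer is called when assembling a public comment payload; the surrounding variables are placeholders rather than code from this PR:

// Build public attachment DTOs for one comment. Attachments whose file paths no
// longer appear in the comment body are filtered out, and deleted attachments are
// returned with downloadUrl set to null.
import { PublicAttachmentSerializer } from '@/app/api/attachments/public/public.serializer'
import { Attachment, CommentInitiator } from '@prisma/client'

async function toPublicAttachments(
  attachments: Attachment[], // attachment rows loaded for the comment
  commentBody: string | null, // the comment content used for the reference check
  commenterId: string, // id of whoever posted the comment
  initiatorType: CommentInitiator | null,
) {
  return PublicAttachmentSerializer.serializeAttachments({
    attachments,
    content: commentBody,
    uploadedBy: commenterId,
    uploadedByUserType: initiatorType,
  })
}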