Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/develop'
Browse files Browse the repository at this point in the history
Signed-off-by: Denis Bykhov <bykhov.denis@gmail.com>
  • Loading branch information
BykhovDenis committed Oct 23, 2024
2 parents 32d148f + 5eeb828 commit f3f6ce6
Show file tree
Hide file tree
Showing 175 changed files with 1,497 additions and 1,831 deletions.
41 changes: 21 additions & 20 deletions common/config/rush/pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

12 changes: 3 additions & 9 deletions dev/doc-import-tool/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,17 +54,11 @@ export function docImportTool (): void {

const uploadUrl = process.env.UPLOAD_URL ?? '/files'

const mongodbUri = process.env.MONGO_URL
if (mongodbUri === undefined) {
console.log('Please provide mongodb url')
process.exit(1)
}

setMetadata(serverClientPlugin.metadata.Endpoint, accountUrl)
setMetadata(serverToken.metadata.Secret, serverSecret)

async function withStorage (mongodbUri: string, f: (storageAdapter: StorageAdapter) => Promise<any>): Promise<void> {
const adapter = buildStorageFromConfig(storageConfigFromEnv(), mongodbUri)
async function withStorage (f: (storageAdapter: StorageAdapter) => Promise<any>): Promise<void> {
const adapter = buildStorageFromConfig(storageConfigFromEnv())
try {
await f(adapter)
} catch (err: any) {
Expand Down Expand Up @@ -94,7 +88,7 @@ export function docImportTool (): void {
}, space: ${cmd.space}, backend: ${cmd.backend}`
)

await withStorage(mongodbUri, async (storageAdapter) => {
await withStorage(async (storageAdapter) => {
const workspaceId = getWorkspaceId(workspace)

const config: Config = {
Expand Down
11 changes: 0 additions & 11 deletions dev/docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,6 @@ services:
environment:
# - WS_OPERATION=create
- SERVER_SECRET=secret
- MONGO_URL=${MONGO_URL}
- DB_URL=${MONGO_URL}
# - DB_URL=postgresql://postgres:example@postgres:5432
- SES_URL=
Expand All @@ -132,7 +131,6 @@ services:
environment:
# - WS_OPERATION=create
- SERVER_SECRET=secret
- MONGO_URL=${MONGO_URL}
- DB_URL=postgresql://postgres:example@postgres:5432
- SES_URL=
- REGION=pg
Expand Down Expand Up @@ -160,8 +158,6 @@ services:
- COLLABORATOR_PORT=3078
- SECRET=secret
- ACCOUNTS_URL=http://host.docker.internal:3000
- MONGO_URL=${MONGO_URL}
- 'MONGO_OPTIONS={"appName":"collaborator","maxPoolSize":2}'
- STORAGE_CONFIG=${STORAGE_CONFIG}
restart: unless-stopped
front:
Expand All @@ -178,11 +174,8 @@ services:
- 8087:8080
- 8088:8080
environment:
- UV_THREADPOOL_SIZE=10
- SERVER_PORT=8080
- SERVER_SECRET=secret
- MONGO_URL=${MONGO_URL}
- 'MONGO_OPTIONS={"appName":"front","maxPoolSize":1}'
- ACCOUNTS_URL=http://host.docker.internal:3000
- UPLOAD_URL=/files
- ELASTIC_URL=http://host.docker.internal:9200
Expand Down Expand Up @@ -297,8 +290,6 @@ services:
- 4005:4005
environment:
- SECRET=secret
- MONGO_URL=${MONGO_URL}
- 'MONGO_OPTIONS={"appName":"print","maxPoolSize":1}'
- STORAGE_CONFIG=${STORAGE_CONFIG}
deploy:
resources:
Expand All @@ -316,8 +307,6 @@ services:
- ../services/sign/pod-sign/debug/branding.json:/var/cfg/branding.json
environment:
- SECRET=secret
- MONGO_URL=${MONGO_URL}
- 'MONGO_OPTIONS={"appName":"sign","maxPoolSize":1}'
- MINIO_ENDPOINT=minio
- MINIO_ACCESS_KEY=minioadmin
- ACCOUNTS_URL=http://host.docker.internal:3000
Expand Down
7 changes: 6 additions & 1 deletion dev/prod/src/analytics/posthog.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,12 @@ import posthog from 'posthog-js'
export class PosthogAnalyticProvider implements AnalyticProvider {
init(config: Record<string, any>): boolean {
if (config.POSTHOG_API_KEY !== undefined && config.POSTHOG_API_KEY !== '' && config.POSTHOG_HOST !== null) {
posthog.init(config.POSTHOG_API_KEY, { api_host: config.POSTHOG_HOST })
posthog.init(config.POSTHOG_API_KEY, {
api_host: config.POSTHOG_HOST,
autocapture: false,
capture_pageview: false,
capture_pageleave: false
})
return true
}
return false
Expand Down
10 changes: 9 additions & 1 deletion dev/tool/src/__start.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,6 @@ addLocation(serverDriveId, () => import('@hcengineering/server-drive-resources')
addLocation(serverAiBotId, () => import('@hcengineering/server-ai-bot-resources'))

function prepareTools (): {
mongodbUri: string | undefined
dbUrl: string
txes: Tx[]
version: Data<Version>
Expand All @@ -84,4 +83,13 @@ function prepareTools (): {
return { ...prepareToolsRaw(builder(enabled, disabled).getTxes()), version: getModelVersion(), migrateOperations }
}

/**
 * Resolves the MongoDB connection string from the environment.
 *
 * Reads `MONGO_URL`; when the variable is absent the tool cannot run,
 * so an error is logged and the process terminates with exit code 1.
 *
 * @returns the configured MongoDB URL (never returns when unset)
 */
export function getMongoDBUrl (): string {
  const { MONGO_URL: mongoUrl } = process.env
  if (typeof mongoUrl !== 'string') {
    // Fail fast: a tool invocation without a database URL is unusable.
    console.error('please provide mongo DB URL')
    process.exit(1)
  }
  return mongoUrl
}

devTool(prepareTools)
4 changes: 2 additions & 2 deletions dev/tool/src/clean.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1270,7 +1270,7 @@ async function updateYDoc (
doc: RelatedDocument
): Promise<void> {
try {
const ydoc = await loadCollaborativeDoc(storage, workspaceId, _id, ctx)
const ydoc = await loadCollaborativeDoc(ctx, storage, workspaceId, _id)
if (ydoc === undefined) {
ctx.error('document content not found', { document: contentDoc._id })
return
Expand All @@ -1284,7 +1284,7 @@ async function updateYDoc (
})

if (updatedYDoc !== undefined) {
await saveCollaborativeDoc(storage, workspaceId, _id, updatedYDoc, ctx)
await saveCollaborativeDoc(ctx, storage, workspaceId, _id, updatedYDoc)
}
} catch {
// do nothing, the collaborative doc does not seem to exist yet
Expand Down
49 changes: 35 additions & 14 deletions dev/tool/src/db.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,14 @@ import {
type MeasureMetricsContext
} from '@hcengineering/core'
import { getMongoClient, getWorkspaceMongoDB } from '@hcengineering/mongo'
import { convertDoc, createTable, getDBClient, retryTxn, translateDomain } from '@hcengineering/postgres'
import {
convertDoc,
createTable,
getDBClient,
getDocFieldsByDomains,
retryTxn,
translateDomain
} from '@hcengineering/postgres'
import { getTransactorEndpoint } from '@hcengineering/server-client'
import { generateToken } from '@hcengineering/server-token'
import { connect } from '@hcengineering/server-tool'
Expand Down Expand Up @@ -54,10 +61,6 @@ export async function moveFromMongoToPG (
client.close()
}

function escapeBackticks (str: string): string {
return str.replaceAll("'", "''")
}

async function moveWorkspace (
accountDb: AccountDB,
mongo: MongoClient,
Expand Down Expand Up @@ -85,6 +88,13 @@ async function moveWorkspace (
const currentIds = new Set(current.rows.map((r) => r._id))
console.log('move domain', domain)
const docs: Doc[] = []
const fields = getDocFieldsByDomains(domain)
const filedsWithData = [...fields, 'data']
const insertFields: string[] = []
for (const field of filedsWithData) {
insertFields.push(`"${field}"`)
}
const insertStr = insertFields.join(', ')
while (true) {
while (docs.length < 50000) {
const doc = (await cursor.next()) as Doc | null
Expand All @@ -95,18 +105,29 @@ async function moveWorkspace (
if (docs.length === 0) break
while (docs.length > 0) {
const part = docs.splice(0, 500)
const vals = part
.map((doc) => {
const d = convertDoc(doc, ws.workspace)
return `('${d._id}', '${d.workspaceId}', '${d._class}', '${d.createdBy ?? d.modifiedBy}', '${d.modifiedBy}', ${d.modifiedOn}, ${d.createdOn ?? d.modifiedOn}, '${d.space}', ${
d.attachedTo != null ? `'${d.attachedTo}'` : 'NULL'
}, '${escapeBackticks(JSON.stringify(d.data))}')`
})
.join(', ')
const values: any[] = []
const vars: string[] = []
let index = 1
for (let i = 0; i < part.length; i++) {
const doc = part[i]
const variables: string[] = []
const d = convertDoc(domain, doc, ws.workspace)
values.push(d.workspaceId)
variables.push(`$${index++}`)
for (const field of fields) {
values.push(d[field])
variables.push(`$${index++}`)
}
values.push(d.data)
variables.push(`$${index++}`)
vars.push(`(${variables.join(', ')})`)
}
const vals = vars.join(',')
try {
await retryTxn(pgClient, async (client) => {
await client.query(
`INSERT INTO ${translateDomain(domain)} (_id, "workspaceId", _class, "createdBy", "modifiedBy", "modifiedOn", "createdOn", space, "attachedTo", data) VALUES ${vals}`
`INSERT INTO ${translateDomain(domain)} ("workspaceId", ${insertStr}) VALUES ${vals}`,
values
)
})
} catch (err) {
Expand Down
Loading

0 comments on commit f3f6ce6

Please sign in to comment.