Skip to content

Commit 0e93c69

Browse files
committed
Update logs for csv features upload
1 parent 118fd9d commit 0e93c69

File tree

1 file changed

+34
-20
lines changed

1 file changed

+34
-20
lines changed

api/apps/api/src/modules/geo-features/import/features-amounts-upload.service.ts

Lines changed: 34 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,9 @@ export class FeatureAmountUploadService {
5151

5252
let newFeaturesFromCsvUpload;
5353
try {
54-
this.logger.log(`Parsing csv file and saving data to temporary storage`);
54+
this.logger.log(
55+
`Starting process of parsing csv file and saving amount data to temporary storage`,
56+
);
5557
await this.events.createEvent(data);
5658
// saving feature data to temporary table
5759
const featuresRegistry = await this.saveCsvToRegistry(
@@ -64,17 +66,16 @@ export class FeatureAmountUploadService {
6466
}
6567
// Saving new features to apiDB 'features' table
6668

67-
this.logger.log(`Parsing csv file`);
68-
69+
this.logger.log(`Saving new features to (apiDB).features table...`);
6970
newFeaturesFromCsvUpload = await this.saveNewFeaturesFromCsvUpload(
7071
apiQueryRunner,
7172
featuresRegistry.right.id,
7273
data.projectId,
7374
);
74-
75+
this.logger.log(`New features saved in (apiDB).features table`);
7576
// Saving new features amounts and geoms to geoDB 'features_amounts' table
7677
this.logger.log(
77-
`Saving uploaded features data from temporary table to permanent tables`,
78+
`Starting the process of saving new features amounts and geoms to (geoDB).features_amounts table`,
7879
);
7980
await this.saveNewFeaturesAmountsFromCsvUpload(
8081
newFeaturesFromCsvUpload,
@@ -86,20 +87,24 @@ export class FeatureAmountUploadService {
8687

8788
// Removing temporary data from apiDB uploads tables
8889
this.logger.log(
89-
`Removing data from temporary tables after successful upload`,
90+
`Removing data from temporary tables after successful upload...`,
9091
);
9192
await apiQueryRunner.manager.delete(FeatureAmountUploadRegistry, {
9293
id: featuresRegistry.right.id,
9394
});
9495

96+
this.logger.log(
97+
`Upload temporary data removed from apiDB uploads tables`,
98+
);
9599
// Setting project source to legacy-import to create puvspr.dat files from pre-calculated amounts, to allow to use new features after upload
96-
this.logger.log(`Updating project sources value`);
100+
this.logger.log(`Updating project sources value to legacy-import...`);
97101
await this.updateProjectSources(
98102
data.projectId,
99103
ProjectSourcesEnum.legacyImport,
100104
apiQueryRunner.manager,
101105
);
102106

107+
this.logger.log(`Csv file upload process finished successfully`);
103108
// Committing transaction
104109

105110
await apiQueryRunner.commitTransaction();
@@ -131,13 +136,13 @@ export class FeatureAmountUploadService {
131136
queryRunner: QueryRunner,
132137
): Promise<any> {
133138
try {
134-
this.logger.log(`Parsing csv file`);
139+
this.logger.log(`Parsing csv file...`);
135140

136141
const parsedFile = await featureAmountCsvParser(data.fileBuffer);
137142

138143
const { featureNames, puids } = this.getFeatureNamesAndPuids(parsedFile);
139144

140-
this.logger.log(`Validating parsed csv file`);
145+
this.logger.log(`Validating parsed csv file...`);
141146

142147
if (
143148
await this.areFeatureNamesNotAlreadyUsedInProject(
@@ -152,7 +157,7 @@ export class FeatureAmountUploadService {
152157
return left(unknownPuidsInFeatureAmountCsvUpload);
153158
}
154159

155-
this.logger.log(`Saving parsed data to temporary storage`);
160+
this.logger.log(`Saving parsed data to temporary storage...`);
156161
const importedRegistry = await this.saveFeaturesToRegistry(
157162
parsedFile,
158163
data.projectId,
@@ -179,23 +184,23 @@ export class FeatureAmountUploadService {
179184
CHUNK_SIZE_FOR_BATCH_APIDB_OPERATIONS,
180185
);
181186

182-
this.logger.log(`Saving a new upload data to temporary table`);
187+
this.logger.log(`Saving a new upload data to temporary table...`);
183188
const newUpload = await entityManager
184189
.getRepository(FeatureAmountUploadRegistry)
185190
.save({
186191
projectId,
187192
userId,
188193
});
189194
for (const [index, chunk] of featuresChunks.entries()) {
190-
this.logger.log(`Inserting chunk ${index} to temporary table`);
195+
this.logger.log(`Inserting chunk ${index} to temporary table...`);
191196
await entityManager
192197
.createQueryBuilder()
193198
.insert()
194199
.into(UploadedFeatureAmount)
195200
.values(chunk.map((feature) => ({ ...feature, upload: newUpload })))
196201
.execute();
197202
}
198-
this.logger.log(`New upload data saved to temporary table`);
203+
this.logger.log(`New csv upload data saved to temporary tables`);
199204
return newUpload;
200205
}
201206

@@ -239,7 +244,11 @@ export class FeatureAmountUploadService {
239244
projectId: string,
240245
) {
241246
for (const [index, newFeature] of newFeaturesFromCsvUpload.entries()) {
242-
this.logger.log(`Getting feature amounts for feature ${index}`);
247+
this.logger.log(
248+
`Getting feature amounts for feature number ${index + 1}: ${
249+
newFeature.feature_class_name
250+
}`,
251+
);
243252
const featureAmounts = await apiQueryRunner.manager
244253
.createQueryBuilder()
245254
.select(['fa.puid', 'fa.amount'])
@@ -255,14 +264,17 @@ export class FeatureAmountUploadService {
255264
CHUNK_SIZE_FOR_BATCH_APIDB_OPERATIONS,
256265
);
257266

267+
this.logger.log(
268+
`Feature data divided into ${featuresChunks.length} chunks`,
269+
);
258270
for (const [amountIndex, featureChunk] of featuresChunks.entries()) {
259271
this.logger.log(
260-
`Saving chunk ${amountIndex} of amounts of feature ${index}`,
272+
`Starting the process of saving chunk with index ${amountIndex} of amounts of feature ${newFeature.feature_class_name}...`,
261273
);
262274
const firstParameterNumber = 2;
263275
const parameters: any[] = [projectId];
264276
this.logger.log(
265-
`Generating values to insert for chunk ${amountIndex} of amounts of feature ${index}`,
277+
`Generating values to insert for chunk with index ${amountIndex}...`,
266278
);
267279
const valuesToInsert = featureChunk.map((featureAmount, index) => {
268280
parameters.push(
@@ -288,7 +300,7 @@ export class FeatureAmountUploadService {
288300
});
289301

290302
this.logger.log(
291-
`Inserting amount values of chunk ${amountIndex} of amounts of feature ${index} into geoDB`,
303+
`Inserting amount values of chunk with index ${amountIndex} into (geoDB).features_data table...`,
292304
);
293305
await geoQueryRunner.manager.query(
294306
`
@@ -303,12 +315,14 @@ export class FeatureAmountUploadService {
303315
parameters,
304316
);
305317
this.logger.log(
306-
`Chunk ${amountIndex} of amounts of feature ${index} saved to geoDB features_data`,
318+
`Chunk with index ${amountIndex} saved to (geoDB).features_data`,
307319
);
308320
}
309-
this.logger.log(`All chunks of feature ${index} saved`);
321+
this.logger.log(
322+
`All chunks of feature ${newFeature.feature_class_name} saved`,
323+
);
310324
}
311-
this.logger.log(`All features data saved`);
325+
this.logger.log(`All new features data saved to (geoDB).features_data`);
312326
}
313327

314328
private async areFeatureNamesNotAlreadyUsedInProject(

0 commit comments

Comments
 (0)