@@ -51,7 +51,9 @@ export class FeatureAmountUploadService {

     let newFeaturesFromCsvUpload;
     try {
-      this.logger.log(`Parsing csv file and saving data to temporary storage`);
+      this.logger.log(
+        `Starting process of parsing csv file and saving amount data to temporary storage`,
+      );
       await this.events.createEvent(data);
       // saving feature data to temporary table
       const featuresRegistry = await this.saveCsvToRegistry(
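Throughout this diff, `this.logger` is the usual NestJS class-scoped logger. For readers outside the codebase, a minimal sketch of that pattern, assuming only what the diff shows (the class name is real; the `upload` method and its body are illustrative):

```ts
import { Injectable, Logger } from '@nestjs/common';

@Injectable()
export class FeatureAmountUploadService {
  // Scoping the logger to the class name prefixes every entry with
  // [FeatureAmountUploadService], which is what makes these step-by-step
  // progress messages traceable in the service logs.
  private readonly logger = new Logger(FeatureAmountUploadService.name);

  // Hypothetical entry point, for illustration only.
  async upload(): Promise<void> {
    this.logger.log(
      `Starting process of parsing csv file and saving amount data to temporary storage`,
    );
  }
}
```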
@@ -64,17 +66,16 @@ export class FeatureAmountUploadService {
       }
       // Saving new features to apiDB 'features' table

-      this.logger.log(`Parsing csv file`);
-
+      this.logger.log(`Saving new features to (apiDB).features table...`);
       newFeaturesFromCsvUpload = await this.saveNewFeaturesFromCsvUpload(
         apiQueryRunner,
         featuresRegistry.right.id,
         data.projectId,
       );
-
+      this.logger.log(`New features saved in (apiDB).features table`);
       // Saving new features amounts and geoms to geoDB 'features_amounts' table
       this.logger.log(
-        `Saving uploaded features data from temporary table to permanent tables`,
+        `Starting the process of saving new features amounts and geoms to (geoDB).features_amounts table`,
       );
       await this.saveNewFeaturesAmountsFromCsvUpload(
         newFeaturesFromCsvUpload,
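`featuresRegistry.right.id` implies `saveCsvToRegistry` returns an `Either`-style value, with the error branch handled in the elided lines just above this hunk. A hedged sketch of that convention using fp-ts; the types, function, and its body are illustrative stand-ins, not the project's actual definitions:

```ts
import { Either, isLeft, left, right } from 'fp-ts/Either';

// Illustrative types: the real registry entity and error union live
// elsewhere in the codebase.
type UploadError = 'featureNamesAlreadyUsed' | 'unknownPuids';
interface Registry {
  id: string;
}

// Stand-in for saveCsvToRegistry: fails with a Left instead of throwing.
async function saveCsvToRegistry(
  csvIsValid: boolean,
): Promise<Either<UploadError, Registry>> {
  return csvIsValid ? right({ id: 'registry-id' }) : left('unknownPuids');
}

async function run(): Promise<void> {
  const featuresRegistry = await saveCsvToRegistry(true);
  if (isLeft(featuresRegistry)) {
    // Bail out before touching `.right`; TypeScript narrows the union here.
    throw new Error(featuresRegistry.left);
  }
  // Safe after the guard: this is the `featuresRegistry.right.id` shape
  // used in the diff.
  console.log(featuresRegistry.right.id);
}
```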
@@ -86,20 +87,24 @@ export class FeatureAmountUploadService {

       // Removing temporary data from apiDB uploads tables
       this.logger.log(
-        `Removing data from temporary tables after successful upload`,
+        `Removing data from temporary tables after successful upload...`,
       );
       await apiQueryRunner.manager.delete(FeatureAmountUploadRegistry, {
         id: featuresRegistry.right.id,
       });

+      this.logger.log(
+        `Upload temporary data removed from apiDB uploads tables`,
+      );
       // Setting project source to legacy-import to create puvspr.dat files from pre-calculated amounts, to allow to use new features after upload
-      this.logger.log(`Updating project sources value`);
+      this.logger.log(`Updating project sources value to legacy-import...`);
       await this.updateProjectSources(
         data.projectId,
         ProjectSourcesEnum.legacyImport,
         apiQueryRunner.manager,
       );

+      this.logger.log(`Csv file upload process finished successfully`);
       // Committing transaction

       await apiQueryRunner.commitTransaction();
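The `commitTransaction()` call closes the happy path of a standard TypeORM `QueryRunner` transaction bracket. A minimal sketch of that lifecycle, assuming a `DataSource` is available; the helper name and structure are illustrative, not the service's actual code:

```ts
import { DataSource, QueryRunner } from 'typeorm';

// Sketch of the bracket the upload presumably runs inside: start, do work,
// commit; roll back on any error; always release the runner.
async function withApiTransaction(
  dataSource: DataSource,
  work: (runner: QueryRunner) => Promise<void>,
): Promise<void> {
  const apiQueryRunner = dataSource.createQueryRunner();
  await apiQueryRunner.connect();
  await apiQueryRunner.startTransaction();
  try {
    await work(apiQueryRunner);
    // Mirrors the `await apiQueryRunner.commitTransaction()` in the diff.
    await apiQueryRunner.commitTransaction();
  } catch (err) {
    // A failure at any step undoes the temporary registry rows too.
    await apiQueryRunner.rollbackTransaction();
    throw err;
  } finally {
    await apiQueryRunner.release();
  }
}
```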
@@ -131,13 +136,13 @@ export class FeatureAmountUploadService {
     queryRunner: QueryRunner,
   ): Promise<any> {
     try {
-      this.logger.log(`Parsing csv file`);
+      this.logger.log(`Parsing csv file...`);

       const parsedFile = await featureAmountCsvParser(data.fileBuffer);

       const { featureNames, puids } = this.getFeatureNamesAndPuids(parsedFile);

-      this.logger.log(`Validating parsed csv file`);
+      this.logger.log(`Validating parsed csv file...`);

       if (
         await this.areFeatureNamesNotAlreadyUsedInProject(
@@ -152,7 +157,7 @@ export class FeatureAmountUploadService {
         return left(unknownPuidsInFeatureAmountCsvUpload);
       }

-      this.logger.log(`Saving parsed data to temporary storage`);
+      this.logger.log(`Saving parsed data to temporary storage...`);
       const importedRegistry = await this.saveFeaturesToRegistry(
         parsedFile,
         data.projectId,
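The validation above reports failures as typed `left(...)` values rather than thrown exceptions, as `return left(unknownPuidsInFeatureAmountCsvUpload)` shows. A hedged sketch of how such a puid check could look; the row shape, the symbol definition, and the function are illustrative stand-ins for the project's real ones:

```ts
import { Either, left, right } from 'fp-ts/Either';

// Illustrative error symbol; the real one is defined elsewhere in the codebase.
export const unknownPuidsInFeatureAmountCsvUpload = Symbol(
  'unknown puids in feature amount csv upload',
);

// Illustrative shape of one parsed csv row.
interface FeatureAmountRow {
  featureName: string;
  puid: number;
  amount: number;
}

// Validate that every puid in the csv belongs to the project before
// anything is written to the temporary table.
function validatePuids(
  parsedRows: FeatureAmountRow[],
  knownPuids: Set<number>,
): Either<typeof unknownPuidsInFeatureAmountCsvUpload, FeatureAmountRow[]> {
  const unknown = parsedRows.filter((row) => !knownPuids.has(row.puid));
  if (unknown.length > 0) {
    return left(unknownPuidsInFeatureAmountCsvUpload);
  }
  return right(parsedRows);
}
```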
@@ -179,23 +184,23 @@ export class FeatureAmountUploadService {
       CHUNK_SIZE_FOR_BATCH_APIDB_OPERATIONS,
     );

-    this.logger.log(`Saving a new upload data to temporary table`);
+    this.logger.log(`Saving new upload data to temporary table...`);
     const newUpload = await entityManager
       .getRepository(FeatureAmountUploadRegistry)
       .save({
         projectId,
         userId,
       });
     for (const [index, chunk] of featuresChunks.entries()) {
-      this.logger.log(`Inserting chunk ${index} to temporary table`);
+      this.logger.log(`Inserting chunk ${index} to temporary table...`);
       await entityManager
         .createQueryBuilder()
         .insert()
         .into(UploadedFeatureAmount)
         .values(chunk.map((feature) => ({ ...feature, upload: newUpload })))
         .execute();
     }
-    this.logger.log(`New upload data saved to temporary table`);
+    this.logger.log(`New csv upload data saved to temporary table`);
     return newUpload;
   }

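`featuresChunks` is presumably the parsed rows split into batches of `CHUNK_SIZE_FOR_BATCH_APIDB_OPERATIONS`, so each chunk becomes one bulk insert instead of thousands of single-row statements. A small sketch of that batching; using lodash's `chunk` here is an assumption, and the constant's value is illustrative:

```ts
import { chunk } from 'lodash';

// Illustrative value; the real constant is defined elsewhere in the codebase.
const CHUNK_SIZE_FOR_BATCH_APIDB_OPERATIONS = 1000;

const parsedRows = Array.from({ length: 2500 }, (_, i) => ({ puid: i }));

// 2500 rows -> chunks of 1000, 1000 and 500 rows; each chunk is written
// with a single multi-row INSERT.
const featuresChunks = chunk(parsedRows, CHUNK_SIZE_FOR_BATCH_APIDB_OPERATIONS);

for (const [index, rows] of featuresChunks.entries()) {
  console.log(`Inserting chunk ${index} to temporary table... (${rows.length} rows)`);
}
```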
@@ -239,7 +244,11 @@ export class FeatureAmountUploadService {
     projectId: string,
   ) {
     for (const [index, newFeature] of newFeaturesFromCsvUpload.entries()) {
-      this.logger.log(`Getting feature amounts for feature ${index}`);
+      this.logger.log(
+        `Getting feature amounts for feature number ${index + 1}: ${
+          newFeature.feature_class_name
+        }`,
+      );
       const featureAmounts = await apiQueryRunner.manager
         .createQueryBuilder()
         .select(['fa.puid', 'fa.amount'])
@@ -255,14 +264,17 @@ export class FeatureAmountUploadService {
         CHUNK_SIZE_FOR_BATCH_APIDB_OPERATIONS,
       );

+      this.logger.log(
+        `Feature data divided into ${featuresChunks.length} chunks`,
+      );
       for (const [amountIndex, featureChunk] of featuresChunks.entries()) {
         this.logger.log(
-          `Saving chunk ${amountIndex} of amounts of feature ${index}`,
+          `Starting the process of saving chunk with index ${amountIndex} of amounts of feature ${newFeature.feature_class_name}...`,
         );
         const firstParameterNumber = 2;
         const parameters: any[] = [projectId];
         this.logger.log(
-          `Generating values to insert for chunk ${amountIndex} of amounts of feature ${index}`,
+          `Generating values to insert for chunk with index ${amountIndex}...`,
         );
         const valuesToInsert = featureChunk.map((featureAmount, index) => {
           parameters.push(
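`firstParameterNumber = 2` exists because `$1` is reserved for `projectId` in the `parameters` array, so each mapped row claims the next consecutive run of positional placeholders. A hedged sketch of that value-list generation; the column layout, row shape, and helper are illustrative, not the service's actual SQL:

```ts
// Illustrative row shape; the real one carries geometry references too.
interface FeatureAmount {
  featureId: string;
  puid: number;
  amount: number;
}

// Build the VALUES fragment and parameter array for one chunk. $1 holds
// projectId, so row 0 uses ($2, $3, $4, $1), row 1 uses ($5, $6, $7, $1), ...
function buildInsertValues(
  projectId: string,
  featureChunk: FeatureAmount[],
): { valuesToInsert: string[]; parameters: any[] } {
  const firstParameterNumber = 2;
  const columnsPerRow = 3;
  const parameters: any[] = [projectId];
  const valuesToInsert = featureChunk.map((featureAmount, index) => {
    parameters.push(featureAmount.featureId, featureAmount.puid, featureAmount.amount);
    const base = firstParameterNumber + index * columnsPerRow;
    return `($${base}, $${base + 1}, $${base + 2}, $1)`;
  });
  return { valuesToInsert, parameters };
}

// The chunk's SQL then reads roughly:
// INSERT INTO features_data (feature_id, puid, amount, project_id)
// VALUES ($2,$3,$4,$1), ($5,$6,$7,$1), ... executed with `parameters`.
```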
@@ -288,7 +300,7 @@ export class FeatureAmountUploadService {
         });

         this.logger.log(
-          `Inserting amount values of chunk ${amountIndex} of amounts of feature ${index} into geoDB`,
+          `Inserting amount values of chunk with index ${amountIndex} into (geoDB).features_data table...`,
         );
         await geoQueryRunner.manager.query(
           `
@@ -303,12 +315,14 @@ export class FeatureAmountUploadService {
           parameters,
         );
         this.logger.log(
-          `Chunk ${amountIndex} of amounts of feature ${index} saved to geoDB features_data`,
+          `Chunk with index ${amountIndex} saved to (geoDB).features_data`,
         );
       }
-      this.logger.log(`All chunks of feature ${index} saved`);
+      this.logger.log(
+        `All chunks of feature ${newFeature.feature_class_name} saved`,
+      );
     }
-    this.logger.log(`All features data saved`);
+    this.logger.log(`All new features data saved to (geoDB).features_data`);
   }

   private async areFeatureNamesNotAlreadyUsedInProject(