Skip to content

Commit

Permalink
feat: CLI source analysis
Browse files Browse the repository at this point in the history
  • Loading branch information
ishche committed Jan 10, 2024
1 parent ffa6958 commit 0cfdb2d
Show file tree
Hide file tree
Showing 14 changed files with 268 additions and 148 deletions.
103 changes: 76 additions & 27 deletions server/engine/src/main/java/org/eclipse/lsp/cobol/cli/Cli.java
Original file line number Diff line number Diff line change
Expand Up @@ -14,41 +14,41 @@
*/
package org.eclipse.lsp.cobol.cli;


import com.google.common.collect.Multimap;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.*;
import com.google.inject.Guice;
import com.google.inject.Injector;
import lombok.extern.slf4j.Slf4j;
import org.antlr.v4.runtime.tree.ParseTreeListener;
import org.eclipse.lsp.cobol.cli.di.CliModule;
import org.eclipse.lsp.cobol.cli.modules.CliClientProvider;
import org.eclipse.lsp.cobol.common.AnalysisConfig;
import org.eclipse.lsp.cobol.common.ResultWithErrors;
import org.eclipse.lsp.cobol.common.SubroutineService;
import org.eclipse.lsp.cobol.common.copybook.CopybookProcessingMode;
import org.eclipse.lsp.cobol.common.error.SyntaxError;
import org.eclipse.lsp.cobol.common.mapping.ExtendedDocument;
import org.eclipse.lsp.cobol.common.mapping.ExtendedText;
import org.eclipse.lsp.cobol.common.message.MessageService;
import org.eclipse.lsp.cobol.core.engine.analysis.AnalysisContext;
import org.eclipse.lsp.cobol.core.engine.analysis.Timing;
import org.eclipse.lsp.cobol.core.engine.dialects.DialectService;
import org.eclipse.lsp.cobol.core.engine.pipeline.Pipeline;
import org.eclipse.lsp.cobol.core.engine.pipeline.PipelineResult;
import org.eclipse.lsp.cobol.core.engine.pipeline.stages.CompilerDirectivesStage;
import org.eclipse.lsp.cobol.core.engine.pipeline.stages.DialectProcessingStage;
import org.eclipse.lsp.cobol.core.engine.pipeline.stages.PreprocessorStage;
import org.eclipse.lsp.cobol.core.engine.pipeline.StageResult;
import org.eclipse.lsp.cobol.core.engine.pipeline.stages.*;
import org.eclipse.lsp.cobol.core.engine.processor.AstProcessor;
import org.eclipse.lsp.cobol.core.engine.symbols.SymbolsRepository;
import org.eclipse.lsp.cobol.core.preprocessor.TextPreprocessor;
import org.eclipse.lsp.cobol.core.preprocessor.delegates.GrammarPreprocessor;
import org.eclipse.lsp.cobol.core.semantics.CopybooksRepository;
import org.eclipse.lsp.cobol.service.settings.CachingConfigurationService;
import org.eclipse.lsp4j.Location;
import picocli.CommandLine;

import java.io.File;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.*;
import java.util.concurrent.Callable;

/**
Expand All @@ -58,7 +58,8 @@
@Slf4j
public class Cli implements Callable<Integer> {
private enum Action {
list_copybooks
list_copybooks,
analysis
}

@CommandLine.Parameters(description = "Values: ${COMPLETION-CANDIDATES}")
Expand All @@ -81,7 +82,7 @@ private enum Action {

public Integer call() throws Exception {
Injector diCtx = Guice.createInjector(new CliModule());
Pipeline pipeline = setupPipeline(diCtx);
Pipeline pipeline = setupPipeline(diCtx, action);

CliClientProvider cliClientProvider = diCtx.getInstance(CliClientProvider.class);
if (cpyPaths != null) {
Expand All @@ -101,38 +102,86 @@ public Integer call() throws Exception {
ResultWithErrors<ExtendedText> resultWithErrors = preprocessor.cleanUpCode(documentUri, text);
AnalysisContext ctx = new AnalysisContext(new ExtendedDocument(resultWithErrors.getResult(), text), createAnalysisConfiguration());
ctx.getAccumulatedErrors().addAll(resultWithErrors.getErrors());

PipelineResult<CopybooksRepository> pipelineResult = (PipelineResult<CopybooksRepository>) pipeline.run(ctx);
Multimap<String, String> definitions = pipelineResult.getData().getDefinitions();
Multimap<String, Location> usages = pipelineResult.getData().getUsages();
Set<String> missing = new HashSet<>(usages.keySet());
missing.removeAll(definitions.keySet());

PipelineResult pipelineResult = pipeline.run(ctx);
Gson gson = new GsonBuilder().setPrettyPrinting().create();
JsonObject result = new JsonObject();
JsonArray copybookUris = new JsonArray();
JsonArray missingCopybooks = new JsonArray();
missing.forEach(missingCopybooks::add);
definitions.values().forEach(copybookUris::add);
result.add("copybookUris", copybookUris);
result.add("missingCopybooks", missingCopybooks);
addTiming(result, pipelineResult.getTimings());
switch (action) {
case analysis:
StageResult<ProcessingResult> analysisResult = (StageResult<ProcessingResult>) pipelineResult.getLastStageResult();
JsonArray diagnostics = new JsonArray();
ctx.getAccumulatedErrors().forEach(err -> {
JsonObject diagnostic = toJson(err, gson);
diagnostics.add(diagnostic);
});
result.add("diagnostics", diagnostics);
break;
case list_copybooks:
StageResult<CopybooksRepository> copybooksResult = (StageResult<CopybooksRepository>) pipelineResult.getLastStageResult();
Multimap<String, String> definitions = copybooksResult.getData().getDefinitions();
Multimap<String, Location> usages = copybooksResult.getData().getUsages();
Set<String> missing = new HashSet<>(usages.keySet());
missing.removeAll(definitions.keySet());

JsonArray copybookUris = new JsonArray();
JsonArray missingCopybooks = new JsonArray();
missing.forEach(missingCopybooks::add);
definitions.values().forEach(copybookUris::add);
result.add("copybookUris", copybookUris);
result.add("missingCopybooks", missingCopybooks);
break;
default:
break;
}
System.out.println(gson.toJson(result));
return 0;
}

/**
 * Converts a syntax error into a JSON diagnostic object.
 * Only the fields that are present on the error are emitted; absent ones are skipped.
 *
 * @param syntaxError the error reported during analysis
 * @param gson serializer used for the structured fields (location, related info)
 * @return a JSON object describing the diagnostic
 */
private JsonObject toJson(SyntaxError syntaxError, Gson gson) {
  JsonObject diagnostic = new JsonObject();
  if (syntaxError.getErrorCode() != null) {
    diagnostic.add("code", new JsonPrimitive(syntaxError.getErrorCode().getLabel()));
  }
  if (syntaxError.getErrorSource() != null) {
    diagnostic.add("source", new JsonPrimitive(syntaxError.getErrorSource().getText()));
  }
  if (syntaxError.getLocation() != null) {
    diagnostic.add("location", gson.toJsonTree(syntaxError.getLocation()));
  }
  if (syntaxError.getSeverity() != null) {
    diagnostic.add("severity", new JsonPrimitive(syntaxError.getSeverity().name()));
  }
  if (syntaxError.getSuggestion() != null) {
    diagnostic.add("suggestion", new JsonPrimitive(syntaxError.getSuggestion()));
  }
  if (syntaxError.getRelatedInformation() != null) {
    diagnostic.add("related", gson.toJsonTree(syntaxError.getRelatedInformation()));
  }
  return diagnostic;
}

/**
 * Attaches a "timings" section to the result: one entry per pipeline stage plus a
 * "total" entry summing all stage times. The "total" entry is omitted when there
 * are no timings at all, matching the original stream-reduce behavior.
 *
 * @param result JSON object the "timings" section is added to
 * @param timings per-stage timing data, keyed by stage name
 */
private void addTiming(JsonObject result, Map<String, Timing> timings) {
  JsonObject timingJson = new JsonObject();
  long total = 0;
  for (Map.Entry<String, Timing> entry : timings.entrySet()) {
    long time = entry.getValue().getTime();
    timingJson.add(entry.getKey(), new JsonPrimitive(time));
    total += time;
  }
  result.add("timings", timingJson);
  // Only emit a total when at least one stage ran (empty reduce yields no value).
  if (!timings.isEmpty()) {
    timingJson.add("total", new JsonPrimitive(total));
  }
}

/**
 * Builds the analysis configuration used by all CLI actions: the default
 * configuration with copybook processing enabled.
 *
 * @return the analysis configuration for CLI runs
 */
private static AnalysisConfig createAnalysisConfiguration() {
return AnalysisConfig.defaultConfig(CopybookProcessingMode.ENABLED);
}

private static Pipeline setupPipeline(Injector diCtx) {
/**
 * Wires the processing pipeline for the requested CLI action.
 * Every action runs compiler directives, dialect processing, and preprocessing;
 * the {@code analysis} action additionally parses the source and builds the AST.
 *
 * @param diCtx dependency-injection context providing the pipeline collaborators
 * @param action CLI action the pipeline is built for
 * @return the configured pipeline
 */
private static Pipeline setupPipeline(Injector diCtx, Action action) {
  MessageService messages = diCtx.getInstance(MessageService.class);
  DialectService dialects = diCtx.getInstance(DialectService.class);
  GrammarPreprocessor grammarPreprocessor = diCtx.getInstance(GrammarPreprocessor.class);
  ParseTreeListener treeListener = diCtx.getInstance(ParseTreeListener.class);
  SymbolsRepository symbols = diCtx.getInstance(SymbolsRepository.class);
  SubroutineService subroutines = diCtx.getInstance(SubroutineService.class);
  CachingConfigurationService configurationCache = diCtx.getInstance(CachingConfigurationService.class);
  AstProcessor astProcessor = diCtx.getInstance(AstProcessor.class);

  Pipeline pipeline = new Pipeline();
  // Stages shared by every action.
  pipeline.add(new CompilerDirectivesStage(messages));
  pipeline.add(new DialectProcessingStage(dialects));
  pipeline.add(new PreprocessorStage(grammarPreprocessor));
  // Full analysis continues through parsing and AST transformation.
  if (action == Action.analysis) {
    pipeline.add(new ImplicitDialectProcessingStage(dialects));
    pipeline.add(new ParserStage(messages, treeListener));
    pipeline.add(new TransformTreeStage(symbols, messages, subroutines, configurationCache, dialects, astProcessor));
  }
  return pipeline;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import com.google.inject.assistedinject.FactoryModuleBuilder;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.name.Names;
import org.antlr.v4.runtime.tree.ParseTreeListener;
import org.eclipse.lsp.cobol.cfg.CFASTBuilder;
import org.eclipse.lsp.cobol.cfg.CFASTBuilderImpl;
import org.eclipse.lsp.cobol.cli.modules.CliClientProvider;
Expand Down Expand Up @@ -49,6 +50,7 @@
import org.eclipse.lsp.cobol.core.preprocessor.delegates.transformer.ContinuationLineTransformation;
import org.eclipse.lsp.cobol.core.preprocessor.delegates.writer.CobolLineWriter;
import org.eclipse.lsp.cobol.core.preprocessor.delegates.writer.CobolLineWriterImpl;
import org.eclipse.lsp.cobol.core.visitor.InterruptingTreeListener;
import org.eclipse.lsp.cobol.lsp.DisposableLSPStateService;
import org.eclipse.lsp.cobol.lsp.jrpc.CobolLanguageClient;
import org.eclipse.lsp.cobol.service.CobolLSPServerStateService;
Expand Down Expand Up @@ -93,7 +95,7 @@ protected void configure() {
bind(LocaleStore.class).to(LocaleStoreImpl.class);
bind(ConfigurationService.class).to(CachingConfigurationService.class);
bind(CopybookNameService.class).to(CopybookNameServiceImpl.class);

bind(ParseTreeListener.class).to(InterruptingTreeListener.class);
bind(String.class)
.annotatedWith(named("resourceFileLocation"))
.toInstance("resourceBundles/messages");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,9 @@
import org.eclipse.lsp.cobol.core.engine.dialects.DialectService;
import org.eclipse.lsp.cobol.core.engine.errors.ErrorFinalizerService;
import org.eclipse.lsp.cobol.core.engine.pipeline.Pipeline;
import org.eclipse.lsp.cobol.core.engine.pipeline.stages.*;
import org.eclipse.lsp.cobol.core.engine.pipeline.PipelineResult;
import org.eclipse.lsp.cobol.core.engine.pipeline.stages.*;
import org.eclipse.lsp.cobol.core.engine.pipeline.StageResult;
import org.eclipse.lsp.cobol.core.engine.processor.AstProcessor;
import org.eclipse.lsp.cobol.core.engine.symbols.SymbolsRepository;
import org.eclipse.lsp.cobol.core.preprocessor.TextPreprocessor;
Expand Down Expand Up @@ -170,7 +171,9 @@ public AnalysisResult run(
AnalysisContext ctx = new AnalysisContext(new ExtendedDocument(resultWithErrors.getResult(), text), analysisConfig);
ctx.getAccumulatedErrors().addAll(resultWithErrors.getErrors());

PipelineResult<?> result = pipeline.run(ctx);
PipelineResult pipelineResult = pipeline.run(ctx);
StageResult<?> result = pipelineResult.getLastStageResult();
PerformanceMeasurementUtils.logTiming(pipelineResult.getTimings(), ctx);

if (result.stopProcessing() || !(result.getData() instanceof ProcessingResult)) {
return toAnalysisResult(new ResultWithErrors<>(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
/*
* Copyright (c) 2024 Broadcom.
* The term "Broadcom" refers to Broadcom Inc. and/or its subsidiaries.
*
* This program and the accompanying materials are made
* available under the terms of the Eclipse Public License 2.0
* which is available at https://www.eclipse.org/legal/epl-2.0/
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* Broadcom, Inc. - initial API and implementation
*
*/
package org.eclipse.lsp.cobol.core.engine;

import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
import org.eclipse.lsp.cobol.core.engine.analysis.AnalysisContext;
import org.eclipse.lsp.cobol.core.engine.analysis.Timing;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.StringJoiner;

/**
 * Utility class for performance related logic: logs per-stage timing and,
 * when enabled via the {@code performance.log.path} system property, appends
 * one CSV row per analyzed document to that file.
 */
@UtilityClass
@Slf4j
public class PerformanceMeasurementUtils {
  /** System property naming the CSV file timing rows are appended to. */
  private static final String PERFORMANCE_LOG_PATH = "performance.log.path";

  /**
   * Log timing into CSV file.
   * Per-stage times are always logged at debug level; the CSV file is only
   * written when the {@code performance.log.path} system property is set.
   * A header line is written once, when the file is first created.
   *
   * @param timing per-stage timing data, keyed by stage name
   * @param context analysis context of the processed document
   */
  public void logTiming(Map<String, Timing> timing, AnalysisContext context) {
    timing.forEach((key, value) -> LOG.debug("Timing for {}: {}", key, value.getTime()));
    Optional.ofNullable(System.getProperty(PERFORMANCE_LOG_PATH))
        .map(Paths::get)
        .ifPresent(path -> {
          try {
            // NOTE(review): exists-then-create is racy if several processes
            // share one log file — acceptable for a single CLI/server process.
            if (!Files.exists(path)) {
              LOG.info("Write performance data into: {}", path);
              Files.write(path, Collections.singleton(createHeaderLine(timing)), StandardOpenOption.CREATE);
            }
            Files.write(path, Collections.singleton(createTimingLine(timing, context)), StandardOpenOption.APPEND);
          } catch (IOException e) {
            // The user explicitly opted in via the system property, so surface
            // the failure instead of hiding it at debug level.
            LOG.warn("Unable to write performance data into {}", path, e);
          }
        });
  }

  /** Builds the CSV header row: url, one column per stage, total time, source size. */
  private String createHeaderLine(Map<String, Timing> timing) {
    StringJoiner line = new StringJoiner(",");
    line.add("url");
    timing.keySet().forEach(line::add);
    line.add("Total time");
    line.add("Size");
    return line.toString();
  }

  /** Builds one CSV data row; column order matches {@link #createHeaderLine}. */
  private String createTimingLine(Map<String, Timing> timing, AnalysisContext context) {
    StringJoiner line = new StringJoiner(",");
    line.add(context.getExtendedDocument().getUri());
    long total = 0;
    // Iterate entries once instead of keySet + get; a null value counts as 0.
    for (Map.Entry<String, Timing> entry : timing.entrySet()) {
      long time = Optional.ofNullable(entry.getValue()).map(Timing::getTime).orElse(0L);
      line.add(Long.toString(time));
      total += time;
    }
    line.add(Long.toString(total));
    line.add(Integer.toString(context.getExtendedDocument().toString().length()));
    return line.toString();
  }
}
Loading

0 comments on commit 0cfdb2d

Please sign in to comment.