// live.document.mavenplugin.sql.SqlAnalysisMojo — Maven / Gradle / Ivy
package live.document.mavenplugin.sql;
import live.document.generator.utils.FileUtils;
import live.document.plsqlscanner.PlSqlExplained;
import live.document.plsqlscanner.PlSqlObject;
import live.document.plsqlscanner.PlSqlReader;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Analyses, within the call chains of the specified methods, the parts that operate on
 * entity (Entity) classes.
 * NOTE(review): the original javadoc says "exports CSV", but this mojo writes a plain-text
 * report ({@code sql-explain.txt}) — confirm which is intended.
 */
@Mojo(name = "analyse-sql")
public class SqlAnalysisMojo extends AbstractMojo {

    /** Path of the PL/SQL source file to analyse. */
    @Parameter
    private String sqlFile;

    /** Directory the report is written to; falls back to {@code java.io.tmpdir} when blank. */
    @Parameter(defaultValue = "", required = true)
    private String analysisResultOutputDir;

    /**
     * Parses the configured PL/SQL file and writes a per-object summary
     * (name, lines of code, CRUD statement count, calls, table accesses)
     * to {@code sql-explain.txt} in the output directory.
     *
     * @throws MojoExecutionException wrapping any failure, with the original exception as cause
     */
    @Override
    public void execute() throws MojoExecutionException {
        try {
            FileUtils fileUtils = new FileUtils();
            if (analysisResultOutputDir == null || analysisResultOutputDir.trim().isEmpty()) {
                analysisResultOutputDir = System.getProperty("java.io.tmpdir");
            } else {
                // makeDirectory apparently takes a file path, so a dummy leaf name is appended
                // to have the directory itself created — TODO confirm against FileUtils.
                fileUtils.makeDirectory(Paths.get(analysisResultOutputDir, "dummy"));
            }
            PlSqlReader reader = new PlSqlReader();
            String sql = fileUtils.readMarkdown(Paths.get(sqlFile));
            PlSqlExplained parse = reader.parse(sql);
            StringBuilder report = new StringBuilder();
            for (PlSqlObject plSqlObject : parse.getPlSqlObjects()) {
                report.append(String.format("Name: %s, LoC: %d, CRUD: %d\nCall: %s\nTables: \n%s\n",
                        plSqlObject.getFullName(),
                        plSqlObject.getLineOfCode(),
                        plSqlObject.getCrudStatements(),
                        String.join(", ", plSqlObject.getFunctionCalls()),
                        String.join("\n", getTableActions(plSqlObject))));
                report.append("\n");
            }
            Path fileName = Paths.get(analysisResultOutputDir, "sql-explain.txt");
            fileUtils.writeFile(fileName, report.toString());
            getLog().info("File created: file://" + fileName);
        } catch (Exception e) {
            getLog().error(e);
            // Preserve the original exception as the cause instead of keeping only its message.
            throw new MojoExecutionException(e.getMessage(), e);
        }
    }

    /**
     * Collects every table access of the given object as a "table [R]" / "table [W]" entry
     * and reduces them to one line per table (read+write collapses to "[RW]").
     */
    private List<String> getTableActions(PlSqlObject plSqlObject) {
        List<String> actions = new ArrayList<>();
        actions.addAll(plSqlObject.getQueryTables().stream().map(t -> t + " [R]").collect(Collectors.toList()));
        actions.addAll(plSqlObject.getInsertTables().stream().map(t -> t + " [W]").collect(Collectors.toList()));
        actions.addAll(plSqlObject.getUpdateTables().stream().map(t -> t + " [W]").collect(Collectors.toList()));
        actions.addAll(plSqlObject.getDeleteTables().stream().map(t -> t + " [W]").collect(Collectors.toList()));
        return getDistinctEntityAndOperation(actions);
    }

    /**
     * Deduplicates "table [op]" entries to one sorted "table, [op]" line per table,
     * combining operations so a table that is both read and written is reported as "[RW]".
     *
     * @param allEntityOperations entries of the form {@code "TABLE [R]"} or {@code "TABLE [W]"}
     * @return sorted, distinct {@code "TABLE, [R|W|RW]"} lines
     */
    private List<String> getDistinctEntityAndOperation(List<String> allEntityOperations) {
        Map<String, String> operationByTable = new HashMap<>();
        for (String entityOperation : allEntityOperations) {
            int spaceIndex = entityOperation.indexOf(' ');
            String tableName = entityOperation.substring(0, spaceIndex);
            String tableOperation = entityOperation.substring(spaceIndex + 1);
            // Merge instead of overwrite: [R] + [W] must not lose the read information.
            operationByTable.merge(tableName, tableOperation, SqlAnalysisMojo::combineOperations);
        }
        List<String> results = operationByTable.entrySet()
                .stream()
                .map(entry -> entry.getKey() + ", " + entry.getValue())
                .collect(Collectors.toList());
        results.sort(String::compareTo);
        return results;
    }

    /**
     * Combines two operation markers for the same table: identical markers stay as-is,
     * any mix of read and write becomes {@code "[RW]"}.
     */
    private static String combineOperations(String existingOperation, String newOperation) {
        if (existingOperation.equals(newOperation)) {
            return existingOperation;
        }
        return "[RW]";
    }
}