org.dbflute.infra.doc.hacomment.DfHacoMapFile
/*
* Copyright 2014-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.infra.doc.hacomment;
import org.dbflute.helper.HandyDate;
import org.dbflute.helper.dfmap.DfMapFile;
import org.dbflute.helper.message.ExceptionMessageBuilder;
import org.dbflute.infra.doc.hacomment.exception.DfHacoMapFileReadFailureException;
import org.dbflute.infra.doc.hacomment.exception.DfHacoMapFileWriteFailureException;
import org.dbflute.optional.OptionalThing;
import org.dbflute.util.DfStringUtil;
import org.dbflute.util.DfTypeUtil;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* @author hakiba
*/
public class DfHacoMapFile {
    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    // e.g. dbflute_maihamadb/schema/hacomment/
    private static final String BASE_HACOMMENT_DIR_PATH = "/schema/hacomment/";
    // e.g. dbflute_maihamadb/schema/hacomment/piece/
    private static final String BASE_PIECE_DIR_PATH = BASE_HACOMMENT_DIR_PATH + "piece/";
    // e.g. dbflute_maihamadb/schema/hacomment/pickup/hacomment-pickup.dfmap
    private static final String BASE_PICKUP_FILE_PATH = BASE_HACOMMENT_DIR_PATH + "pickup/hacomment-pickup.dfmap";

    private static final Map<String, String> REPLACE_CHAR_MAP;
    static {
        // done cabos add spaces and replaceChar should be underscore? by jflute (2017/09/07)
        List<String> notAvailableCharList = Arrays.asList("/", "\\", "<", ">", "*", "?", "\"", "|", ":", ";", "\0", " ");
        String replaceChar = "_";
        REPLACE_CHAR_MAP = notAvailableCharList.stream().collect(Collectors.toMap(ch -> ch, ch -> replaceChar));
    }

    private static final Map<String, String> REPLACE_MAP_FOR_HACOMMENT_ID =
            Stream.of("/", " ", ":").collect(Collectors.toMap(ch -> ch, ch -> ""));
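    // e.g. (illustration only; the maps are applied by helper methods not shown in this excerpt)
    //  via REPLACE_CHAR_MAP:             "2018/05/11 18:30:15" => "2018_05_11_18_30_15" (file-name safe)
    //  via REPLACE_MAP_FOR_HACOMMENT_ID: "2018/05/11 18:30:15" => "20180511183015" (compact ID)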
    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    private final Supplier<LocalDateTime> currentDatetimeSupplier;

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    public DfHacoMapFile(Supplier<LocalDateTime> currentDatetimeSupplier) {
        this.currentDatetimeSupplier = currentDatetimeSupplier;
    }
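    // e.g. (usage sketch; LocalDateTime.now() stands in for the caller's current-datetime provider)
    //  DfHacoMapFile hacoMapFile = new DfHacoMapFile(() -> LocalDateTime.now());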
    // ===================================================================================
    //                                                                                Read
    //                                                                                ====
    // -----------------------------------------------------
    //                                                 Piece
    //                                                 -----
    /**
     * Read all hacomment piece map files in "clientDirPath/schema/hacomment/piece/".
     * @param clientDirPath The path of DBFlute client directory (NotNull)
     * @return The list of all hacomment piece maps (NotNull: if no piece map file exists, returns empty list)
     * @see DfHacoMapPiece#convertToMap()
     */
    public List<DfHacoMapPiece> readPieceList(String clientDirPath) {
        assertClientDirPath(clientDirPath);
        String pieceDirPath = buildPieceDirPath(clientDirPath);
        if (Files.notExists(Paths.get(pieceDirPath))) {
            return Collections.emptyList();
        }
        try {
            return Files.list(Paths.get(pieceDirPath))
                    .filter(path -> path.toString().endsWith(".dfmap"))
                    .filter(path -> path.toString().contains("-piece-"))
                    .map(path -> doReadPiece(path))
                    .collect(Collectors.toList());
        } catch (IOException e) {
            throwHacomMapReadFailureException(pieceDirPath, e);
            return Collections.emptyList();
        }
    }
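    // e.g. (usage sketch with the hacoMapFile instance from the constructor example above;
    //       "./dbflute_maihamadb" is an illustrative DBFlute client path)
    //  List<DfHacoMapPiece> pieceList = hacoMapFile.readPieceList("./dbflute_maihamadb");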
    private DfHacoMapPiece doReadPiece(Path path) {
        final DfMapFile mapFile = new DfMapFile();
        try {
            Map<String, Object> map = mapFile.readMap(Files.newInputStream(path));
            return mappingToDecoMapPiece(map);
        } catch (RuntimeException | IOException e) {
            throwHacomMapReadFailureException(path.toString(), e);
            return null; // unreachable
        }
    }

    // done hakiba cast check by hakiba (2017/07/29)
    private DfHacoMapPiece mappingToDecoMapPiece(Map<String, Object> map) {
        String diffCode = (String) map.get("diffCode");
        String diffdate = (String) map.get("diffDate");
        String hacomment = (String) map.get("hacomment");
        String diffComment = (String) map.get("diffComment");
        @SuppressWarnings("unchecked")
        List<String> authorList = (List<String>) map.get("authorList");
        String pieceCode = (String) map.get("pieceCode");
        LocalDateTime pieceDatetime = new HandyDate((String) map.get("pieceDatetime")).getLocalDateTime();
        String pieceOwner = (String) map.get("pieceOwner");
        @SuppressWarnings("unchecked")
        List<String> previousPieceList = (List<String>) map.get("previousPieceList");
        return new DfHacoMapPiece(diffCode, diffdate, hacomment, diffComment, authorList, pieceCode, pieceOwner, pieceDatetime,
                previousPieceList);
    }
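    // e.g. a piece dfmap that the mapping above expects (keys follow the reads above; values are illustrative only)
    //  map:{
    //      ; diffCode = ...(code identifying the schema diff)
    //      ; diffDate = 2018/05/11 18:30:15
    //      ; hacomment = (comment text written for the diff)
    //      ; diffComment = (comment of the schema diff itself)
    //      ; authorList = list:{ hakiba }
    //      ; pieceCode = ...(unique code of this piece)
    //      ; pieceOwner = hakiba
    //      ; pieceDatetime = 2018/05/11 18:30:15
    //      ; previousPieceList = list:{}
    //  }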
    // -----------------------------------------------------
    //                                                Pickup
    //                                                ------
    /**
     * Read the hacomment pickup map file at "clientDirPath/schema/hacomment/pickup/hacomment-pickup.dfmap".
     * @param clientDirPath The path of DBFlute client directory (NotNull)
     * @return The pickup hacomment map (NotNull, EmptyAllowed: when the pickup map file does not exist)
     * @see DfHacoMapPickup#convertToMap()
     */
    public OptionalThing<DfHacoMapPickup> readPickup(String clientDirPath) {
        assertClientDirPath(clientDirPath);
        String filePath = buildPickupFilePath(clientDirPath);
        if (Files.notExists(Paths.get(filePath))) {
            return OptionalThing.empty();
        }
        return OptionalThing.ofNullable(doReadPickup(Paths.get(filePath)), () -> {});
    }
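    // e.g. (usage sketch; empty when the pickup file has not been created yet)
    //  OptionalThing<DfHacoMapPickup> optPickup = hacoMapFile.readPickup("./dbflute_maihamadb");
    //  optPickup.ifPresent(pickup -> pickup.convertToMap()); // e.g. inspect the pickup content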
    private DfHacoMapPickup doReadPickup(Path path) {
        DfMapFile mapFile = new DfMapFile();
        try {
            Map<String, Object> map = mapFile.readMap(Files.newInputStream(path));
            return mappingToHacoMapPickup(map);
        } catch (RuntimeException | IOException e) {
            throwHacomMapReadFailureException(path.toString(), e);
            return null; // unreachable
        }
    }
    private DfHacoMapPickup mappingToHacoMapPickup(Map<String, Object> map) {
        LocalDateTime pickupDatetime = DfTypeUtil.toLocalDateTime(map.get("pickupDatetime"));
        String formatVersion = (String) map.get("formatVersion");
        DfHacoMapPickup pickup = new DfHacoMapPickup(formatVersion);
        pickup.setPickupDatetime(pickupDatetime);
        @SuppressWarnings("unchecked")
        Map<String, Object> hacoMap = (Map<String, Object>) map.getOrDefault("hacoMap", new ArrayList<>());
        if (hacoMap.isEmpty()) {
            return pickup;
        }
        @SuppressWarnings("unchecked")
List