
com.sinszm.sofa.service.support.HdfsWrapper Maven / Gradle / Ivy


High-availability service framework, distributed file storage operation component. Copyright © 2021 智慧程序猿 (sinsz.com). All rights reserved.

package com.sinszm.sofa.service.support;

import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.StrUtil;
import com.sinszm.sofa.HadoopHdfsProperties;
import com.sinszm.sofa.annotation.EnableDFS;
import com.sinszm.sofa.enums.DfsType;
import com.sinszm.sofa.util.BaseUtil;
import com.sinszm.sofa.util.SpringHelper;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Date;

/**
 * File server (HDFS) operations.
 *
 * @author fh411
 */
@Slf4j
@EnableDFS(DfsType.HDFS)
@Component
public class HdfsWrapper {

    @Resource
    private HadoopHdfsProperties hadoopHdfsProperties;

    /**
     * Resolve the {@link FileSystem} bean lazily from the Spring context.
     */
    private FileSystem client() {
        return SpringHelper.instance().getBean(FileSystem.class);
    }

    /**
     * Whether a non-empty file exists at the given path.
     * @param route path
     * @return      boolean
     */
    private boolean exist(String route) {
        Path dst = new Path(route);
        return this.exist(dst);
    }

    /**
     * Whether a non-empty file exists at the given path.
     * @param dst   path
     * @return      boolean
     */
    @SneakyThrows
    private boolean exist(Path dst) {
        return client().exists(dst) && client().getFileStatus(dst).getLen() != 0;
    }

    /**
     * Upload a file.
     *
     * @param bytes         file bytes
     * @param extension     file extension (with or without a leading dot)
     * @return              file information
     */
    @SneakyThrows
    public UploadInfo upload(byte[] bytes, String extension) {
        String fileName = StrUtil.join(".", BaseUtil.uuid(), BaseUtil.trim(extension).replace(".", ""));
        String filePath = DateUtil.formatDate(new Date()).replace("-", "/");
        String path = StrUtil.join("/", BaseUtil.trim(hadoopHdfsProperties.getBucket()), filePath, fileName);
        Path dst = new Path(path);
        // Write only when no non-empty file already exists at the target path.
        if (!this.exist(dst)) {
            try (InputStream in = new ByteArrayInputStream(bytes);
                 FSDataOutputStream os = client().create(dst)) {
                IOUtils.copyBytes(in, os, 4096, false);
                os.hsync();
            } catch (IOException e) {
                log.error("HDFS file upload failed!", e);
                throw e;
            }
        }
        return UploadInfo.builder()
                .group(BaseUtil.trim(hadoopHdfsProperties.getBucket()))
                .bucket(BaseUtil.trim(hadoopHdfsProperties.getBucket()))
                .path(StrUtil.join("/", filePath, fileName))
                .build();
    }

    /**
     * Download a file.
     * @param bucket        file group or bucket
     * @param path          file storage path
     * @return              file stream
     */
    @SneakyThrows
    public InputStream download(String bucket, String path) {
        path = StrUtil.join("/", BaseUtil.trim(bucket), BaseUtil.trim(path));
        Path dst = new Path(path);
        return client().open(dst);
    }

}
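
For reference, a minimal usage sketch follows. It is not part of the published artifact: the caller class and its names are hypothetical, and it assumes UploadInfo lives in the same package as HdfsWrapper and exposes Lombok-generated getters (getBucket(), getPath()) matching its builder fields, with the HDFS bucket already configured via HadoopHdfsProperties.

// Hypothetical caller, for illustration only; not part of the published artifact.
package com.sinszm.sofa.example;

import com.sinszm.sofa.service.support.HdfsWrapper;
import com.sinszm.sofa.service.support.UploadInfo;
import org.apache.hadoop.io.IOUtils;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

@Service
public class HdfsUsageExample {

    @Resource
    private HdfsWrapper hdfsWrapper;

    public String roundTrip() throws Exception {
        // Upload raw bytes together with a file extension; the wrapper derives
        // a date-based path and a UUID file name under the configured bucket.
        byte[] data = "hello hdfs".getBytes(StandardCharsets.UTF_8);
        UploadInfo info = hdfsWrapper.upload(data, "txt");

        // Download by bucket and the relative path returned from the upload
        // (assumes Lombok getters on UploadInfo).
        try (InputStream in = hdfsWrapper.download(info.getBucket(), info.getPath());
             ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            IOUtils.copyBytes(in, out, 4096, false);
            return out.toString(StandardCharsets.UTF_8.name());
        }
    }
}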



