package com.aliyun.datahub.client.impl.schemaregistry;

import com.aliyun.datahub.client.DatahubClient;
import com.aliyun.datahub.client.exception.DatahubClientException;
import com.aliyun.datahub.client.exception.ResourceNotFoundException;
import com.aliyun.datahub.client.model.ListTopicSchemaResult;
import com.aliyun.datahub.client.model.RecordSchema;
import com.aliyun.datahub.client.model.RecordSchemaInfo;
import com.aliyun.datahub.client.util.JsonUtils;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

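/**
 * Schema registry client backed by a Guava {@link LoadingCache}. Topic schemas are loaded
 * lazily through the supplied {@link DatahubClient}, expire 15 minutes after the last access,
 * and are refreshed in the background at the configured update interval.
 *
 * <p>Illustrative usage (a minimal sketch; it assumes {@code endpoint}, {@code accountKey},
 * and a configured {@code DatahubClient} named {@code datahubClient} are supplied by the caller):
 * <pre>{@code
 * SchemaRegistryClient registry =
 *         new SchemaRegistryClientImpl(endpoint, accountKey, 60_000L, datahubClient);
 * RecordSchema latest = registry.getSchema("my_project", "my_topic", -1); // -1 = latest version
 * int versionId = registry.getVersionId("my_project", "my_topic", latest);
 * }</pre>
 */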
public class SchemaRegistryClientImpl implements SchemaRegistryClient {
    private static final Logger LOGGER = LoggerFactory.getLogger(SchemaRegistryClientImpl.class);
    private static final AtomicInteger CACHE_COUNT = new AtomicInteger(0);
    private static final int SCHEMA_EXPIRE_TIMEOUT_MS = 15 * 60 * 1000; // 15min
    private static final int RETRY_COUNT = 1;

    private volatile LoadingCache<String, TopicSchemaMeta> schemaCache;
    private final String endpoint;
    private final String accountKey;
    private final long updateInterval;
    private final DatahubClient client;

    public SchemaRegistryClientImpl(String endpoint, String accountKey, long updateInterval, DatahubClient client) {
        this.endpoint = endpoint;
        this.accountKey = accountKey;
        this.updateInterval = updateInterval;
        this.client = client;
    }

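    /**
     * Lazily creates the schema cache using double-checked locking on the volatile field.
     * Entries expire 15 minutes after the last access and are reloaded after the configured
     * update interval; the cache key is "projectName/topicName".
     */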
    private void initCacheIfNeed() {
        if (schemaCache == null) {
            synchronized (this) {
                if (schemaCache == null) {
                    schemaCache = CacheBuilder.newBuilder()
                            .expireAfterAccess(SCHEMA_EXPIRE_TIMEOUT_MS, TimeUnit.MILLISECONDS)
                            .refreshAfterWrite(updateInterval, TimeUnit.MILLISECONDS)
                            .concurrencyLevel(Runtime.getRuntime().availableProcessors())
                            .build(new CacheLoader<String, TopicSchemaMeta>() {
                                @Override
                                public TopicSchemaMeta load(String topicKey) throws Exception {
                                    String[] segs = topicKey.split("/");
                                    if (segs.length != 2) {
                                        throw new DatahubClientException("TopicKey format error. key:" + topicKey);
                                    }
                                    return updateTopicSchemaMeta(segs[0], segs[1]);
                                }
                            });
                    LOGGER.info("Init schema registry cache {}, endpoint:{}, accountKey:{}",
                            CACHE_COUNT.getAndIncrement(), endpoint, accountKey);
                }
            }
        }
    }

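    /**
     * Returns the schema with the given version id (or the latest schema when versionId is -1).
     * If the version is not found, the cached topic entry is invalidated and the lookup is
     * retried once, so a newly registered version can be picked up without waiting for the
     * scheduled refresh.
     */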
    @Override
    public RecordSchema getSchema(String projectName, String topicName, int versionId) {
        initCacheIfNeed();
        String topicKey = projectName + "/" + topicName;
        int cnt = 0;
        while (true) {
            try {
                TopicSchemaMeta schemaMeta = schemaCache.get(topicKey);
                return schemaMeta.getSchema(versionId);
            } catch (ResourceNotFoundException e) {
                // Schema version not found: invalidate the cached topic entry and retry once with freshly loaded schemas
                schemaCache.invalidate(topicKey);
                if (++cnt > RETRY_COUNT) {
                    throw e;
                }
            } catch (Exception e) {
                throw new DatahubClientException(e.getMessage(), e);
            }
        }
    }

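    /**
     * Returns the version id registered for the given schema definition. As with
     * {@link #getSchema}, a ResourceNotFoundException invalidates the cached topic entry
     * and triggers a single retry.
     */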
    @Override
    public int getVersionId(String projectName, String topicName, RecordSchema schema) {
        initCacheIfNeed();
        String topicKey = projectName + "/" + topicName;
        int cnt = 0;
        while (true) {
            try {
                TopicSchemaMeta schemaMeta = schemaCache.get(topicKey);
                return schemaMeta.getSchema(schema);
            } catch (ResourceNotFoundException e) {
                // Schema not found by definition: likewise invalidate the cached topic entry and retry once
                schemaCache.invalidate(topicKey);
                if (++cnt > RETRY_COUNT) {
                    throw e;
                }
            } catch (Exception e) {
                throw new DatahubClientException(e.getMessage(), e);
            }
        }
    }

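    /**
     * Loads all schema versions of a topic by paging through listTopicSchema
     * (100 entries per page) and builds the in-memory version/schema mappings.
     */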
    private TopicSchemaMeta updateTopicSchemaMeta(String projectName, String topicName) {
        TopicSchemaMeta schemaMeta = new TopicSchemaMeta(projectName, topicName);

        int pageNumber = 0;
        int pageSize = 100;
        int totalPage;

        do {
            ++pageNumber;
            ListTopicSchemaResult result = client.listTopicSchema(projectName, topicName, pageNumber, pageSize);
            totalPage = result.getPageCount();

            for (RecordSchemaInfo info : result.getRecordSchemaInfoList()) {
                schemaMeta.addSchema((int)info.getVersionId(), info.getRecordSchema());
            }
        } while (pageNumber < totalPage);

        return schemaMeta;
    }

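    /**
     * In-memory snapshot of a topic's schemas: versionMap supports lookup by version id
     * (and the latest version via lastEntry), while schemaMap provides the reverse lookup
     * from schema definition to version id.
     */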
    private static class TopicSchemaMeta {
        private final String projectName;
        private final String topicName;
        private final TreeMap<Integer, RecordSchema> versionMap = new TreeMap<>();
        private final Map<RecordSchema, Integer> schemaMap = new HashMap<>();

        public TopicSchemaMeta(String projectName, String topicName) {
            this.projectName = projectName;
            this.topicName = topicName;
        }

        public synchronized RecordSchema getSchema(int versionId) {
            // versionId == -1 means query the latest schema version
            if (versionId == -1) {
                if (versionMap.isEmpty()) {
                    throw new DatahubClientException("Schema map is empty. project:" + projectName + ",topic:" + topicName);
                }

                Map.Entry<Integer, RecordSchema> entry = versionMap.lastEntry();
                return entry.getValue();
            } else {
                RecordSchema schema = versionMap.get(versionId);
                if (schema == null) {
                    throw new ResourceNotFoundException("Not found schema with the specified version. project:" +
                            projectName + ",topic:" + topicName + ",version:" + versionId);
                }
                return schema;
            }
        }

        public synchronized int getSchema(RecordSchema schema) {
            Integer versionId = schemaMap.get(schema);
            if (versionId == null) {
                throw new ResourceNotFoundException("Schema not found. project:" +
                        projectName + ",topic:" + topicName + ",schema:" + JsonUtils.toJson(schema));
            }
            return versionId;
        }

        public void addSchema(int versionId, RecordSchema schema) {
            versionMap.put(versionId, schema);
            schemaMap.put(schema, versionId);
        }
    }
}