/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.timeline;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.Iterators;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.UOE;
import org.apache.druid.java.util.common.guava.Comparators;
import org.apache.druid.timeline.partition.PartitionChunk;
import org.apache.druid.timeline.partition.PartitionHolder;
import org.apache.druid.utils.CollectionUtils;
import org.joda.time.Interval;

import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

/**
 * VersionedIntervalTimeline is a data structure that manages objects on a specific timeline.
 *
 * It associates an {@link Interval} and a generically-typed version with the object that is being stored.
 *
 * In the event of overlapping timeline entries, timeline intervals may be chunked. The underlying data associated
 * with a timeline entry remains unchanged when chunking occurs.
 *
 * After loading objects via the {@link #add} method, the {@link #lookup(Interval)} method can be used to get the list
 * of the most recent objects (according to the version) that match the given interval. The intent is that objects
 * represent a certain time period and when you do a {@link #lookup(Interval)}, you are asking for all of the objects
 * that you need to look at in order to get a correct answer about that time period.
 *
 * The {@link #findFullyOvershadowed} method returns a list of objects that will never be returned by a call to {@link
 * #lookup} because they are overshadowed by some other object. This can be used in conjunction with the {@link #add}
 * and {@link #remove} methods to achieve "atomic" updates. First add new items, then check if those items caused
 * anything to be overshadowed; if so, remove the overshadowed elements, and you have effectively updated your data set
 * without any user impact.
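 *
 * A rough sketch of that workflow follows. The segment type ({@code DataSegment}), version type, and the
 * {@code newInterval} / {@code newVersion} / {@code newChunk} variables are illustrative assumptions, not part
 * of this class:
 *
 * <pre>{@code
 * VersionedIntervalTimeline<String, DataSegment> timeline =
 *     new VersionedIntervalTimeline<>(Comparator.naturalOrder());
 *
 * // 1) Add the new items.
 * timeline.add(newInterval, newVersion, newChunk);
 *
 * // 2) Anything the new items fully overshadow will no longer be returned by lookup() ...
 * for (TimelineObjectHolder<String, DataSegment> holder : timeline.findFullyOvershadowed()) {
 *   // 3) ... so it can be removed without affecting readers of the timeline.
 *   for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
 *     timeline.remove(holder.getTrueInterval(), holder.getVersion(), chunk);
 *   }
 * }
 * }</pre>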
 */
public class VersionedIntervalTimeline<VersionType, ObjectType extends Overshadowable<ObjectType>>
    implements TimelineLookup<VersionType, ObjectType>
{
  private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true);

  // The timelines below store only *visible* timelineEntries
  // adjusted interval -> timelineEntry
  private final NavigableMap<Interval, TimelineEntry> completePartitionsTimeline = new TreeMap<>(
      Comparators.intervalsByStartThenEnd()
  );
  // incompletePartitionsTimeline also includes the entries of completePartitionsTimeline
  // adjusted interval -> timelineEntry
  @VisibleForTesting
  final NavigableMap<Interval, TimelineEntry> incompletePartitionsTimeline = new TreeMap<>(
      Comparators.intervalsByStartThenEnd()
  );
  // true interval -> version -> timelineEntry
  private final Map<Interval, TreeMap<VersionType, TimelineEntry>> allTimelineEntries = new HashMap<>();
  private final AtomicInteger numObjects = new AtomicInteger();

  private final Comparator<? super VersionType> versionComparator;

  // Set this to true if the client needs to skip tombstones upon lookup (like the broker)
  private final boolean skipObjectsWithNoData;

  public VersionedIntervalTimeline(Comparator<? super VersionType> versionComparator)
  {
    this(versionComparator, false);
  }

  public VersionedIntervalTimeline(Comparator<? super VersionType> versionComparator, boolean skipObjectsWithNoData)
  {
    this.versionComparator = versionComparator;
    this.skipObjectsWithNoData = skipObjectsWithNoData;
  }

  public static <VersionType, ObjectType extends Overshadowable<ObjectType>> Iterable<ObjectType> getAllObjects(
      final List<TimelineObjectHolder<VersionType, ObjectType>> holders
  )
  {
    return () ->
        holders.stream()
               .flatMap(holder -> StreamSupport.stream(holder.getObject().spliterator(), false))
               .map(PartitionChunk::getObject)
               .iterator();
  }

  public Map<Interval, TreeMap<VersionType, TimelineEntry>> getAllTimelineEntries()
  {
    return allTimelineEntries;
  }

  /**
   * Returns a lazy collection with all objects (including partially AND fully overshadowed, see {@link
   * #findFullyOvershadowed}) in this VersionedIntervalTimeline to be used for iteration or {@link Collection#stream()}
   * transformation. The order of objects in this collection is unspecified.
   *
   * Note: iteration over the returned collection may not be as trivially cheap as, for example, iteration over an
   * ArrayList. Try (to some reasonable extent) to organize the code so that it iterates the returned collection only
   * once rather than several times.
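   *
   * A minimal sketch of that pattern (the {@code timeline} variable and {@code DataSegment} object type are
   * illustrative assumptions): materialize the collection once if more than one pass over it is needed.
   *
   * <pre>{@code
   * List<DataSegment> all = new ArrayList<>(timeline.iterateAllObjects()); // single iteration, then reuse the list
   * }</pre>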
   */
  public Collection<ObjectType> iterateAllObjects()
  {
    return CollectionUtils.createLazyCollectionFromStream(
        () -> allTimelineEntries
            .values()
            .stream()
            .flatMap((TreeMap<VersionType, TimelineEntry> entryMap) -> entryMap.values().stream())
            .flatMap((TimelineEntry entry) -> StreamSupport.stream(entry.getPartitionHolder().spliterator(), false))
            .map(PartitionChunk::getObject),
        numObjects.get()
    );
  }

  public int getNumObjects()
  {
    return numObjects.get();
  }

  /**
   * Computes a set with all objects falling within the specified interval which are at least partially "visible" in
   * this interval (that is, are not fully overshadowed within this interval).
   *
   * Note that this method returns a set of {@link ObjectType}. Duplicate objects in different time chunks will be
   * removed in the result.
   */
  public Set<ObjectType> findNonOvershadowedObjectsInInterval(Interval interval, Partitions completeness)
  {
    final List<TimelineObjectHolder<VersionType, ObjectType>> holders;

    lock.readLock().lock();
    try {
      holders = lookup(interval, completeness);
    }
    finally {
      lock.readLock().unlock();
    }

    return FluentIterable
        .from(holders)
        .transformAndConcat(TimelineObjectHolder::getObject)
        .transform(PartitionChunk::getObject)
        .toSet();
  }

  public void add(final Interval interval, VersionType version, PartitionChunk<ObjectType> object)
  {
    addAll(Iterators.singletonIterator(new PartitionChunkEntry<>(interval, version, object)));
  }

  public void addAll(
      final Iterator<PartitionChunkEntry<VersionType, ObjectType>> objects
  )
  {
    lock.writeLock().lock();

    try {
      final IdentityHashMap<TimelineEntry, Interval> allEntries = new IdentityHashMap<>();

      while (objects.hasNext()) {
        PartitionChunkEntry<VersionType, ObjectType> chunkEntry = objects.next();
        PartitionChunk<ObjectType> object = chunkEntry.getChunk();
        Interval interval = chunkEntry.getInterval();
        VersionType version = chunkEntry.getVersion();
        Map<VersionType, TimelineEntry> exists = allTimelineEntries.get(interval);
        TimelineEntry entry;

        if (exists == null) {
          entry = new TimelineEntry(interval, version, new PartitionHolder<>(object));
          TreeMap<VersionType, TimelineEntry> versionEntry = new TreeMap<>(versionComparator);
          versionEntry.put(version, entry);
          allTimelineEntries.put(interval, versionEntry);
          numObjects.incrementAndGet();
        } else {
          entry = exists.get(version);

          if (entry == null) {
            entry = new TimelineEntry(interval, version, new PartitionHolder<>(object));
            exists.put(version, entry);
            numObjects.incrementAndGet();
          } else {
            PartitionHolder<ObjectType> partitionHolder = entry.getPartitionHolder();
            if (partitionHolder.add(object)) {
              numObjects.incrementAndGet();
            }
          }
        }

        allEntries.put(entry, interval);
      }

      // "isComplete" is O(objects in holder) so defer it to the end of addAll.
      for (Entry<TimelineEntry, Interval> entry : allEntries.entrySet()) {
        Interval interval = entry.getValue();

        if (entry.getKey().getPartitionHolder().isComplete()) {
          add(completePartitionsTimeline, interval, entry.getKey());
        }

        add(incompletePartitionsTimeline, interval, entry.getKey());
      }
    }
    finally {
      lock.writeLock().unlock();
    }
  }

  @Nullable
  public PartitionChunk<ObjectType> remove(Interval interval, VersionType version, PartitionChunk<ObjectType> chunk)
  {
    lock.writeLock().lock();
    try {
      Map<VersionType, TimelineEntry> versionEntries = allTimelineEntries.get(interval);
      if (versionEntries == null) {
        return null;
      }

      TimelineEntry entry = versionEntries.get(version);
      if (entry == null) {
        return null;
      }

      PartitionChunk<ObjectType> removedChunk = entry.getPartitionHolder().remove(chunk);
      if (removedChunk == null) {
        return null;
      }
      numObjects.decrementAndGet();
      if (entry.getPartitionHolder().isEmpty()) {
        versionEntries.remove(version);
        if (versionEntries.isEmpty()) {
          allTimelineEntries.remove(interval);
        }

        remove(incompletePartitionsTimeline, interval, entry, true);
      }

      remove(completePartitionsTimeline, interval, entry, false);

      return removedChunk;
    }
    finally {
      lock.writeLock().unlock();
    }
  }

  @Override
  @Nullable
  public PartitionChunk<ObjectType> findChunk(Interval interval, VersionType version, int partitionNum)
  {
    lock.readLock().lock();
    try {
      for (Entry<Interval, TreeMap<VersionType, TimelineEntry>> entry : allTimelineEntries.entrySet()) {
        if (entry.getKey().equals(interval) || entry.getKey().contains(interval)) {
          TimelineEntry foundEntry = entry.getValue().get(version);
          if (foundEntry != null) {
            return foundEntry.getPartitionHolder().getChunk(partitionNum);
          }
        }
      }

      return null;
    }
    finally {
      lock.readLock().unlock();
    }
  }

  /**
   * Does a lookup for the objects representing the given time interval.  Will *only* return
   * PartitionHolders that are {@linkplain PartitionHolder#isComplete() complete}.
   *
   * @param interval interval to find objects for
   *
   * @return Holders representing the interval that the objects exist for, PartitionHolders
   * are guaranteed to be complete
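   *
   * A small sketch of how the result is typically consumed together with {@link #getAllObjects} (the
   * {@code timeline} and {@code queryInterval} variables and the {@code DataSegment} object type are
   * illustrative assumptions):
   *
   * <pre>{@code
   * List<TimelineObjectHolder<String, DataSegment>> holders = timeline.lookup(queryInterval);
   * for (DataSegment segment : VersionedIntervalTimeline.getAllObjects(holders)) {
   *   // every segment here belongs to a complete, visible partition set overlapping queryInterval
   * }
   * }</pre>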
   */
  @Override
  public List<TimelineObjectHolder<VersionType, ObjectType>> lookup(Interval interval)
  {
    lock.readLock().lock();
    try {
      return lookup(interval, Partitions.ONLY_COMPLETE);
    }
    finally {
      lock.readLock().unlock();
    }
  }

  @Override
  public List<TimelineObjectHolder<VersionType, ObjectType>> lookupWithIncompletePartitions(Interval interval)
  {
    lock.readLock().lock();
    try {
      return lookup(interval, Partitions.INCOMPLETE_OK);
    }
    finally {
      lock.readLock().unlock();
    }
  }

  public boolean isEmpty()
  {
    lock.readLock().lock();
    try {
      return completePartitionsTimeline.isEmpty();
    }
    finally {
      lock.readLock().unlock();
    }
  }

  public TimelineObjectHolder<VersionType, ObjectType> first()
  {
    lock.readLock().lock();
    try {
      return timelineEntryToObjectHolder(completePartitionsTimeline.firstEntry().getValue());
    }
    finally {
      lock.readLock().unlock();
    }
  }

  public TimelineObjectHolder<VersionType, ObjectType> last()
  {
    lock.readLock().lock();
    try {
      return timelineEntryToObjectHolder(completePartitionsTimeline.lastEntry().getValue());
    }
    finally {
      lock.readLock().unlock();
    }
  }

  private TimelineObjectHolder<VersionType, ObjectType> timelineEntryToObjectHolder(TimelineEntry entry)
  {
    return new TimelineObjectHolder<>(
        entry.getTrueInterval(),
        entry.getTrueInterval(),
        entry.getVersion(),
        PartitionHolder.copyWithOnlyVisibleChunks(entry.getPartitionHolder())
    );
  }

  /**
   * This method should be deduplicated with DataSourcesSnapshot.determineOvershadowedSegments(): see
   * https://github.com/apache/druid/issues/8070.
   */
  public Set<TimelineObjectHolder<VersionType, ObjectType>> findFullyOvershadowed()
  {
    lock.readLock().lock();
    try {
      // 1. Start from all timelineEntries and remove the visible ones, leaving only the non-visible timelineEntries.
      final Map<Interval, Map<VersionType, TimelineEntry>> overshadowedPartitionsTimeline =
          computeOvershadowedPartitionsTimeline();

      final Set<TimelineObjectHolder<VersionType, ObjectType>> overshadowedObjects = overshadowedPartitionsTimeline
          .values()
          .stream()
          .flatMap((Map<VersionType, TimelineEntry> entry) -> entry.values().stream())
          .map(entry -> new TimelineObjectHolder<>(
              entry.getTrueInterval(),
              entry.getTrueInterval(),
              entry.getVersion(),
              PartitionHolder.deepCopy(entry.getPartitionHolder())
          ))
          .collect(Collectors.toSet());

      // 2. Visible timelineEntries can also have overshadowed objects. Add them to the result too.
      for (TimelineEntry entry : incompletePartitionsTimeline.values()) {
        final List<PartitionChunk<ObjectType>> overshadowedEntries = entry.partitionHolder.getOvershadowed();
        if (!overshadowedEntries.isEmpty()) {
          overshadowedObjects.add(
              new TimelineObjectHolder<>(
                  entry.trueInterval,
                  entry.version,
                  new PartitionHolder<>(overshadowedEntries)
              )
          );
        }
      }

      return overshadowedObjects;
    }
    finally {
      lock.readLock().unlock();
    }
  }

  private Map<Interval, Map<VersionType, TimelineEntry>> computeOvershadowedPartitionsTimeline()
  {
    final Map<Interval, Map<VersionType, TimelineEntry>> overshadowedPartitionsTimeline = new HashMap<>();
    allTimelineEntries.forEach((Interval interval, TreeMap<VersionType, TimelineEntry> versionEntry) -> {
      @SuppressWarnings("unchecked")
      Map<VersionType, TimelineEntry> versionEntryCopy = (TreeMap<VersionType, TimelineEntry>) versionEntry.clone();
      overshadowedPartitionsTimeline.put(interval, versionEntryCopy);
    });

    for (TimelineEntry entry : completePartitionsTimeline.values()) {
      overshadowedPartitionsTimeline.computeIfPresent(
          entry.getTrueInterval(),
          (Interval interval, Map<VersionType, TimelineEntry> versionEntry) -> {
            versionEntry.remove(entry.getVersion());
            return versionEntry.isEmpty() ? null : versionEntry;
          }
      );
    }

    for (TimelineEntry entry : incompletePartitionsTimeline.values()) {
      overshadowedPartitionsTimeline.computeIfPresent(
          entry.getTrueInterval(),
          (Interval interval, Map<VersionType, TimelineEntry> versionEntry) -> {
            versionEntry.remove(entry.getVersion());
            return versionEntry.isEmpty() ? null : versionEntry;
          }
      );
    }
    return overshadowedPartitionsTimeline;
  }

  public boolean isOvershadowed(Interval interval, VersionType version, ObjectType object)
  {
    lock.readLock().lock();
    try {
      TimelineEntry entry = completePartitionsTimeline.get(interval);
      if (entry != null) {
        final int majorVersionCompare = versionComparator.compare(version, entry.getVersion());
        if (majorVersionCompare == 0) {
          // If the major versions of the timeline entry and target segment are equal, and
          // the maximum minor version among the segments is not greater than the minor version of the target segment,
          // none of the segments in the interval can overshadow it.
          if (entry.getMaxMinorVersion() > object.getMinorVersion()) {
            for (PartitionChunk<ObjectType> chunk : entry.partitionHolder) {
              if (chunk.getObject().overshadows(object)) {
                return true;
              }
            }
          }
          return false;
        } else {
          return majorVersionCompare < 0;
        }
      }

      Interval lower = completePartitionsTimeline.floorKey(
          new Interval(interval.getStart(), DateTimes.MAX)
      );

      if (lower == null || !lower.overlaps(interval)) {
        return false;
      }

      Interval prev = null;
      Interval curr = lower;

      do {
        if (curr == null ||  //no further keys
            (prev != null && curr.getStartMillis() > prev.getEndMillis()) //a discontinuity
        ) {
          return false;
        }

        final TimelineEntry timelineEntry = completePartitionsTimeline.get(curr);
        final int versionCompare = versionComparator.compare(version, timelineEntry.getVersion());

        //lower or same version
        if (versionCompare > 0) {
          return false;
        } else if (versionCompare == 0) {
          // Intentionally use the Iterators API instead of the stream API for performance.
          //noinspection ConstantConditions
          final boolean nonOvershadowedObject = Iterators.all(
              timelineEntry.partitionHolder.iterator(), chunk -> !chunk.getObject().overshadows(object)
          );
          if (nonOvershadowedObject) {
            return false;
          }
        }

        prev = curr;
        curr = completePartitionsTimeline.higherKey(curr);

      } while (interval.getEndMillis() > prev.getEndMillis());

      return true;
    }
    finally {
      lock.readLock().unlock();
    }
  }

  @GuardedBy("lock")
  private void add(
      NavigableMap<Interval, TimelineEntry> timeline,
      Interval interval,
      TimelineEntry entry
  )
  {
    TimelineEntry existsInTimeline = timeline.get(interval);

    if (existsInTimeline != null) {
      int compare = versionComparator.compare(entry.getVersion(), existsInTimeline.getVersion());
      if (compare > 0) {
        addIntervalToTimeline(interval, entry, timeline);
      }
      return;
    }

    Interval lowerKey = timeline.lowerKey(interval);

    if (lowerKey != null) {
      if (addAtKey(timeline, lowerKey, entry)) {
        return;
      }
    }

    Interval higherKey = timeline.higherKey(interval);

    if (higherKey != null) {
      if (addAtKey(timeline, higherKey, entry)) {
        return;
      }
    }

    addIntervalToTimeline(interval, entry, timeline);
  }

  /**
   * @return boolean flag indicating whether or not we inserted or discarded something
   */
  @GuardedBy("lock")
  private boolean addAtKey(
      NavigableMap<Interval, TimelineEntry> timeline,
      Interval key,
      TimelineEntry entry
  )
  {
    boolean retVal = false;
    Interval currKey = key;
    Interval entryInterval = entry.getTrueInterval();

    if (!currKey.overlaps(entryInterval)) {
      return false;
    }

    while (entryInterval != null && currKey != null && currKey.overlaps(entryInterval)) {
      final Interval nextKey = timeline.higherKey(currKey);

      final int versionCompare = versionComparator.compare(
          entry.getVersion(),
          timeline.get(currKey).getVersion()
      );

      if (versionCompare < 0) {
        // since the entry version is lower than the existing one, the existing one overwrites the given entry
        // if overlapped.
        if (currKey.contains(entryInterval)) {
          // the version of the entry of currKey is larger than that of the given entry. Discard it
          return true;
        } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
          //       | entry |
          //     | cur |
          // =>        |new|
          entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
        } else {
          //     | entry |
          //         | cur |
          // =>  |new|
          addIntervalToTimeline(new Interval(entryInterval.getStart(), currKey.getStart()), entry, timeline);

          //     |   entry   |
          //       | cur |
          // =>          |new|
          if (entryInterval.getEnd().isAfter(currKey.getEnd())) {
            entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
          } else {
            // Discard this entry since there is no portion of the entry interval that goes past the end of the curr
            // key interval.
            entryInterval = null;
          }
        }
      } else if (versionCompare > 0) {
        // since the entry version is greater than the existing one, the given entry overwrites the existing one
        // if overlapped.
        final TimelineEntry oldEntry = timeline.remove(currKey);

        if (currKey.contains(entryInterval)) {
          //     |      cur      |
          //         | entry |
          // =>  |old|  new  |old|
          addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
          addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
          addIntervalToTimeline(entryInterval, entry, timeline);

          return true;
        } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
          //     |   cur  |
          //         |   entry   |
          // =>  |old|
          addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
        } else if (entryInterval.getEnd().isBefore(currKey.getEnd())) {
          //            |   cur  |
          //     |   entry   |
          // =>              |old|
          addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
        }
      } else {
        if (timeline.get(currKey).equals(entry)) {
          // This occurs when restoring segments
          timeline.remove(currKey);
        } else {
          throw new UOE(
              "Cannot add overlapping segments [%s and %s] with the same version [%s]",
              currKey,
              entryInterval,
              entry.getVersion()
          );
        }
      }

      currKey = nextKey;
      retVal = true;
    }

    addIntervalToTimeline(entryInterval, entry, timeline);

    return retVal;
  }

  @GuardedBy("lock")
  private void addIntervalToTimeline(
      Interval interval,
      TimelineEntry entry,
      NavigableMap<Interval, TimelineEntry> timeline
  )
  {
    if (interval != null && interval.toDurationMillis() > 0) {
      timeline.put(interval, entry);
    }
  }

  @GuardedBy("lock")
  private void remove(
      NavigableMap<Interval, TimelineEntry> timeline,
      Interval interval,
      TimelineEntry entry,
      boolean incompleteOk
  )
  {
    List<Interval> intervalsToRemove = new ArrayList<>();
    TimelineEntry removed = timeline.get(interval);

    if (removed == null) {
      Iterator<Entry<Interval, TimelineEntry>> iter = timeline.entrySet().iterator();
      while (iter.hasNext()) {
        Entry<Interval, TimelineEntry> timelineEntry = iter.next();
        if (timelineEntry.getValue() == entry) {
          intervalsToRemove.add(timelineEntry.getKey());
        }
      }
    } else {
      intervalsToRemove.add(interval);
    }

    for (Interval i : intervalsToRemove) {
      remove(timeline, i, incompleteOk);
    }
  }

  @GuardedBy("lock")
  private void remove(
      NavigableMap<Interval, TimelineEntry> timeline,
      Interval interval,
      boolean incompleteOk
  )
  {
    timeline.remove(interval);

    for (Entry<Interval, TreeMap<VersionType, TimelineEntry>> versionEntry : allTimelineEntries.entrySet()) {
      if (versionEntry.getKey().overlap(interval) != null) {
        if (incompleteOk) {
          add(timeline, versionEntry.getKey(), versionEntry.getValue().lastEntry().getValue());
        } else {
          for (VersionType ver : versionEntry.getValue().descendingKeySet()) {
            TimelineEntry timelineEntry = versionEntry.getValue().get(ver);
            if (timelineEntry.getPartitionHolder().isComplete()) {
              add(timeline, versionEntry.getKey(), timelineEntry);
              break;
            }
          }
        }
      }
    }
  }

  @GuardedBy("lock")
  private List<TimelineObjectHolder<VersionType, ObjectType>> lookup(Interval interval, Partitions completeness)
  {
    if (interval.getStartMillis() == interval.getEndMillis()) {
      return Collections.emptyList();
    }

    List<TimelineObjectHolder<VersionType, ObjectType>> retVal = new ArrayList<>();
    NavigableMap<Interval, TimelineEntry> timeline;
    if (completeness == Partitions.INCOMPLETE_OK) {
      timeline = incompletePartitionsTimeline;
    } else {
      timeline = completePartitionsTimeline;
    }

    for (Entry<Interval, TimelineEntry> entry : timeline.entrySet()) {
      Interval timelineInterval = entry.getKey();
      TimelineEntry val = entry.getValue();

      // exclude empty partition holders (i.e. tombstones) since they do not add value
      // for higher level code...they have no data rows...
      if ((!skipObjectsWithNoData || val.partitionHolder.hasData()) && timelineInterval.overlaps(interval)) {
        retVal.add(
            new TimelineObjectHolder<>(
                timelineInterval,
                val.getTrueInterval(),
                val.getVersion(),
                PartitionHolder.copyWithOnlyVisibleChunks(val.getPartitionHolder())
            )
        );
      }
    }

    if (retVal.isEmpty()) {
      return retVal;
    }

    TimelineObjectHolder<VersionType, ObjectType> firstEntry = retVal.get(0);
    if (interval.overlaps(firstEntry.getInterval()) &&
        interval.getStart().isAfter(firstEntry.getInterval().getStart())) {
      retVal.set(
          0,
          new TimelineObjectHolder<>(
              new Interval(interval.getStart(), firstEntry.getInterval().getEnd()),
              firstEntry.getTrueInterval(),
              firstEntry.getVersion(),
              firstEntry.getObject()
          )
      );
    }

    TimelineObjectHolder<VersionType, ObjectType> lastEntry = retVal.get(retVal.size() - 1);
    if (interval.overlaps(lastEntry.getInterval()) && interval.getEnd().isBefore(lastEntry.getInterval().getEnd())) {
      retVal.set(
          retVal.size() - 1,
          new TimelineObjectHolder<>(
              new Interval(lastEntry.getInterval().getStart(), interval.getEnd()),
              lastEntry.getTrueInterval(),
              lastEntry.getVersion(),
              lastEntry.getObject()
          )
      );
    }

    return retVal;
  }

  public class TimelineEntry
  {
    private final Interval trueInterval;
    private final VersionType version;
    private final PartitionHolder<ObjectType> partitionHolder;

    TimelineEntry(Interval trueInterval, VersionType version, PartitionHolder<ObjectType> partitionHolder)
    {
      this.trueInterval = Preconditions.checkNotNull(trueInterval);
      this.version = Preconditions.checkNotNull(version);
      this.partitionHolder = Preconditions.checkNotNull(partitionHolder);
    }

    Interval getTrueInterval()
    {
      return trueInterval;
    }

    public VersionType getVersion()
    {
      return version;
    }

    public PartitionHolder<ObjectType> getPartitionHolder()
    {
      return partitionHolder;
    }

    /**
     * Returns the maximum minor version across all the added segments.
     * We do not handle updates of this variable when segments are removed for the sake of simplicity.
     */
    private short getMaxMinorVersion()
    {
      return partitionHolder.getMaxMinorVersion();
    }

    @Override
    public boolean equals(Object o)
    {
      if (this == o) {
        return true;
      }

      if (o == null || getClass() != o.getClass()) {
        return false;
      }

      final TimelineEntry that = (TimelineEntry) o;

      if (!this.trueInterval.equals(that.trueInterval)) {
        return false;
      }

      if (!this.version.equals(that.version)) {
        return false;
      }

      if (!this.partitionHolder.equals(that.partitionHolder)) {
        return false;
      }

      return true;
    }

    @Override
    public int hashCode()
    {
      return Objects.hash(trueInterval, version, partitionHolder);
    }
  }

  /**
   * Stores a {@link PartitionChunk} for a given interval and version. The
   * interval corresponds to the {@link LogicalSegment#getInterval()}
   */
  public static class PartitionChunkEntry<VersionType, ObjectType>
  {
    private final Interval interval;
    private final VersionType version;
    private final PartitionChunk<ObjectType> chunk;

    public PartitionChunkEntry(
        Interval interval,
        VersionType version,
        PartitionChunk<ObjectType> chunk
    )
    {
      this.interval = interval;
      this.version = version;
      this.chunk = chunk;
    }

    public Interval getInterval()
    {
      return interval;
    }

    public VersionType getVersion()
    {
      return version;
    }

    public PartitionChunk<ObjectType> getChunk()
    {
      return chunk;
    }
  }
}