org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager Maven / Gradle / Ivy

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.lockmgr;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.DriverState;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
import org.apache.hadoop.hive.ql.metadata.*;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Shared lock manager for a dedicated Hive server; all locks are managed in memory.
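 *
 * Locks are kept in a trie of Node objects keyed by the components of the lock
 * path (database, table, partition), each guarded by its own ReentrantLock, and
 * failed acquisitions are retried with a configurable sleep between attempts.
 *
 * A minimal usage sketch; {@code ctx} and {@code lockObject} are illustrative
 * placeholders, not values defined in this file:
 * <pre>{@code
 *   EmbeddedLockManager mgr = new EmbeddedLockManager();
 *   mgr.setContext(ctx);  // a HiveLockManagerCtx carrying the session's HiveConf
 *   HiveLock lock = mgr.lock(lockObject, HiveLockMode.SHARED, true);
 *   if (lock != null) {   // null means the lock could not be acquired
 *     try {
 *       // ... run the query ...
 *     } finally {
 *       mgr.unlock(lock);
 *     }
 *   }
 *   mgr.close();
 * }</pre>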
 */
public class EmbeddedLockManager implements HiveLockManager {

  private static final Logger LOG = LoggerFactory.getLogger("EmbeddedHiveLockManager");

  private final Node root = new Node();

  private HiveLockManagerCtx ctx;

  private long sleepTime = 1000;
  private int numRetriesForLock = 0;
  private int numRetriesForUnLock = 0;

  public EmbeddedLockManager() {
  }

  @Override
  public void setContext(HiveLockManagerCtx ctx) throws LockException {
    this.ctx = ctx;
    refresh();
  }

  @Override
  public HiveLock lock(HiveLockObject key, HiveLockMode mode, boolean keepAlive)
      throws LockException {
    LOG.debug("Acquiring lock for {} with mode {} {}", key.getName(), mode,
        key.getData().getLockMode());
    return lock(key, mode, numRetriesForLock, sleepTime);
  }

  @Override
  public List<HiveLock> lock(List<HiveLockObj> objs, boolean keepAlive, DriverState driverState)
      throws LockException {
    return lock(objs, numRetriesForLock, sleepTime);
  }

  @Override
  public void unlock(HiveLock hiveLock) throws LockException {
    unlock(hiveLock, numRetriesForUnLock, sleepTime);
  }

  @Override
  public void releaseLocks(List<HiveLock> hiveLocks) {
    releaseLocks(hiveLocks, numRetriesForUnLock, sleepTime);
  }

  @Override
  public List<HiveLock> getLocks(boolean verifyTablePartitions, boolean fetchData)
      throws LockException {
    return getLocks(verifyTablePartitions, fetchData, ctx.getConf());
  }

  @Override
  public List<HiveLock> getLocks(HiveLockObject key, boolean verifyTablePartitions,
      boolean fetchData) throws LockException {
    return getLocks(key, verifyTablePartitions, fetchData, ctx.getConf());
  }

  @Override
  public void prepareRetry() {
  }

  @Override
  public void refresh() {
    HiveConf conf = ctx.getConf();
    sleepTime = conf.getTimeVar(
        HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES, TimeUnit.MILLISECONDS);
    numRetriesForLock = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES);
    numRetriesForUnLock = conf.getIntVar(HiveConf.ConfVars.HIVE_UNLOCK_NUMRETRIES);
  }

  public HiveLock lock(HiveLockObject key, HiveLockMode mode, int numRetriesForLock, long sleepTime)
      throws LockException {
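    // One initial attempt plus up to numRetriesForLock retries, sleeping
    // sleepTime milliseconds between attempts; null means the lock was not acquired.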
    for (int i = 0; i <= numRetriesForLock; i++) {
      if (i > 0) {
        sleep(sleepTime);
      }
      HiveLock lock = lockPrimitive(key, mode);
      if (lock != null) {
        return lock;
      }
    }
    return null;
  }

  private void sleep(long sleepTime) {
    try {
      Thread.sleep(sleepTime);
    } catch (InterruptedException e) {
      // ignore
    }
  }

  private List<HiveLock> lock(List<HiveLockObj> objs, int numRetriesForLock, long sleepTime)
      throws LockException {
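    // Sort so that every query attempts the locks in the same order, then try to
    // acquire the whole batch; a failed batch is released and retried as a unit.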
    sortLocks(objs);
    if (LOG.isDebugEnabled()) {
      for (HiveLockObj obj : objs) {
        LOG.debug("Acquiring lock for {} with mode {}", obj.getObj().getName(),
            obj.getMode());
      }
    }

    for (int i = 0; i <= numRetriesForLock; i++) {
      if (i > 0) {
        sleep(sleepTime);
      }
      List<HiveLock> locks = lockPrimitive(objs, numRetriesForLock, sleepTime);
      if (locks != null) {
        return locks;
      }
    }
    return null;
  }

  private HiveLock lockPrimitive(HiveLockObject key, HiveLockMode mode) throws LockException {
    if (root.lock(key.getPaths(), key.getData(), mode == HiveLockMode.EXCLUSIVE)) {
      return new SimpleHiveLock(key, mode);
    }
    return null;
  }

  private List<HiveLock> lockPrimitive(List<HiveLockObj> objs, int numRetriesForLock,
      long sleepTime) throws LockException {
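    // All-or-nothing: if any object cannot be locked, release what was acquired
    // so far and return null so the caller can retry the whole batch.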
    List<HiveLock> locks = new ArrayList<HiveLock>();
    for (HiveLockObj obj : objs) {
      HiveLock lock = lockPrimitive(obj.getObj(), obj.getMode());
      if (lock == null) {
        releaseLocks(locks, numRetriesForLock, sleepTime);
        return null;
      }
      locks.add(lock);
    }
    return locks;
  }

  private void sortLocks(List<HiveLockObj> objs) {
    Collections.sort(objs, new Comparator<HiveLockObj>() {
      @Override
      public int compare(HiveLockObj o1, HiveLockObj o2) {
        int cmp = o1.getName().compareTo(o2.getName());
        if (cmp == 0) {
          if (o1.getMode() == o2.getMode()) {
            return cmp;
          }
          // EXCLUSIVE locks occur before SHARED locks
          if (o1.getMode() == HiveLockMode.EXCLUSIVE) {
            return -1;
          }
          return +1;
        }
        return cmp;
      }
    });
  }

  public void unlock(HiveLock hiveLock, int numRetriesForUnLock, long sleepTime)
      throws LockException {
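    // Retry in case the node's tryLock is momentarily contended; if every attempt
    // fails, surface the failure as a LockException.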
    String[] paths = hiveLock.getHiveLockObject().getPaths();
    HiveLockObjectData data = hiveLock.getHiveLockObject().getData();
    for (int i = 0; i <= numRetriesForUnLock; i++) {
      if (i > 0) {
        sleep(sleepTime);
      }
      if (root.unlock(paths, data)) {
        return;
      }
    }
    throw new LockException("Failed to release lock " + hiveLock);
  }

  public void releaseLocks(List<HiveLock> hiveLocks, int numRetriesForUnLock, long sleepTime) {
    for (HiveLock locked : hiveLocks) {
      try {
        unlock(locked, numRetriesForUnLock, sleepTime);
      } catch (LockException e) {
        LOG.info("Failed to unlock ", e);
      }
    }
  }

  public List<HiveLock> getLocks(boolean verifyTablePartitions, boolean fetchData, HiveConf conf)
      throws LockException {
    return root.getLocks(verifyTablePartitions, fetchData, conf);
  }

  public List<HiveLock> getLocks(HiveLockObject key, boolean verifyTablePartitions,
      boolean fetchData, HiveConf conf) throws LockException {
    return root.getLocks(key.getPaths(), verifyTablePartitions, fetchData, conf);
  }

  // from ZooKeeperHiveLockManager
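  // Rebuilds a HiveLockObject from the lock path components. When 'verify' is set,
  // the table (and partition, if present) is looked up through the metastore; a
  // missing table yields null and a missing partition falls back to a DummyPartition.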
  private HiveLockObject verify(boolean verify, String[] names, HiveLockObjectData data,
      HiveConf conf) throws LockException {
    if (!verify) {
      return new HiveLockObject(names, data);
    }
    String database = names[0];
    String table = names[1];

    try {
      Hive db = Hive.get(conf);
      Table tab = db.getTable(database, table, false);
      if (tab == null) {
        return null;
      }
      if (names.length == 2) {
        return new HiveLockObject(tab, data);
      }
      Map<String, String> partSpec = new HashMap<String, String>();
      for (int indx = 2; indx < names.length; indx++) {
        String[] partVals = names[indx].split("=");
        partSpec.put(partVals[0], partVals[1]);
      }

      Partition partn;
      try {
        partn = db.getPartition(tab, partSpec, false);
      } catch (HiveException e) {
        partn = null;
      }

      if (partn == null) {
        return new HiveLockObject(new DummyPartition(tab, null, partSpec), data);
      }

      return new HiveLockObject(partn, data);
    } catch (Exception e) {
      throw new LockException(e);
    }
  }

  @Override
  public void close() {
    root.lock.lock();
    try {
      root.datas = null;
      root.children = null;
    } finally {
      root.lock.unlock();
    }
  }

  private class Node {

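    // One trie node per lock-path component (database / table / partition spec).
    // 'children' maps the next path component to its child node, 'datas' maps a
    // query id to the lock data held at this node, and 'exclusive' records the
    // mode of the most recent acquisition. The per-node ReentrantLock guards
    // structural changes; acquisition uses tryLock so it never blocks.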
    private boolean exclusive;
    private Map<String, Node> children;
    private Map<String, HiveLockObjectData> datas;
    private final ReentrantLock lock = new ReentrantLock();

    public Node() {
    }

    public void set(HiveLockObjectData data, boolean exclusive) {
      this.exclusive = exclusive;
      if (datas == null) {
        datas = new HashMap<String, HiveLockObjectData>(3);
      }
      datas.put(data.getQueryId(), data);
    }

    public boolean lock(String[] paths, HiveLockObjectData data, boolean exclusive) {
      return lock(paths, 0, data, exclusive);
    }

    public boolean unlock(String[] paths, HiveLockObjectData data) {
      return unlock(paths, 0, data);
    }

    private List<HiveLock> getLocks(boolean verify, boolean fetchData, HiveConf conf)
        throws LockException {
      if (!root.hasChild()) {
        return Collections.emptyList();
      }
      List<HiveLock> locks = new ArrayList<HiveLock>();
      getLocks(new Stack<String>(), verify, fetchData, locks, conf);
      return locks;
    }

    private List<HiveLock> getLocks(String[] paths, boolean verify, boolean fetchData,
        HiveConf conf) throws LockException {
      if (!root.hasChild()) {
        return Collections.emptyList();
      }
      List<HiveLock> locks = new ArrayList<HiveLock>();
      getLocks(paths, 0, verify, fetchData, locks, conf);
      return locks;
    }

    private boolean lock(String[] paths, int index, HiveLockObjectData data, boolean exclusive) {
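      // Non-blocking descent: a contended node fails the attempt immediately and
      // the caller retries. At the end of the path, refuse if the node is already
      // held exclusively, or if an exclusive request finds any existing holder.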
      if (!lock.tryLock()) {
        return false;
      }
      try {
        if (index == paths.length) {
          if (this.exclusive || exclusive && hasLock()) {
            return false;
          }
          set(data, exclusive);
          return true;
        }
        Node child;
        if (children == null) {
          children = new HashMap<String, Node>(3);
          children.put(paths[index], child = new Node());
        } else {
          child = children.get(paths[index]);
          if (child == null) {
            children.put(paths[index], child = new Node());
          }
        }
        return child.lock(paths, index + 1, data, exclusive);
      } finally {
        lock.unlock();
      }
    }

    private boolean unlock(String[] paths, int index, HiveLockObjectData data) {
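      // Mirrors lock(): descend with tryLock, remove this query's lock data at the
      // leaf, and prune nodes holding no locks and no children on the way back up.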
      if (!lock.tryLock()) {
        return false;
      }
      try {
        if (index == paths.length) {
          if (hasLock()) {
            datas.remove(data.getQueryId());
          }
          return true;
        }
        Node child = children == null ? null : children.get(paths[index]);
        if (child == null) {
          return true; // should not happen
        }
        if (child.unlock(paths, index + 1, data)) {
          if (!child.hasLock() && !child.hasChild()) {
            children.remove(paths[index]);
          }
          return true;
        }
        return false;
      } finally {
        lock.unlock();
      }
    }

    private void getLocks(Stack<String> names, boolean verify,
        boolean fetchData, List<HiveLock> locks, HiveConf conf) throws LockException {
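      // Depth-first walk: report locks held at this node, then recurse into each
      // child while keeping the current path on the 'names' stack.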
      lock.lock();
      try {
        if (hasLock()) {
          getLocks(names.toArray(new String[names.size()]), verify, fetchData, locks, conf);
        }
        if (children != null) {
          for (Map.Entry<String, Node> entry : children.entrySet()) {
            names.push(entry.getKey());
            entry.getValue().getLocks(names, verify, fetchData, locks, conf);
            names.pop();
          }
        }
      } finally {
        lock.unlock();
      }
    }

    private void getLocks(String[] paths, int index, boolean verify,
        boolean fetchData, List<HiveLock> locks, HiveConf conf) throws LockException {
      lock.lock();
      try {
        if (index == paths.length) {
          getLocks(paths, verify, fetchData, locks, conf);
          return;
        }
        Node child = children.get(paths[index]);
        if (child != null) {
          child.getLocks(paths, index + 1, verify, fetchData, locks, conf);
        }
      } finally {
        lock.unlock();
      }
    }

    private void getLocks(String[] paths, boolean verify, boolean fetchData, List<HiveLock> locks,
        HiveConf conf) throws LockException {
      HiveLockMode lockMode = getLockMode();
      if (fetchData) {
        for (HiveLockObjectData data : datas.values()) {
          HiveLockObject lock = verify(verify, paths, data, conf);
          if (lock != null) {
            locks.add(new SimpleHiveLock(lock, lockMode));
          }
        }
      } else {
        HiveLockObject lock = verify(verify, paths, null, conf);
        if (lock != null) {
          locks.add(new SimpleHiveLock(lock, lockMode));
        }
      }
    }

    private HiveLockMode getLockMode() {
      return exclusive ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED;
    }

    private boolean hasLock() {
      return datas != null && !datas.isEmpty();
    }

    private boolean hasChild() {
      return children != null && !children.isEmpty();
    }
  }

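  /**
   * Immutable in-memory lock handle pairing a HiveLockObject with the mode in
   * which it was granted.
   */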
  private static class SimpleHiveLock extends HiveLock {

    private final HiveLockObject lockObj;
    private final HiveLockMode lockMode;

    public SimpleHiveLock(HiveLockObject lockObj, HiveLockMode lockMode) {
      this.lockObj = lockObj;
      this.lockMode = lockMode;
    }

    @Override
    public HiveLockObject getHiveLockObject() {
      return lockObj;
    }

    @Override
    public HiveLockMode getHiveLockMode() {
      return lockMode;
    }

    @Override
    public String toString() {
      return lockMode + "=" + lockObj.getDisplayName() + "(" + lockObj.getData() + ")";
    }

    @Override
    public boolean equals(Object o) {
      if (!(o instanceof SimpleHiveLock)) {
        return false;
      }

      SimpleHiveLock simpleLock = (SimpleHiveLock) o;
      return lockObj.equals(simpleLock.getHiveLockObject()) &&
          lockMode == simpleLock.getHiveLockMode();
    }
  }
}