/*
* Copyright 2015, The Querydsl Team (http://www.querydsl.com/team)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.querydsl.sql.dml;
import com.querydsl.core.*;
import com.querydsl.core.QueryFlag.Position;
import com.querydsl.core.dml.UpdateClause;
import com.querydsl.core.types.ConstantImpl;
import com.querydsl.core.types.Expression;
import com.querydsl.core.types.Path;
import com.querydsl.core.types.Predicate;
import com.querydsl.sql.Configuration;
import com.querydsl.sql.RelationalPath;
import com.querydsl.sql.SQLBindings;
import com.querydsl.sql.SQLSerializer;
import com.querydsl.sql.types.Null;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.*;
import java.util.function.Supplier;
import java.util.logging.Logger;
import org.jetbrains.annotations.Range;
/**
* Provides a base class for dialect-specific UPDATE clauses.
*
* @author tiwe
* @param <C> The type extending this class.
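*
* <p>A minimal usage sketch. {@code SQLUpdateClause} is the standard concrete subclass of this
* base class; the {@code QEmployee} metamodel and its columns are hypothetical and serve only as
* an illustration:
*
* <pre>{@code
* QEmployee employee = QEmployee.employee;
* SQLUpdateClause update = new SQLUpdateClause(connection, configuration, employee);
* long updated = update
*     .set(employee.salary, 45000)
*     .where(employee.id.eq(10L))
*     .execute();
* }</pre>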
*/
public abstract class AbstractSQLUpdateClause<C extends AbstractSQLUpdateClause<C>>
extends AbstractSQLClause<C> implements UpdateClause<C> {
protected static final Logger logger = Logger.getLogger(AbstractSQLUpdateClause.class.getName());
protected final RelationalPath<?> entity;
protected final List<SQLUpdateBatch> batches = new ArrayList<>();
protected Map<Path<?>, Expression<?>> updates = new LinkedHashMap<>();
protected QueryMetadata metadata = new DefaultQueryMetadata();
protected transient String queryString;
protected transient List<Object> constants;
public AbstractSQLUpdateClause(
Connection connection, Configuration configuration, RelationalPath<?> entity) {
super(configuration, connection);
this.entity = entity;
metadata.addJoin(JoinType.DEFAULT, entity);
}
public AbstractSQLUpdateClause(
Supplier<Connection> connection, Configuration configuration, RelationalPath<?> entity) {
super(configuration, connection);
this.entity = entity;
metadata.addJoin(JoinType.DEFAULT, entity);
}
/**
* Add the given String literal at the given position as a query flag
*
* @param position position
* @param flag query flag
* @return the current object
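*
* <p>Illustrative sketch only; the flag text is an arbitrary vendor-specific fragment, not
* something this class defines:
*
* <pre>{@code
* // e.g. a MySQL-style "UPDATE IGNORE", replacing the statement start
* update.addFlag(Position.START_OVERRIDE, "update ignore ");
* }</pre>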
*/
public C addFlag(Position position, String flag) {
metadata.addFlag(new QueryFlag(position, flag));
return (C) this;
}
/**
* Add the given Expression at the given position as a query flag
*
* @param position position
* @param flag query flag
* @return the current object
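*
* <p>Sketch with an expression-based flag; the template text is illustrative only, and
* {@code Expressions.stringTemplate} comes from {@code com.querydsl.core.types.dsl}:
*
* <pre>{@code
* update.addFlag(Position.END, Expressions.stringTemplate(" OPTION (FAST {0})", 1));
* }</pre>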
*/
public C addFlag(Position position, Expression<?> flag) {
metadata.addFlag(new QueryFlag(position, flag));
return (C) this;
}
/**
* Add the current state of bindings as a batch item
*
* @return the current object
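*
* <p>Sketch of batched execution (hypothetical {@code employee} metamodel as above):
*
* <pre>{@code
* SQLUpdateClause update = new SQLUpdateClause(connection, configuration, employee);
* update.set(employee.salary, 40000).where(employee.id.eq(1L)).addBatch();
* update.set(employee.salary, 50000).where(employee.id.eq(2L)).addBatch();
* long updated = update.execute(); // executes all batch items
* }</pre>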
*/
public C addBatch() {
batches.add(new SQLUpdateBatch(metadata, updates));
updates = new LinkedHashMap<>();
metadata = new DefaultQueryMetadata();
metadata.addJoin(JoinType.DEFAULT, entity);
return (C) this;
}
@Override
public void clear() {
batches.clear();
updates = new LinkedHashMap<>();
metadata = new DefaultQueryMetadata();
metadata.addJoin(JoinType.DEFAULT, entity);
}
protected PreparedStatement createStatement() throws SQLException {
listeners.preRender(context);
SQLSerializer serializer = createSerializer();
serializer.serializeUpdate(metadata, entity, updates);
queryString = serializer.toString();
constants = serializer.getConstants();
logQuery(logger, queryString, constants);
context.addSQL(createBindings(metadata, serializer));
listeners.prepared(context);
listeners.prePrepare(context);
PreparedStatement stmt = connection().prepareStatement(queryString);
setParameters(
stmt, serializer.getConstants(), serializer.getConstantPaths(), metadata.getParams());
context.addPreparedStatement(stmt);
listeners.prepared(context);
return stmt;
}
protected Collection<PreparedStatement> createStatements() throws SQLException {
boolean addBatches = !configuration.getUseLiterals();
listeners.preRender(context);
SQLSerializer serializer = createSerializer();
serializer.serializeUpdate(batches.get(0).getMetadata(), entity, batches.get(0).getUpdates());
queryString = serializer.toString();
constants = serializer.getConstants();
logQuery(logger, queryString, constants);
context.addSQL(createBindings(metadata, serializer));
listeners.rendered(context);
Map<String, PreparedStatement> stmts = new HashMap<>();
// add first batch
listeners.prePrepare(context);
PreparedStatement stmt = connection().prepareStatement(queryString);
setParameters(
stmt, serializer.getConstants(), serializer.getConstantPaths(), metadata.getParams());
if (addBatches) {
stmt.addBatch();
}
stmts.put(serializer.toString(), stmt);
context.addPreparedStatement(stmt);
listeners.prepared(context);
// add other batches
for (int i = 1; i < batches.size(); i++) {
listeners.preRender(context);
serializer = createSerializer();
serializer.serializeUpdate(batches.get(i).getMetadata(), entity, batches.get(i).getUpdates());
context.addSQL(createBindings(metadata, serializer));
listeners.rendered(context);
stmt = stmts.get(serializer.toString());
if (stmt == null) {
listeners.prePrepare(context);
stmt = connection().prepareStatement(serializer.toString());
stmts.put(serializer.toString(), stmt);
context.addPreparedStatement(stmt);
listeners.prepared(context);
}
setParameters(
stmt, serializer.getConstants(), serializer.getConstantPaths(), metadata.getParams());
if (addBatches) {
stmt.addBatch();
}
}
return stmts.values();
}
@Override
public long execute() {
context = startContext(connection(), metadata, entity);
PreparedStatement stmt = null;
Collection<PreparedStatement> stmts = null;
try {
if (batches.isEmpty()) {
stmt = createStatement();
listeners.notifyUpdate(entity, metadata, updates);
listeners.preExecute(context);
int rc = stmt.executeUpdate();
listeners.executed(context);
return rc;
} else {
stmts = createStatements();
listeners.notifyUpdates(entity, batches);
listeners.preExecute(context);
long rc = executeBatch(stmts);
listeners.executed(context);
return rc;
}
} catch (SQLException e) {
onException(context, e);
throw configuration.translate(queryString, constants, e);
} finally {
if (stmt != null) {
close(stmt);
}
if (stmts != null) {
close(stmts);
}
reset();
endContext(context);
}
}
@Override
public List<SQLBindings> getSQL() {
if (batches.isEmpty()) {
SQLSerializer serializer = createSerializer();
serializer.serializeUpdate(metadata, entity, updates);
return Collections.singletonList(createBindings(metadata, serializer));
} else {
List<SQLBindings> builder = new ArrayList<>();
for (SQLUpdateBatch batch : batches) {
SQLSerializer serializer = createSerializer();
serializer.serializeUpdate(batch.getMetadata(), entity, batch.getUpdates());
builder.add(createBindings(metadata, serializer));
}
return Collections.unmodifiableList(builder);
}
}
@Override
public <T> C set(Path<T> path, T value) {
if (value instanceof Expression<?>) {
updates.put(path, (Expression<?>) value);
} else if (value != null) {
updates.put(path, ConstantImpl.create(value));
} else {
setNull(path);
}
return (C) this;
}
@Override
public <T> C set(Path<T> path, Expression<? extends T> expression) {
if (expression != null) {
updates.put(path, expression);
} else {
setNull(path);
}
return (C) this;
}
@Override
public <T> C setNull(Path<T> path) {
updates.put(path, Null.CONSTANT);
return (C) this;
}
@Override
public C set(List<? extends Path<?>> paths, List<?> values) {
for (int i = 0; i < paths.size(); i++) {
if (values.get(i) instanceof Expression) {
updates.put(paths.get(i), (Expression<?>) values.get(i));
} else if (values.get(i) != null) {
updates.put(paths.get(i), ConstantImpl.create(values.get(i)));
} else {
updates.put(paths.get(i), Null.CONSTANT);
}
}
return (C) this;
}
public C where(Predicate p) {
metadata.addWhere(p);
return (C) this;
}
@Override
public C where(Predicate... o) {
for (Predicate p : o) {
metadata.addWhere(p);
}
return (C) this;
}
public C limit(@Range(from = 0, to = Integer.MAX_VALUE) long limit) {
metadata.setModifiers(QueryModifiers.limit(limit));
return (C) this;
}
@Override
public String toString() {
SQLSerializer serializer = createSerializer();
serializer.serializeUpdate(metadata, entity, updates);
return serializer.toString();
}
/**
* Populate the UPDATE clause with the properties of the given bean. The properties need to match
* the fields of the clause's entity instance. Primary key columns are skipped in the population.
*
* @param bean bean to use for population
* @return the current object
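*
* <p>Illustrative only; {@code Employee} is a hypothetical bean whose property names match the
* entity's columns:
*
* <pre>{@code
* Employee bean = new Employee();
* bean.setId(5L);             // primary key, skipped during population
* bean.setFirstname("John");
* update.populate(bean).where(employee.id.eq(5L)).execute();
* }</pre>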
*/
@SuppressWarnings("unchecked")
public C populate(Object bean) {
return populate(bean, DefaultMapper.DEFAULT);
}
/**
* Populate the UPDATE clause with the properties of the given bean using the given Mapper.
*
* @param obj object to use for population
* @param mapper mapper to use
* @return the current object
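*
* <p>Sketch, assuming the {@code WITH_NULL_BINDINGS} variant of {@link DefaultMapper} (which
* also binds null bean properties) is the desired behaviour:
*
* <pre>{@code
* update.populate(bean, DefaultMapper.WITH_NULL_BINDINGS).where(employee.id.eq(5L)).execute();
* }</pre>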
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public <T> C populate(T obj, Mapper<T> mapper) {
Collection<? extends Path<?>> primaryKeyColumns =
entity.getPrimaryKey() != null
? entity.getPrimaryKey().getLocalColumns()
: Collections.<Path<?>>emptyList();
Map<Path<?>, Object> values = mapper.createMap(entity, obj);
for (Map.Entry<Path<?>, Object> entry : values.entrySet()) {
if (!primaryKeyColumns.contains(entry.getKey())) {
set((Path) entry.getKey(), entry.getValue());
}
}
return (C) this;
}
@Override
public boolean isEmpty() {
return updates.isEmpty() && batches.isEmpty();
}
@Override
public int getBatchCount() {
return batches.size();
}
}