/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.rdb;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.oak.plugins.document.rdb.RDBJSONSupport.appendJsonMember;
import static org.apache.jackrabbit.oak.plugins.document.rdb.RDBJSONSupport.appendJsonString;
import static org.apache.jackrabbit.oak.plugins.document.rdb.RDBJSONSupport.appendJsonValue;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.zip.GZIPInputStream;
import javax.annotation.Nonnull;
import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.oak.commons.json.JsopReader;
import org.apache.jackrabbit.oak.commons.json.JsopTokenizer;
import org.apache.jackrabbit.oak.plugins.document.Collection;
import org.apache.jackrabbit.oak.plugins.document.Document;
import org.apache.jackrabbit.oak.plugins.document.DocumentStore;
import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
import org.apache.jackrabbit.oak.plugins.document.NodeDocument;
import org.apache.jackrabbit.oak.plugins.document.Revision;
import org.apache.jackrabbit.oak.plugins.document.StableRevisionComparator;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Serialization/Parsing of documents.
*/
public class RDBDocumentSerializer {

    // store used to construct new Document instances in fromRow()
    private final DocumentStore store;

    // names of document properties persisted in dedicated table columns and
    // therefore excluded from the serialized JSON payload
    private final Set<String> columnProperties;

    private static final String MODIFIED = "_modified";
    private static final String MODCOUNT = "_modCount";
    private static final String CMODCOUNT = "_collisionsModCount";
    private static final String ID = "_id";
    private static final String HASBINARY = NodeDocument.HAS_BINARY_FLAG;
    private static final String DELETEDONCE = NodeDocument.DELETED_ONCE;

    // reverse stable revision ordering used when rebuilding revision maps
    private final Comparator<Revision> comparator = StableRevisionComparator.REVERSE;

    private static final Logger LOG = LoggerFactory.getLogger(RDBDocumentSerializer.class);

    // shared JSON parsing helper; see RDBJSONSupport for the meaning of the flag
    private static final RDBJSONSupport JSON = new RDBJSONSupport(true);
/**
 * Creates a serializer for the given store.
 *
 * @param store the backing {@link DocumentStore}, used to create new
 *            {@link Document} instances during deserialization
 * @param columnProperties names of properties that are persisted in
 *            dedicated table columns and thus excluded from the JSON
 *            serialization
 */
public RDBDocumentSerializer(DocumentStore store, Set<String> columnProperties) {
    this.store = store;
    this.columnProperties = columnProperties;
}
/**
 * Serializes all non-column properties of the {@link Document} into a JSON
 * string.
 *
 * @param doc the document to serialize
 * @return a JSON object containing every entry of {@code doc} whose key is
 *         not one of the {@code columnProperties}
 */
public String asString(@Nonnull Document doc) {
    // generously pre-sized to avoid repeated buffer growth for large documents
    StringBuilder sb = new StringBuilder(32768);
    sb.append("{");
    boolean needComma = false;
    for (Map.Entry<String, Object> entry : doc.entrySet()) {
        String key = entry.getKey();
        if (!columnProperties.contains(key)) {
            if (needComma) {
                sb.append(",");
            }
            appendJsonMember(sb, key, entry.getValue());
            needComma = true;
        }
    }
    sb.append("}");
    return sb.toString();
}
/**
 * Serializes the changes in the {@link UpdateOp} into a JSON array; each
 * entry is another JSON array holding operation, key, revision, and value.
 *
 * @param update the update operation to serialize
 * @return JSON array of change tuples
 * @throws DocumentStoreException if a change contains an operation type
 *             that can not be represented
 */
public String asString(UpdateOp update) {
    StringBuilder sb = new StringBuilder("[");
    boolean needComma = false;
    for (Map.Entry<Key, Operation> change : update.getChanges().entrySet()) {
        Operation op = change.getValue();
        Key key = change.getKey();
        // exclude properties that are serialized into special columns
        if (columnProperties.contains(key.getName()) && null == key.getRevision()) {
            continue;
        }
        if (needComma) {
            sb.append(",");
        }
        sb.append("[");
        // first array element: single-character operation code
        switch (op.type) {
            case INCREMENT:
                sb.append("\"+\",");
                break;
            case SET:
            case SET_MAP_ENTRY:
                sb.append("\"=\",");
                break;
            case MAX:
                sb.append("\"M\",");
                break;
            case REMOVE:
            case REMOVE_MAP_ENTRY:
                sb.append("\"*\",");
                break;
            default:
                throw new DocumentStoreException("Can't serialize " + update.toString() + " for JSON append");
        }
        appendJsonString(sb, key.getName());
        sb.append(",");
        // the revision element is optional: present only for map-entry keys
        Revision rev = key.getRevision();
        if (rev != null) {
            appendJsonString(sb, rev.toString());
            sb.append(",");
        }
        appendJsonValue(sb, op.value);
        sb.append("]");
        needComma = true;
    }
    return sb.append("]").toString();
}
/**
* Reconstructs a {@link Document} based on the persisted {@link RDBRow}.
*/
@Nonnull
public T fromRow(@Nonnull Collection collection, @Nonnull RDBRow row) throws DocumentStoreException {
final String charData = row.getData();
checkNotNull(charData, "RDBRow.getData() is null for collection " + collection + ", id: " + row.getId());
T doc = collection.newDocument(store);
doc.put(ID, row.getId());
if (row.getModified() != RDBRow.LONG_UNSET) {
doc.put(MODIFIED, row.getModified());
}
if (row.getModcount() != RDBRow.LONG_UNSET) {
doc.put(MODCOUNT, row.getModcount());
}
if (RDBDocumentStore.USECMODCOUNT && row.getCollisionsModcount() != RDBRow.LONG_UNSET) {
doc.put(CMODCOUNT, row.getCollisionsModcount());
}
if (row.hasBinaryProperties() != null) {
doc.put(HASBINARY, row.hasBinaryProperties().longValue());
}
if (row.deletedOnce() != null) {
doc.put(DELETEDONCE, row.deletedOnce().booleanValue());
}
byte[] bdata = row.getBdata();
boolean blobInUse = false;
JsopTokenizer json;
// case #1: BDATA (blob) contains base data, DATA (string) contains
// update operations
try {
if (bdata != null && bdata.length != 0) {
String s = fromBlobData(bdata);
json = new JsopTokenizer(s);
json.read('{');
readDocumentFromJson(json, doc);
json.read(JsopReader.END);
blobInUse = true;
}
} catch (Exception ex) {
throw new DocumentStoreException(ex);
}
json = new JsopTokenizer(charData);
// start processing the VARCHAR data
try {
int next = json.read();
if (next == '{') {
if (blobInUse) {
throw new DocumentStoreException("expected literal \"blob\" but found: " + row.getData());
}
readDocumentFromJson(json, doc);
} else if (next == JsopReader.STRING) {
if (!blobInUse) {
throw new DocumentStoreException("did not expect \"blob\" here: " + row.getData());
}
if (!"blob".equals(json.getToken())) {
throw new DocumentStoreException("expected string literal \"blob\"");
}
} else {
throw new DocumentStoreException("unexpected token " + next + " in " + row.getData());
}
next = json.read();
if (next == ',') {
do {
Object ob = JSON.parse(json);
if (!(ob instanceof List)) {
throw new DocumentStoreException("expected array but got: " + ob);
}
List> update = (List>) ob;
for (List