// org.apache.geode.internal.cache.xmlcache.CacheXmlParser — geode-core (Maven / Gradle / Ivy)
// (webpage residue from the artifact browser: "Show all versions of geode-core / Show documentation")
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.xmlcache; import java.io.BufferedInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Collections; import java.util.EmptyStackException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.ServiceLoader; import java.util.Set; import java.util.Stack; import java.util.StringTokenizer; import javax.xml.XMLConstants; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.apache.geode.cache.util.GatewayConflictResolver; import org.apache.logging.log4j.Logger; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.InputSource; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import org.xml.sax.ext.DefaultHandler2; import org.apache.geode.DataSerializable; import org.apache.geode.DataSerializer; import org.apache.geode.InternalGemFireException; import org.apache.geode.cache.Cache; import org.apache.geode.cache.CacheException; import org.apache.geode.cache.CacheListener; import 
org.apache.geode.cache.CacheLoader; import org.apache.geode.cache.CacheWriter; import org.apache.geode.cache.CacheWriterException; import org.apache.geode.cache.CacheXmlException; import org.apache.geode.cache.CustomExpiry; import org.apache.geode.cache.DataPolicy; import org.apache.geode.cache.Declarable; import org.apache.geode.cache.DiskStoreFactory; import org.apache.geode.cache.DiskWriteAttributes; import org.apache.geode.cache.DynamicRegionFactory; import org.apache.geode.cache.EvictionAction; import org.apache.geode.cache.EvictionAttributes; import org.apache.geode.cache.ExpirationAction; import org.apache.geode.cache.ExpirationAttributes; import org.apache.geode.cache.GatewayException; import org.apache.geode.cache.InterestPolicy; import org.apache.geode.cache.LossAction; import org.apache.geode.cache.MembershipAttributes; import org.apache.geode.cache.MirrorType; import org.apache.geode.cache.PartitionAttributes; import org.apache.geode.cache.PartitionResolver; import org.apache.geode.cache.Region; import org.apache.geode.cache.RegionExistsException; import org.apache.geode.cache.ResumptionAction; import org.apache.geode.cache.Scope; import org.apache.geode.cache.SubscriptionAttributes; import org.apache.geode.cache.TimeoutException; import org.apache.geode.cache.TransactionListener; import org.apache.geode.cache.TransactionWriter; import org.apache.geode.cache.asyncqueue.AsyncEventListener; import org.apache.geode.cache.asyncqueue.AsyncEventQueue; import org.apache.geode.cache.asyncqueue.AsyncEventQueueFactory; import org.apache.geode.cache.client.ClientCache; import org.apache.geode.cache.client.PoolFactory; import org.apache.geode.cache.execute.Function; import org.apache.geode.cache.partition.PartitionListener; import org.apache.geode.cache.query.IndexType; import org.apache.geode.cache.query.internal.index.IndexCreationData; import org.apache.geode.cache.server.CacheServer; import org.apache.geode.cache.server.ClientSubscriptionConfig; import 
org.apache.geode.cache.server.ServerLoadProbe; import org.apache.geode.cache.util.ObjectSizer; import org.apache.geode.cache.wan.GatewayEventFilter; import org.apache.geode.cache.wan.GatewayEventSubstitutionFilter; import org.apache.geode.cache.wan.GatewayReceiver; import org.apache.geode.cache.wan.GatewayReceiverFactory; import org.apache.geode.cache.wan.GatewaySender; import org.apache.geode.cache.wan.GatewaySenderFactory; import org.apache.geode.cache.wan.GatewayTransportFilter; import org.apache.geode.compression.Compressor; import org.apache.geode.internal.Assert; import org.apache.geode.internal.ClassPathLoader; import org.apache.geode.internal.InternalDataSerializer; import org.apache.geode.internal.cache.DiskStoreAttributes; import org.apache.geode.internal.cache.DiskWriteAttributesImpl; import org.apache.geode.internal.cache.EvictionAttributesImpl; import org.apache.geode.internal.cache.FixedPartitionAttributesImpl; import org.apache.geode.internal.cache.GemFireCacheImpl; import org.apache.geode.internal.cache.PartitionAttributesImpl; import org.apache.geode.internal.cache.PartitionedRegionHelper; import org.apache.geode.internal.cache.lru.LRUCapacityController; import org.apache.geode.internal.cache.lru.MemLRUCapacityController; import org.apache.geode.internal.datasource.ConfigProperty; import org.apache.geode.internal.i18n.LocalizedStrings; import org.apache.geode.internal.jndi.JNDIInvoker; import org.apache.geode.internal.logging.LogService; import org.apache.geode.internal.logging.log4j.LocalizedMessage; import org.apache.geode.internal.logging.log4j.LogMarker; import org.apache.geode.pdx.PdxSerializer; /** * Parses an XML file and creates a {@link Cache}/{@link ClientCache} and {@link Region}s from it. * It works in two phases. The first phase parses the XML and instantiates {@link Declarable}s. If * any problems occur, a {@link CacheXmlException} is thrown. 
The second phase actually * {@linkplain CacheCreation#create creates} the {@link Cache}/{@link ClientCache},{@link Region}s, * etc. * * * @since GemFire 3.0 */ @SuppressWarnings("deprecation") public class CacheXmlParser extends CacheXml implements ContentHandler { private static final Logger logger = LogService.getLogger(); /** * @since GemFire 8.1 */ private static final String BUFFER_SIZE = "http://apache.org/xml/properties/input-buffer-size"; /** * @since GemFire 8.1 */ private static final String DISALLOW_DOCTYPE_DECL_FEATURE = "http://apache.org/xml/features/disallow-doctype-decl"; /** * @since GemFire 8.1 */ private static final String JAXP_SCHEMA_LANGUAGE = "http://java.sun.com/xml/jaxp/properties/schemaLanguage"; /** The cache to be created */ private CacheCreation cache; /** The stack of intermediate values used while parsing */ protected Stack
StringBuffer is // solely used (as a marker) by thecharacters
method // and by doing this conversion we allow for multiple consecutive string // elements, otherwisecharacters
would continue to // append and our stack order would be out of whack. See bug 32122. private void endString() { StringBuffer str = (StringBuffer) stack.pop(); stack.push(str.toString()/* .trim() */); } /** * finish parsing a "group" element which is just a string * * @since GemFire 5.7 */ private void endGroup() { StringBuffer str = (StringBuffer) stack.pop(); stack.push(str.toString().trim()); } private void endClassName() { StringBuffer str = (StringBuffer) stack.pop(); stack.push(str.toString().trim()); // trim fixes bug 32928 } /** * When anentry
element is finished, thevalue
should be on the stop of * the stack followed by thekey
. TheRegionCreation
for the region * being created should be below that. */ private void endEntry() { Object value = stack.pop(); Object key = stack.pop(); RegionCreation region = (RegionCreation) stack.peek(); // changed by mitul after modifying code for Region implements Map region.put(key, value); } /** * When akey-constraint
element is finished, the name of the class should be on top * of the stack. * * @throws CacheXmlException If the key constraint class cannot be loaded */ private void endKeyConstraint() { String className = ((StringBuffer) stack.pop()).toString().trim(); Class c; try { c = InternalDataSerializer.getCachedClass(className); } catch (Exception ex) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_COULD_NOT_LOAD_KEYCONSTRAINT_CLASS_0 .toLocalizedString(className), ex); } // The region attributes should be on top of the stack RegionAttributesCreation attrs = peekRegionAttributesContext("key-constraint"); attrs.setKeyConstraint(c); } /** * When avalue-constraint
element is finished, the name of the class should be on * top of the stack. * * @throws CacheXmlException If the value constraint class cannot be loaded */ private void endValueConstraint() { String className = ((StringBuffer) stack.pop()).toString().trim(); Class c; try { c = InternalDataSerializer.getCachedClass(className); } catch (Exception ex) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_COULD_NOT_LOAD_VALUECONSTRAINT_CLASS_0 .toLocalizedString(className), ex); } // The region attributes should be on top of the stack RegionAttributesCreation attrs = peekRegionAttributesContext("value-constraint"); attrs.setValueConstraint(c); } /** * When aregion-time-to-live
element is finished, the {@link ExpirationAttributes} * are on top of the stack followed by the {@link RegionAttributesCreation} to which the * expiration attributes are assigned. */ private void endRegionTimeToLive() { ExpirationAttributes expire = (ExpirationAttributes) stack.pop(); RegionAttributesCreation attrs = peekRegionAttributesContext("region-time-to-live"); attrs.setRegionTimeToLive(expire); } /** * When aregion-idle-time
element is finished, the {@link ExpirationAttributes} are * on top of the stack followed by the {@link RegionAttributesCreation} to which the expiration * attributes are assigned. */ private void endRegionIdleTime() { ExpirationAttributes expire = (ExpirationAttributes) stack.pop(); RegionAttributesCreation attrs = peekRegionAttributesContext("region-idle-time"); attrs.setRegionIdleTimeout(expire); } private RegionAttributesCreation peekRegionAttributesContext(String dependentElement) { Object a = stack.peek(); if (!(a instanceof RegionAttributesCreation)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_REGIONATTRIBUTES .toLocalizedString(dependentElement)); } return (RegionAttributesCreation) a; } private PartitionAttributesImpl peekPartitionAttributesImpl(String dependentElement) { Object a = stack.peek(); if (!(a instanceof PartitionAttributesImpl)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_PARTITIONATTRIBUTES .toLocalizedString(dependentElement)); } return (PartitionAttributesImpl) a; } /** * When aentry-time-to-live
element is finished, an optional Declarable (the * custom-expiry) is followed by the {@link ExpirationAttributes} are on top of the stack followed * by either the {@link RegionAttributesCreation} to which the expiration attributes are assigned, * or the attributes for a {@link PartitionAttributes} to which the attributes are assigned. */ private void endEntryTimeToLive() { Declarable custom = null; if (stack.peek() instanceof Declarable) { custom = (Declarable) stack.pop(); } ExpirationAttributes expire = (ExpirationAttributes) stack.pop(); Object a = stack.peek(); // if (a instanceof PartitionAttributesFactory) { // ((PartitionAttributesFactory) a).setEntryTimeToLive(expire); // } else if (a instanceof RegionAttributesCreation) { ((RegionAttributesCreation) a).setEntryTimeToLive(expire); if (custom != null) { ((RegionAttributesCreation) a).setCustomEntryTimeToLive((CustomExpiry) custom); } } else { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_REGIONATTRIBUTES_OR_PARTITIONATTRIBUTES .toLocalizedString(ENTRY_TIME_TO_LIVE)); } } /** * When aentry-idle-time
element is finished, an optional Declarable (the * custom-expiry) is followed by the {@link ExpirationAttributes} are on top of the stack followed * by the {@link RegionAttributesCreation} to which the expiration attributes are assigned. */ private void endEntryIdleTime() { Declarable custom = null; if (stack.peek() instanceof Declarable) { custom = (Declarable) stack.pop(); } ExpirationAttributes expire = (ExpirationAttributes) stack.pop(); Object a = stack.peek(); // if (a instanceof PartitionAttributesFactory) { // ((PartitionAttributesFactory) a).setEntryIdleTimeout(expire); // } else if (a instanceof RegionAttributesCreation) { ((RegionAttributesCreation) a).setEntryIdleTimeout(expire); if (custom != null) { ((RegionAttributesCreation) a).setCustomEntryIdleTimeout((CustomExpiry) custom); } } else { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_REGIONATTRIBUTES_OR_PARTITIONATTRIBUTES .toLocalizedString(ENTRY_IDLE_TIME)); } } /** * When apartition-attributes
element is finished, the {@link PartitionAttributes} * are on top of the stack followed by the {@link RegionAttributesCreation} to which the partition * attributes are assigned. */ private void endPartitionAttributes() { PartitionAttributesImpl paf = (PartitionAttributesImpl) stack.pop(); paf.validateAttributes(); RegionAttributesCreation rattrs = peekRegionAttributesContext(PARTITION_ATTRIBUTES); // change the 5.0 default data policy (EMPTY) to the current default if (rattrs.hasDataPolicy() && rattrs.getDataPolicy().isEmpty() && (this.version.compareTo(CacheXmlVersion.GEMFIRE_5_0) == 0)) { rattrs.setDataPolicy(PartitionedRegionHelper.DEFAULT_DATA_POLICY); } rattrs.setPartitionAttributes(paf); } /** * When afixed-partition-attributes
element is finished */ private void endFixedPartitionAttributes() {} /** * When amembership-attributes
element is finished, the arguments for constructing * the MembershipAttributes are on the stack. */ private void endMembershipAttributes() { Set roles = new HashSet(); Object obj = null; while (!(obj instanceof Object[])) { obj = stack.pop(); if (obj instanceof String) { // found a required-role name roles.add(obj); } } Object[] attrs = (Object[]) obj; String laName = ((String) attrs[0]).toUpperCase().replace('-', '_'); String raName = ((String) attrs[1]).toUpperCase().replace('-', '_'); LossAction laction = LossAction.fromName(laName); ResumptionAction raction = ResumptionAction.fromName(raName); MembershipAttributes ra = new MembershipAttributes( (String[]) roles.toArray(new String[roles.size()]), laction, raction); RegionAttributesCreation rattrs = (RegionAttributesCreation) stack.peek(); rattrs.setMembershipAttributes(ra); } /** * When arequired-role
element is finished, */ private void endRequiredRole() { // do nothing... wait for endMembershipAttributes() } /** * When adisk-write-attributes
element is finished, the {@link DiskWriteAttributes} * is on top of the stack followed by the {@link RegionAttributesCreation} to which the expiration * attributes are assigned. */ private void endDiskWriteAttributes() { DiskWriteAttributes dwa = (DiskWriteAttributes) stack.pop(); RegionAttributesCreation attrs = peekRegionAttributesContext(DISK_WRITE_ATTRIBUTES); attrs.setDiskWriteAttributes(dwa); } /** * When adisk-dir
element is finished, the name of the directory is on top of the * stack. Create a new {@link File}and push it on the stack. */ private void endDiskDir() { StringBuffer dirName = (StringBuffer) stack.pop(); File dir = new File(dirName.toString().trim()); if (!dir.exists()) { } stack.push(dir); } /** * When adisk-dirs
element is finished, the directory {@link File}s are on the stack * followed by the {@link RegionAttributesCreation} to which the expiration attributes are * assigned. */ private void endDiskDirs() { List dirs = new ArrayList(); List sizes = new ArrayList(); while (stack.peek() instanceof File) { dirs.add(stack.pop()); sizes.add(stack.pop()); } Assert.assertTrue(!dirs.isEmpty()); Assert.assertTrue(!sizes.isEmpty()); // should set the disk-dirs and sizes in reverse order since parsers would have reversed // the order because of pushing into stack File[] disks = new File[dirs.size()]; int dirsLength = dirs.size(); for (int i = 0; i < dirsLength; i++) { disks[i] = (File) dirs.get((dirsLength - 1) - i); } int[] diskSizes = new int[sizes.size()]; for (int i = 0; i < dirsLength; i++) { diskSizes[i] = ((Integer) sizes.get((dirsLength - 1) - i)).intValue(); } Object a = stack.peek(); if (a instanceof RegionAttributesCreation) { RegionAttributesCreation attrs = (RegionAttributesCreation) a; attrs.setDiskDirsAndSize(disks, diskSizes); } else if (a instanceof DiskStoreAttributesCreation) { DiskStoreAttributesCreation attrs = (DiskStoreAttributesCreation) a; attrs.setDiskDirsAndSize(disks, diskSizes); } else { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_REGIONATTRIBUTES .toLocalizedString(DISK_DIRS)); } } /** * When asynchronous-writes
element is encounter, we push a * {@link DiskWriteAttributes} for doing synchronous writes on the stack. */ private void startSynchronousWrites() { int maxOplogSize = ((Integer) stack.pop()).intValue(); String rollOplog = (String) stack.pop(); // convery megabytes to bytes for DiskWriteAttributes creation long maxOplogSizeInBytes = maxOplogSize; maxOplogSizeInBytes = maxOplogSizeInBytes * 1024 * 1024; Properties props = new Properties(); props.setProperty(MAX_OPLOG_SIZE, String.valueOf(maxOplogSizeInBytes)); props.setProperty(ROLL_OPLOG, rollOplog); props.setProperty(DiskWriteAttributesImpl.SYNCHRONOUS_PROPERTY, "true"); stack.push(new DiskWriteAttributesImpl(props)); } /** * When aasynchronous-writes
element is encounter, we push a * {@link DiskWriteAttributes} for doing asynchronous writes on the stack. */ private void startAsynchronousWrites(Attributes atts) { int maxOplogSize = ((Integer) stack.pop()).intValue(); String rollOplog = (String) stack.pop(); // convery megabytes to bytes for DiskWriteAttributes creation long maxOplogSizeInBytes = maxOplogSize; maxOplogSizeInBytes = maxOplogSizeInBytes * 1024 * 1024; long timeInterval = parseLong(atts.getValue(TIME_INTERVAL)); long bytesThreshold = parseLong(atts.getValue(BYTES_THRESHOLD)); Properties props = new Properties(); props.setProperty(MAX_OPLOG_SIZE, String.valueOf(maxOplogSizeInBytes)); props.setProperty(ROLL_OPLOG, rollOplog); props.setProperty(TIME_INTERVAL, String.valueOf(timeInterval)); props.setProperty(DiskWriteAttributesImpl.SYNCHRONOUS_PROPERTY, "false"); props.setProperty(BYTES_THRESHOLD, String.valueOf(bytesThreshold)); stack.push(new DiskWriteAttributesImpl(props)); } /** * When aparition-attributes
element is encountered, we push a ParitionAttributes?? * for configuring paritioned storage on the stack. */ private void startPartitionAttributes(Attributes atts) { PartitionAttributesImpl paf = new PartitionAttributesImpl(); String redundancy = atts.getValue(PARTITION_REDUNDANT_COPIES); if (redundancy != null) { paf.setRedundantCopies(parseInt(redundancy)); } String localMaxMem = atts.getValue(LOCAL_MAX_MEMORY); if (localMaxMem != null) { paf.setLocalMaxMemory(parseInt(localMaxMem)); } String totalMaxMem = atts.getValue(TOTAL_MAX_MEMORY); if (totalMaxMem != null) { paf.setTotalMaxMemory(parseLong(totalMaxMem)); } String totalNumBuckets = atts.getValue(TOTAL_NUM_BUCKETS); if (totalNumBuckets != null) { paf.setTotalNumBuckets(parseInt(totalNumBuckets)); } String colocatedWith = atts.getValue(PARTITION_COLOCATED_WITH); if (colocatedWith != null) { paf.setColocatedWith(colocatedWith); } String recoveryDelay = atts.getValue(RECOVERY_DELAY); if (recoveryDelay != null) { paf.setRecoveryDelay(parseInt(recoveryDelay)); } String startupRecoveryDelay = atts.getValue(STARTUP_RECOVERY_DELAY); if (startupRecoveryDelay != null) { paf.setStartupRecoveryDelay(parseInt(startupRecoveryDelay)); } stack.push(paf); } /** * When afixed-partition-attributes
element is encountered, we create an instance of * FixedPartitionAttributesImpl and add it to the PartitionAttributesImpl stack. */ private void startFixedPartitionAttributes(Attributes atts) { FixedPartitionAttributesImpl fpai = new FixedPartitionAttributesImpl(); String partitionName = atts.getValue(PARTITION_NAME); if (partitionName != null) { fpai.setPartitionName(partitionName); } String isPrimary = atts.getValue(IS_PRIMARY); if (isPrimary != null) { fpai.isPrimary(parseBoolean(isPrimary)); } String numBuckets = atts.getValue(NUM_BUCKETS); if (numBuckets != null) { fpai.setNumBuckets(parseInt(numBuckets)); } Object a = stack.peek(); if (a instanceof PartitionAttributesImpl) { ((PartitionAttributesImpl) a).addFixedPartitionAttributes(fpai); } } /** * When amembership-attributes
element is encountered, we push an array of * attributes for creation of a MembershipAttributes. */ private void startMembershipAttributes(Attributes atts) { Object[] attrs = new Object[2]; // loss-action, resumption-action attrs[0] = atts.getValue(LOSS_ACTION) == null ? LossAction.NO_ACCESS.toString() : atts.getValue(LOSS_ACTION); attrs[1] = atts.getValue(RESUMPTION_ACTION) == null ? ResumptionAction.REINITIALIZE.toString() : atts.getValue(RESUMPTION_ACTION); stack.push(attrs); } /** * When asubscription-attributes
element is first encountered, we create an * SubscriptionAttibutes?? object from the element's attributes and stick it in the current region * attributes. */ private void startSubscriptionAttributes(Attributes atts) { String ip = atts.getValue(INTEREST_POLICY); SubscriptionAttributes sa; if (ip == null) { sa = new SubscriptionAttributes(); } else if (ip.equals(ALL)) { sa = new SubscriptionAttributes(InterestPolicy.ALL); } else if (ip.equals(CACHE_CONTENT)) { sa = new SubscriptionAttributes(InterestPolicy.CACHE_CONTENT); } else { throw new InternalGemFireException( LocalizedStrings.CacheXmlParser_UNKNOWN_INTERESTPOLICY_0.toLocalizedString(ip)); } RegionAttributesCreation rattrs = (RegionAttributesCreation) stack.peek(); rattrs.setSubscriptionAttributes(sa); } /** * When arequired-role
element is encountered, we push a string for creation of * MembershipAttributes. */ private void startRequiredRole(Attributes atts) { stack.push(atts.getValue(NAME)); } /** * When aindex
element is encounter, we create the IndexCreationData object from the * Stack. Set the required parameters in the IndexCreationData object & push it on stack. * */ private void startIndex(Attributes atts) { boolean isPrimary = false; String type = ""; IndexCreationData icd = new IndexCreationData(atts.getValue(NAME)); int len = atts.getLength(); if (len > 1) { if (Boolean.valueOf(atts.getValue(KEY_INDEX))) { icd.setIndexType(IndexType.PRIMARY_KEY); isPrimary = true; } type = atts.getValue(INDEX_TYPE); } if (len > 2) { String fromClause = atts.getValue(FROM_CLAUSE); String expression = atts.getValue(EXPRESSION); String importStr = atts.getValue(IMPORTS); if (isPrimary) { icd.setIndexData(IndexType.PRIMARY_KEY, null, expression, null); } else { if (type == null) { type = RANGE_INDEX_TYPE; } if (type.equals(HASH_INDEX_TYPE)) { icd.setIndexData(IndexType.HASH, fromClause, expression, importStr); } else if (type.equals(RANGE_INDEX_TYPE)) { icd.setIndexData(IndexType.FUNCTIONAL, fromClause, expression, importStr); } else { logger.trace(LogMarker.CACHE_XML_PARSER, LocalizedMessage.create(LocalizedStrings.CacheXmlParser_UNKNOWN_INDEX_TYPE, type)); icd.setIndexData(IndexType.FUNCTIONAL, fromClause, expression, importStr); } } } this.stack.push(icd); } /** * When index element is ending we need to verify all attributes because of new index tag * definition since 6.6.1 and support previous definition also. * * iffunctional
element was not there then we need to validate expression and * fromClause as not null. */ private void endIndex() { boolean throwExcep = false; IndexCreationData icd = (IndexCreationData) this.stack.pop(); if (icd.getIndexType() == null) { throwExcep = true; } else { if (icd.getIndexType().equals(IndexType.PRIMARY_KEY)) { if (icd.getIndexExpression() == null) { throwExcep = true; } } else { if (icd.getIndexExpression() == null && icd.getIndexFromClause() == null) { throwExcep = true; } } } if (!throwExcep) { RegionCreation rc = (RegionCreation) this.stack.peek(); rc.addIndexData(icd); } else { throw new InternalGemFireException( LocalizedStrings.CacheXmlParser_CACHEXMLPARSERENDINDEXINDEX_CREATION_ATTRIBUTE_NOT_CORRECTLY_SPECIFIED .toLocalizedString()); } } /** * When afunctional
element is encounter, we pop the IndexCreationData object from * the Stack. Set the required parameters in the IndexCreationData object & set it in * RegionCreation object. * */ private void startFunctionalIndex(Attributes atts) { boolean throwExcep = false; IndexCreationData icd = (IndexCreationData) this.stack.peek(); // icd.setIndexType(FUNCTIONAL); int len = -1; if ((len = atts.getLength()) > 1) { String fromClause = atts.getValue(FROM_CLAUSE); String expression = atts.getValue(EXPRESSION); String importStr = null; if (len == 3) importStr = atts.getValue(IMPORTS); if (fromClause == null || expression == null) { throwExcep = true; } else { icd.setIndexData(IndexType.FUNCTIONAL, fromClause, expression, importStr); } } else { throwExcep = true; } if (throwExcep) { throw new InternalGemFireException( LocalizedStrings.CacheXmlParser_CACHEXMLPARSERSTARTFUNCTIONALINDEXINDEX_CREATION_ATTRIBUTE_NOT_CORRECTLY_SPECIFIED .toLocalizedString()); } } /** * When aprimary-key
element is encounter, we pop the IndexCreationData object from * the Stack. Set the required parameters in the IndexCreationData object & set it in * RegionCreation object. * */ private void startPrimaryKeyIndex(Attributes atts) { IndexCreationData icd = (IndexCreationData) this.stack.peek(); // icd.setIndexType(PRIMARY_KEY); boolean throwExcep = false; if (atts.getLength() == 1) { String field = atts.getValue(FIELD); if (field == null) { throwExcep = true; } else { icd.setIndexData(IndexType.PRIMARY_KEY, null, field, null); } } else { throwExcep = true; } if (throwExcep) { throw new InternalGemFireException( LocalizedStrings.CacheXmlParser_CACHEXMLPARSERSTARTPRIMARYKEYINDEXPRIMARYKEY_INDEX_CREATION_FIELD_IS_NULL .toLocalizedString()); } } /** * When aexpiration-attributes
element is first encountered, we create an * ExpirationAttibutes?? object from the element's attributes and push it on the stack. */ private void startExpirationAttributes(Attributes atts) { int timeout = parseInt(atts.getValue(TIMEOUT)); String action = atts.getValue(ACTION); ExpirationAttributes expire; if (action == null) { expire = new ExpirationAttributes(timeout); } else if (action.equals(INVALIDATE)) { expire = new ExpirationAttributes(timeout, ExpirationAction.INVALIDATE); } else if (action.equals(DESTROY)) { expire = new ExpirationAttributes(timeout, ExpirationAction.DESTROY); } else if (action.equals(LOCAL_INVALIDATE)) { expire = new ExpirationAttributes(timeout, ExpirationAction.LOCAL_INVALIDATE); } else if (action.equals(LOCAL_DESTROY)) { expire = new ExpirationAttributes(timeout, ExpirationAction.LOCAL_DESTROY); } else { throw new InternalGemFireException( LocalizedStrings.CacheXmlParser_UNKNOWN_EXPIRATION_ACTION_0.toLocalizedString(action)); } stack.push(expire); } /** * When aserializer-registration element is first encountered, we need to create the * wrapper object to hold the data, and put it on the stack. */ private void startSerializerRegistration() { SerializerCreation sc = new SerializerCreation(); this.stack.push(sc); } /** * When an
element is finished, the {@link Parameter}s and class names * are popped off the stack. The cache writer is instantiated and initialized with the parameters, * if appropriate. *instantiator
element is first encountered, we need to hang on to the id * attribute for use in registration in the end tag function. */ private void startInstantiator(Attributes atts) { int id = parseInt(atts.getValue(ID)); this.stack.push(id); } /** * Creates and initializes an instance of {@link Declarable} from the contents of the stack. * * @throws CacheXmlException Something goes wrong while instantiating or initializing the * declarable */ private Declarable createDeclarable() { Properties props = new Properties(); Object top = stack.pop(); while (top instanceof Parameter) { Parameter param = (Parameter) top; props.put(param.getName(), param.getValue()); top = stack.pop(); } logger.trace(LogMarker.CACHE_XML_PARSER, LocalizedMessage .create(LocalizedStrings.CacheXmlParser_XML_PARSER_CREATEDECLARABLE_PROPERTIES__0, props)); Assert.assertTrue(top instanceof String); String className = (String) top; logger.trace(LogMarker.CACHE_XML_PARSER, LocalizedMessage.create( LocalizedStrings.CacheXmlParser_XML_PARSER_CREATEDECLARABLE_CLASS_NAME_0, className)); Object o; try { Class c = InternalDataSerializer.getCachedClass(className); o = c.newInstance(); } catch (Exception ex) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_WHILE_INSTANTIATING_A_0.toLocalizedString(className), ex); } if (!(o instanceof Declarable)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_CLASS_0_IS_NOT_AN_INSTANCE_OF_DECLARABLE .toLocalizedString(className)); } Declarable d = (Declarable) o; d.init(props); this.cache.addDeclarableProperties(d, props); return d; } /** * Ending thecompressor
registration should leave us with a class name on the stack. * Pull it off and setup the {@link Compressor} on the region attributes. */ private void endCompressor() { Class klass = getClassFromStack(); if (!Compressor.class.isAssignableFrom(klass)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_COMPRESSOR .toLocalizedString(klass.getName())); } Compressor compressor; try { compressor = (Compressor) klass.newInstance(); } catch (Exception ex) { throw new CacheXmlException(LocalizedStrings.CacheXmlParser_WHILE_INSTANTIATING_A_0 .toLocalizedString(klass.getName()), ex); } Object a = stack.peek(); if (a instanceof RegionAttributesCreation) { RegionAttributesCreation attrs = (RegionAttributesCreation) a; attrs.setCompressor(compressor); } else { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_REGIONATTRIBUTES_OR_1 .toLocalizedString(new Object[] {COMPRESSOR, DYNAMIC_REGION_FACTORY})); } } /** * When acache-loader
element is finished, the {@link Parameter}s and class names * are popped off the stack. The cache loader is instantiated and initialized with the parameters, * if appropriate. When the loader is being created in a dynamic-region-factory, there may be a * disk-dir element on the stack, represented by a File object. Otherwise, dynamic-region-factory * uses a RegionAttributesCreation, just like a region, and is treated the same.cache-writer
* A cache-writer may be created in the context of region-attributes or dynamic-region-factory. In * the latter case, there may be a disk-dir on top of the stack, represented by a File object. */ private void endCacheWriter() { Declarable d = createDeclarable(); if (!(d instanceof CacheWriter)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_CACHEWRITER .toLocalizedString(d.getClass().getName())); } Object a = stack.peek(); // check for partition-attributes // if (a instanceof PartitionAttributesFactory) { // PartitionAttributesFactory fac = (PartitionAttributesFactory) a; // fac.setCacheWriter((CacheWriter) d); // } // else // check for disk-dir if ((a instanceof File)) { Object sav = stack.pop(); Object size = stack.pop(); // pop out disk size a = stack.peek(); // if (!(a instanceof RegionAttributesCreation)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_1 .toLocalizedString(new Object[] {CACHE_WRITER, DYNAMIC_REGION_FACTORY})); } stack.push(size); stack.push(sav); } // check for normal region-attributes else if (!(a instanceof RegionAttributesCreation)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_REGIONATTRIBUTES .toLocalizedString(CACHE_WRITER)); } RegionAttributesCreation attrs = (RegionAttributesCreation) a; attrs.setCacheWriter((CacheWriter) d); } private void endCustomExpiry() { Declarable d = createDeclarable(); if (!(d instanceof CustomExpiry)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_CUSTOMEXPIRY .toLocalizedString(d.getClass().getName())); } stack.push(d); } /** * Create an
* <code>lru-entry-count</code> eviction controller, assigning it to the enclosed
 * <code>region-attributes</code>. Allow any combination of attributes to be provided. Use the
 * default values for any attribute that is not provided.
 *
 * @param atts the XML attributes of the lru-entry-count element (maximum, action)
 */
private void startLRUEntryCount(Attributes atts) {
  final String maximum = atts.getValue(MAXIMUM);
  int max = LRUCapacityController.DEFAULT_MAXIMUM_ENTRIES;
  if (maximum != null) {
    max = parseInt(maximum);
  }
  final String lruAction = atts.getValue(ACTION);
  EvictionAction action = EvictionAction.DEFAULT_EVICTION_ACTION;
  if (lruAction != null) {
    action = EvictionAction.parseAction(lruAction);
  }
  RegionAttributesCreation regAttrs = peekRegionAttributesContext(LRU_ENTRY_COUNT);
  regAttrs.setEvictionAttributes(EvictionAttributes.createLRUEntryAttributes(max, action));
}

/**
 * Start the configuration of a <code>lru-memory-size</code> eviction controller. Allow for any
 * of the attributes to be missing. Store the attributes on the stack anticipating the
 * declaration of an {@link ObjectSizer}.
 *
 * @param atts the XML attributes of the lru-memory-size element (maximum, action)
 */
private void startLRUMemorySize(Attributes atts) {
  String lruAction = atts.getValue(ACTION);
  EvictionAction action = EvictionAction.DEFAULT_EVICTION_ACTION;
  if (lruAction != null) {
    action = EvictionAction.parseAction(lruAction);
  }
  String maximum = atts.getValue(MAXIMUM);
  int max = MemLRUCapacityController.DEFAULT_MAXIMUM_MEGABYTES;
  if (maximum != null) {
    max = parseInt(maximum);
  }
  // Store for later addition of ObjectSizer, if any (sizer is null until one is declared)
  stack.push(EvictionAttributes.createLRUMemoryAttributes(max, null, action));
}

/**
 * Complete the configuration of a lru-memory-size
eviction controller. Check for the * declaration of an {@link ObjectSizer}. Assign the attributes to the enclose *region-attributes
*/ private void endLRUMemorySize() { Object declCheck = stack.peek(); Declarable d = null; if (declCheck instanceof String || declCheck instanceof Parameter) { d = createDeclarable(); if (!(d instanceof ObjectSizer)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_OBJECTSIZER .toLocalizedString(d.getClass().getName())); } } EvictionAttributesImpl eai = (EvictionAttributesImpl) stack.pop(); if (d != null) { eai.setObjectSizer((ObjectSizer) d); } RegionAttributesCreation regAttrs = peekRegionAttributesContext(LRU_MEMORY_SIZE); regAttrs.setEvictionAttributes(eai); } /** * Create anlru-heap-percentage
eviction controller, assigning it to the enclosed *region-attributes
* * @param atts */ private void startLRUHeapPercentage(Attributes atts) { final String lruAction = atts.getValue(ACTION); EvictionAction action = EvictionAction.DEFAULT_EVICTION_ACTION; if (lruAction != null) { action = EvictionAction.parseAction(lruAction); } // Store for later addition of ObjectSizer, if any stack.push(EvictionAttributes.createLRUHeapAttributes(null, action)); } /** * Complete the configuration of alru-heap-percentage
eviction controller. Check for * the declaration of an {@link ObjectSizer}. Assign the attributes to the enclosed *region-attributes
*/ private void endLRUHeapPercentage() { Object declCheck = stack.peek(); Declarable d = null; if (declCheck instanceof String || declCheck instanceof Parameter) { d = createDeclarable(); if (!(d instanceof ObjectSizer)) { String s = "A " + d.getClass().getName() + " is not an instance of a ObjectSizer"; throw new CacheXmlException(s); } } EvictionAttributesImpl eai = (EvictionAttributesImpl) stack.pop(); if (d != null) { eai.setObjectSizer((ObjectSizer) d); } RegionAttributesCreation regAttrs = peekRegionAttributesContext(LRU_HEAP_PERCENTAGE); regAttrs.setEvictionAttributes(eai); } /** * When acache-listener
element is finished, the {@link Parameter}s and class names * are popped off the stack. The cache listener is instantiated and initialized with the * parameters, if appropriate. */ private void endCacheListener() { Declarable d = createDeclarable(); if (!(d instanceof CacheListener)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_CACHELISTENER .toLocalizedString(d.getClass().getName())); } RegionAttributesCreation attrs = peekRegionAttributesContext(CACHE_LISTENER); attrs.addCacheListener((CacheListener) d); } private void startAsyncEventQueue(Attributes atts) { AsyncEventQueueCreation asyncEventQueueCreation = new AsyncEventQueueCreation(); // id String id = atts.getValue(ID); asyncEventQueueCreation.setId(id); String parallel = atts.getValue(PARALLEL); if (parallel == null) { asyncEventQueueCreation.setParallel(GatewaySender.DEFAULT_IS_PARALLEL); } else { asyncEventQueueCreation.setParallel(Boolean.parseBoolean(parallel)); } // batch-size String batchSize = atts.getValue(BATCH_SIZE); if (batchSize == null) { asyncEventQueueCreation.setBatchSize(GatewaySender.DEFAULT_BATCH_SIZE); } else { asyncEventQueueCreation.setBatchSize(Integer.parseInt(batchSize)); } // batch-time-interval String batchTimeInterval = atts.getValue(BATCH_TIME_INTERVAL); if (batchTimeInterval == null) { asyncEventQueueCreation.setBatchTimeInterval(GatewaySender.DEFAULT_BATCH_TIME_INTERVAL); } else { asyncEventQueueCreation.setBatchTimeInterval(Integer.parseInt(batchTimeInterval)); } // batch-conflation String batchConflation = atts.getValue(ENABLE_BATCH_CONFLATION); if (batchConflation == null) { asyncEventQueueCreation.setBatchConflationEnabled(GatewaySender.DEFAULT_BATCH_CONFLATION); } else { asyncEventQueueCreation.setBatchConflationEnabled(Boolean.parseBoolean(batchConflation)); } // maximum-queue-memory String maxQueueMemory = atts.getValue(MAXIMUM_QUEUE_MEMORY); if (maxQueueMemory == null) { 
asyncEventQueueCreation.setMaximumQueueMemory(GatewaySender.DEFAULT_MAXIMUM_QUEUE_MEMORY); } else { asyncEventQueueCreation.setMaximumQueueMemory(Integer.parseInt(maxQueueMemory)); } // persistent String persistent = atts.getValue(PERSISTENT); if (persistent == null) { asyncEventQueueCreation.setPersistent(GatewaySender.DEFAULT_PERSISTENCE_ENABLED); } else { asyncEventQueueCreation.setPersistent(Boolean.parseBoolean(persistent)); } // diskStoreName String diskStoreName = atts.getValue(DISK_STORE_NAME); if (diskStoreName == null) { asyncEventQueueCreation.setDiskStoreName(null); } else { asyncEventQueueCreation.setDiskStoreName(diskStoreName); } // diskSynchronous String diskSynchronous = atts.getValue(DISK_SYNCHRONOUS); if (diskSynchronous == null) { asyncEventQueueCreation.setDiskSynchronous(GatewaySender.DEFAULT_DISK_SYNCHRONOUS); } else { asyncEventQueueCreation.setDiskSynchronous(Boolean.parseBoolean(diskSynchronous)); } String dispatcherThreads = atts.getValue(DISPATCHER_THREADS); if (dispatcherThreads == null) { asyncEventQueueCreation.setDispatcherThreads(GatewaySender.DEFAULT_DISPATCHER_THREADS); } else { asyncEventQueueCreation.setDispatcherThreads(Integer.parseInt(dispatcherThreads)); } String orderPolicy = atts.getValue(ORDER_POLICY); if (orderPolicy != null) { try { asyncEventQueueCreation .setOrderPolicy(GatewaySender.OrderPolicy.valueOf(orderPolicy.toUpperCase())); } catch (IllegalArgumentException e) { throw new InternalGemFireException(LocalizedStrings.AsyncEventQueue_UNKNOWN_ORDER_POLICY_0_1 .toLocalizedString(new Object[] {id, orderPolicy})); } } // forward expiration destroy events. 
String forward = atts.getValue(FORWARD_EXPIRATION_DESTROY); if (forward != null) { asyncEventQueueCreation.setForwardExpirationDestroy(Boolean.parseBoolean(forward)); } stack.push(asyncEventQueueCreation); } private void endAsyncEventListener() { Declarable d = createDeclarable(); if (!(d instanceof AsyncEventListener)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_ASYNCEVENTLISTENER .toLocalizedString(d.getClass().getName())); } AsyncEventQueueCreation eventChannel = peekAsyncEventQueueContext(ASYNC_EVENT_LISTENER); eventChannel.setAsyncEventListener((AsyncEventListener) d); } private AsyncEventQueueCreation peekAsyncEventQueueContext(String dependentElement) { Object a = stack.peek(); if (!(a instanceof AsyncEventQueueCreation)) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_ASYNCEVENTQUEUE .toLocalizedString(dependentElement)); } return (AsyncEventQueueCreation) a; } private void endAsyncEventQueue() { AsyncEventQueueCreation asyncEventChannelCreation = (AsyncEventQueueCreation) stack.peek(); AsyncEventQueueFactory factory = cache.createAsyncEventQueueFactory(); factory.setParallel(asyncEventChannelCreation.isParallel()); factory.setBatchSize(asyncEventChannelCreation.getBatchSize()); factory.setBatchTimeInterval(asyncEventChannelCreation.getBatchTimeInterval()); factory.setBatchConflationEnabled(asyncEventChannelCreation.isBatchConflationEnabled()); factory.setPersistent(asyncEventChannelCreation.isPersistent()); factory.setDiskStoreName(asyncEventChannelCreation.getDiskStoreName()); factory.setDiskSynchronous(asyncEventChannelCreation.isDiskSynchronous()); factory.setMaximumQueueMemory(asyncEventChannelCreation.getMaximumQueueMemory()); factory.setDispatcherThreads(asyncEventChannelCreation.getDispatcherThreads()); factory.setOrderPolicy(asyncEventChannelCreation.getOrderPolicy()); 
factory.setForwardExpirationDestroy(asyncEventChannelCreation.isForwardExpirationDestroy()); ListgatewayEventFilters = asyncEventChannelCreation.getGatewayEventFilters(); for (GatewayEventFilter gatewayEventFilter : gatewayEventFilters) { factory.addGatewayEventFilter(gatewayEventFilter); } factory.setGatewayEventSubstitutionListener( asyncEventChannelCreation.getGatewayEventSubstitutionFilter()); AsyncEventQueue asyncEventChannel = factory.create(asyncEventChannelCreation.getId(), asyncEventChannelCreation.getAsyncEventListener()); stack.pop(); } /** * When a partition-resolver
* element is finished, the {@link Parameter}s and class names are popped off the stack. The
 * <code>PartitionResolver</code> is instantiated and initialized with the parameters, if
 * appropriate.
 */
private void endPartitionResolver() {
  Declarable d = createDeclarable();
  if (!(d instanceof PartitionResolver)) {
    throw new CacheXmlException(LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_1
        .toLocalizedString(new Object[] {d.getClass().getName(), "PartitionResolver"}));
  }
  PartitionAttributesImpl pai = peekPartitionAttributesImpl(PARTITION_ATTRIBUTES);
  pai.setPartitionResolver((PartitionResolver) d);
}

/**
 * When a <code>partition-listener</code> element is finished, the {@link Parameter}s and class
 * names are popped off the stack. The <code>PartitionListener</code> is instantiated and
 * initialized with the parameters, if appropriate.
 */
private void endPartitionListener() {
  Declarable d = createDeclarable();
  if (!(d instanceof PartitionListener)) {
    throw new CacheXmlException(LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_1
        .toLocalizedString(new Object[] {d.getClass().getName(), "PartitionListener"}));
  }
  PartitionAttributesImpl pai = peekPartitionAttributesImpl(PARTITION_ATTRIBUTES);
  pai.addPartitionListener((PartitionListener) d);
}

/**
 * When we have encountered a FunctionService element, we create the object and push it onto
 * stack
 */
private void startFunctionService() {
  this.stack.push(new FunctionServiceCreation());
}

/**
 * When we have finished a FunctionService element, we pop the creation object and register its
 * functions.
 */
private void endFunctionService() {
  Object top = stack.pop();
  if (!(top instanceof FunctionServiceCreation)) {
    throw new CacheXmlException(
        LocalizedStrings.CacheXmlParser_EXPECTED_A_FUNCTIONSERVICECREATION_INSTANCE
            .toLocalizedString());
  }
  FunctionServiceCreation fsc = (FunctionServiceCreation) top;
  fsc.create();
}

/**
 * Start the Resource Manager element configuration. Each percentage attribute falls back to its
 * creation-object default when absent.
 *
 * @param atts XML attributes for the resource-manager
 */
private void startResourceManager(final Attributes atts) {
  ResourceManagerCreation rmc = new ResourceManagerCreation();
  {
    String chp = atts.getValue(CRITICAL_HEAP_PERCENTAGE);
    if (chp != null) {
      rmc.setCriticalHeapPercentage(parseFloat(chp));
    } else {
      rmc.setCriticalHeapPercentageToDefault();
    }
  }
  {
    String ehp = atts.getValue(EVICTION_HEAP_PERCENTAGE);
    if (ehp != null) {
      rmc.setEvictionHeapPercentage(parseFloat(ehp));
    } else {
      rmc.setEvictionHeapPercentageToDefault();
    }
  }
  {
    String chp = atts.getValue(CRITICAL_OFF_HEAP_PERCENTAGE);
    if (chp != null) {
      rmc.setCriticalOffHeapPercentage(parseFloat(chp));
    } else {
      rmc.setCriticalOffHeapPercentageToDefault();
    }
  }
  {
    String ehp = atts.getValue(EVICTION_OFF_HEAP_PERCENTAGE);
    if (ehp != null) {
      rmc.setEvictionOffHeapPercentage(parseFloat(ehp));
    } else {
      rmc.setEvictionOffHeapPercentageToDefault();
    }
  }
  this.stack.push(rmc);
}

/**
 * Pops the {@link ResourceManagerCreation} pushed by startResourceManager and hands it to the
 * cache.
 */
private void endResourceManager() {
  Object top = stack.pop();
  if (!(top instanceof ResourceManagerCreation)) {
    throw new CacheXmlException("Expected a ResourceManagerCreation instance");
  }
  ResourceManagerCreation rmc = (ResourceManagerCreation) top;
  // TODO set any listeners here
  // rmc.addResourceListener(null);
  this.cache.setResourceManagerCreation(rmc);
}

/**
 * When a <code>backup</code> element is finished, the accumulated character data (a directory
 * path) is popped and registered with the cache.
 */
private void endBackup() {
  StringBuffer str = (StringBuffer) stack.pop();
  File backup = new File(str.toString().trim());
  this.cache.addBackup(backup);
}

/**
 * When we have finished a function element, we create the Declarable and register it with the
 * enclosing FunctionServiceCreation.
 */
private void endFunctionName() {
  Declarable d = createDeclarable();
  if (!(d instanceof Function)) {
    String s = LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_FUNCTION
        .toLocalizedString(d.getClass().getName());
    throw new CacheXmlException(s);
  }
  Object fs = stack.peek();
  if (!(fs instanceof FunctionServiceCreation)) {
    throw new CacheXmlException(
        LocalizedStrings.CacheXmlParser_A_0_IS_ONLY_ALLOWED_IN_THE_CONTEXT_OF_1_MJTDEBUG_E_2
            .toLocalizedString(new Object[] {FUNCTION, FUNCTION_SERVICE, fs}));
  }
  FunctionServiceCreation funcService = (FunctionServiceCreation) fs;
  funcService.registerFunction((Function) d);
}

/**
 * Pops a class name (String) off the stack and resolves it to a {@link Class}, wrapping any
 * lookup failure in a CacheXmlException.
 */
private Class getClassFromStack() {
  Object o = this.stack.peek();
  if (!(o instanceof String)) {
    throw new CacheXmlException(
        LocalizedStrings.CacheXmlParser_NO_CLASSNAME_FOUND.toLocalizedString());
  }
  String className = (String) this.stack.pop();
  try {
    Class c = InternalDataSerializer.getCachedClass(className);
    return c;
  } catch (Exception e) {
    throw new CacheXmlException(
        LocalizedStrings.CacheXmlParser_A_0_CLASS_NOT_FOUND.toLocalizedString(className), e);
  }
}

/**
 * Ending the top level <code>serialization-registration</code> element and actually doing the
 * work of registering all the components.
 */
private void endSerializerRegistration() {
  SerializerCreation sc = (SerializerCreation) this.stack.pop();
  sc.create();
  this.cache.setSerializerCreation(sc);
}

/**
 * Ending the serialization registration should leave us with a class name on the stack. We will
 * call the DataSerializer.register() with the class once we find it.
 */
private void endSerializer() {
  Class c = getClassFromStack();
  if (!(DataSerializer.class.isAssignableFrom(c))) {
    throw new CacheXmlException(
        LocalizedStrings.CacheXmlParser_A_0_NOT_A_SERIALIZER.toLocalizedString(c.getName()));
  }
  SerializerCreation sr = (SerializerCreation) this.stack.peek();
  sr.registerSerializer(c);
}

/**
 * Ending the instantiator registration should leave us with a class name and an Integer ID on
 * the stack. Pull them off, and setup the instantiator with an anonymous inner class to do the
 * work.
 */
private void endInstantiator() {
  final Class c = getClassFromStack();
  // NOTE(review): getInterfaces() returns only directly-implemented interfaces, so a class
  // inheriting DataSerializable through a superclass would be rejected here — confirm whether
  // that is intended before changing.
  Class[] ifaces = c.getInterfaces();
  boolean found = false;
  for (Class clazz : ifaces) {
    if (clazz == DataSerializable.class) {
      found = true;
      break;
    }
  }
  if (!found) {
    throw new CacheXmlException(LocalizedStrings.CacheXmlParser_A_0_IS_NOT_DATA_SERIALIZABLE
        .toLocalizedString(c.getName()));
  }
  // the next thing on the stack should be the Integer registration ID
  Object o = this.stack.peek();
  if (!(o instanceof Integer)) {
    String s = LocalizedStrings.CacheXmlParser_NO_SERIALIZATION_ID.toLocalizedString();
    throw new CacheXmlException(s);
  }
  Integer id = (Integer) this.stack.pop();
  SerializerCreation sc = (SerializerCreation) this.stack.peek();
  sc.registerInstantiator(c, id);
}

/**
 * When we first encounter a <code>parameter</code> element, we push its name element on to the
 * stack.
 */
private void startParameter(Attributes atts) {
  String name = atts.getValue(NAME);
  Assert.assertTrue(name != null);
  stack.push(name);
}

/**
 * When we have finished a <code>parameter</code> element, create a {@link Parameter} from the
 * top two elements of the stack.
 */
private void endParameter() {
  Object value = stack.pop();
  String name = (String) stack.pop();
  stack.push(new Parameter(name, value));
}

/**
 * When we have finished a declarable
* , instantiate an instance of the {@link Declarable} and push it on the stack.
 */
private void endDeclarable() {
  Declarable d = createDeclarable();
  stack.push(d);
}

// Dispatches each XML start tag to its handler. Branches with empty bodies either need no
// work at start time or are handled entirely in endElement(); unknown elements fall through
// to a namespace-registered delegate parser.
public void startElement(String namespaceURI, String localName, String qName, Attributes atts)
    throws SAXException {
  if (qName.equals(CACHE)) {
    startCache(atts);
  } else if (qName.equals(CLIENT_CACHE)) {
    startClientCache(atts);
  } else if (qName.equals(BRIDGE_SERVER)) {
    startCacheServer(atts);
  } else if (qName.equals(CACHE_SERVER)) {
    startCacheServer(atts);
  } else if (qName.equals(LOAD_PROBE)) {
  } else if (qName.equals(CONNECTION_POOL)) {
    startPool(atts);
  } else if (qName.equals(CLIENT_SUBSCRIPTION)) {
    startClientHaQueue(atts);
  } else if (qName.equals(DYNAMIC_REGION_FACTORY)) {
    startDynamicRegionFactory(atts);
  } else if (qName.equals(GATEWAY_SENDER)) {
    startGatewaySender(atts);
  } else if (qName.equals(GATEWAY_RECEIVER)) {
    startGatewayReceiver(atts);
  } else if (qName.equals(GATEWAY_EVENT_FILTER)) {
  } else if (qName.equals(GATEWAY_TRANSPORT_FILTER)) {
  } else if (qName.equals(GATEWAY_EVENT_LISTENER)) {
  } else if (qName.equals(GATEWAY_EVENT_SUBSTITUTION_FILTER)) {
  } else if (qName.equals(ASYNC_EVENT_QUEUE)) {
    startAsyncEventQueue(atts);
  } else if (qName.equals(GATEWAY_CONFLICT_RESOLVER)) {
  } else if (qName.equals(LOCATOR)) {
    doLocator(atts);
  } else if (qName.equals(REGION)) {
    startRegion(atts);
  } else if (qName.equals(VM_ROOT_REGION)) {
    startRegion(atts);
  } else if (qName.equals(REGION_ATTRIBUTES)) {
    startRegionAttributes(atts);
  } else if (qName.equals(DISK_STORE)) {
    startDiskStore(atts);
  } else if (qName.equals(KEY_CONSTRAINT)) {
  } else if (qName.equals(VALUE_CONSTRAINT)) {
  } else if (qName.equals(INDEX_UPDATE_TYPE)) {
  } else if (qName.equals(REGION_TIME_TO_LIVE)) {
  } else if (qName.equals(REGION_IDLE_TIME)) {
  } else if (qName.equals(ENTRY_TIME_TO_LIVE)) {
  } else if (qName.equals(ENTRY_IDLE_TIME)) {
  } else if (qName.equals(EXPIRATION_ATTRIBUTES)) {
    startExpirationAttributes(atts);
  } else if (qName.equals(SERVER)) {
    doServer(atts);
  } else if (qName.equals(CUSTOM_EXPIRY)) {
  } else if (qName.equals(SUBSCRIPTION_ATTRIBUTES)) {
    startSubscriptionAttributes(atts);
  } else if (qName.equals(ENTRY)) {
  } else if (qName.equals(CLASS_NAME)) {
  } else if (qName.equals(PARAMETER)) {
    startParameter(atts);
  } else if (qName.equals(DISK_WRITE_ATTRIBUTES)) {
    startDiskWriteAttributes(atts);
  } else if (qName.equals(SYNCHRONOUS_WRITES)) {
    startSynchronousWrites();
  } else if (qName.equals(ASYNCHRONOUS_WRITES)) {
    startAsynchronousWrites(atts);
  } else if (qName.equals(DISK_DIRS)) {
  } else if (qName.equals(DISK_DIR)) {
    startDiskDir(atts);
  } else if (qName.equals(GROUP)) {
  } else if (qName.equals(PARTITION_ATTRIBUTES)) {
    startPartitionAttributes(atts);
  } else if (qName.equals(FIXED_PARTITION_ATTRIBUTES)) {
    startFixedPartitionAttributes(atts);
  } else if (qName.equals(REQUIRED_ROLE)) {
    startRequiredRole(atts);
  } else if (qName.equals(MEMBERSHIP_ATTRIBUTES)) {
    startMembershipAttributes(atts);
  } else if (qName.equals(LOCAL_PROPERTIES)) {
    startPartitionProperties(atts, LOCAL_PROPERTIES);
  } else if (qName.equals(GLOBAL_PROPERTIES)) {
    startPartitionProperties(atts, GLOBAL_PROPERTIES);
  } else if (qName.equals(CACHE_LOADER)) {
  } else if (qName.equals(CACHE_WRITER)) {
  } else if (qName.equals(EVICTION_ATTRIBUTES)) {
  } else if (qName.equals(LRU_ENTRY_COUNT)) {
    startLRUEntryCount(atts); // internal to eviction-attributes
  } else if (qName.equals(LRU_MEMORY_SIZE)) { // internal to eviction-attributes
    // Visit endLRUMemorySize() to know the completion
    // of lru-memory-size eviction configuration
    startLRUMemorySize(atts);
  } else if (qName.equals(LRU_HEAP_PERCENTAGE)) {
    startLRUHeapPercentage(atts); // internal to eviction-attributes
  } else if (qName.equals(CACHE_LISTENER)) {
  } else if (qName.equals(ASYNC_EVENT_LISTENER)) {
  } else if (qName.equals(KEY)) {
  } else if (qName.equals(VALUE)) {
  } else if (qName.equals(STRING)) {
  } else if (qName.equals(DECLARABLE)) {
  } else if (qName.equals(INDEX)) {
    // Asif: Create an object of type IndexCreationData &
    // push it in stack
    startIndex(atts);
    // this.stack.push(new IndexCreationData(atts.getValue(NAME)));
  } else if (qName.equals(FUNCTIONAL)) {
    startFunctionalIndex(atts);
  } else if (qName.equals(PRIMARY_KEY)) {
    startPrimaryKeyIndex(atts);
  } else if (qName.equals(TRANSACTION_MANAGER)) {
    startCacheTransactionManager();
  } else if (qName.equals(TRANSACTION_LISTENER)) {
  } else if (qName.equals(TRANSACTION_WRITER)) {
  } else if (qName.equals(JNDI_BINDINGS)) { // added by Nand Kishor
  } else if (qName.equals(JNDI_BINDING)) { // added by Nand Kishor
    // Asif: Push the BindingCreation object in the stack
    Map gfSpecific = new HashMap();
    mapJNDI(atts, gfSpecific);
    List vendorSpecific = new ArrayList();
    this.stack.push(new BindingCreation(gfSpecific, vendorSpecific));
  } else if (qName.equals(CONFIG_PROPERTY_BINDING)) {
    // Asif : Peek at the BindingCreation object from stack
    // & get the vendor specific data map
    BindingCreation bc = (BindingCreation) this.stack.peek();
    List vendorSpecific = bc.getVendorSpecificList();
    // Rohit: Add a ConfigProperty Data Object to the list.
    vendorSpecific.add(new ConfigProperty());
  } else if (qName.equals(CONFIG_PROPERTY_NAME)) {
  } else if (qName.equals(CONFIG_PROPERTY_VALUE)) {
  } else if (qName.equals(CONFIG_PROPERTY_TYPE)) {
  } else if (qName.equals(PARTITION_RESOLVER)) {
  } else if (qName.equals(PARTITION_LISTENER)) {
  } else if (qName.equals(FUNCTION_SERVICE)) {
    startFunctionService();
  } else if (qName.equals(FUNCTION)) {
  } else if (qName.equals(TOP_SERIALIZER_REGISTRATION)) {
    startSerializerRegistration();
  } else if (qName.equals(INITIALIZER)) {
    startInitializer();
  } else if (qName.equals(INSTANTIATOR_REGISTRATION)) {
    startInstantiator(atts);
  } else if (qName.equals(SERIALIZER_REGISTRATION)) {
    // do nothing
  } else if (qName.equals(RESOURCE_MANAGER)) {
    startResourceManager(atts);
  } else if (qName.equals(BACKUP)) {
    // do nothing
  } else if (qName.equals(PDX)) {
    startPdx(atts);
  } else if (qName.equals(PDX_SERIALIZER)) {
    // do nothing
  } else if (qName.equals(COMPRESSOR)) {
  } else {
    final XmlParser delegate = getDelegate(namespaceURI);
    if (null == delegate) {
      throw new CacheXmlException(
          LocalizedStrings.CacheXmlParser_UNKNOWN_XML_ELEMENT_0.toLocalizedString(qName));
    }
    delegate.startElement(namespaceURI, localName, qName, atts);
  }
}

/**
 * Get delegate {@link XmlParser} for the given namespaceUri
* * @param namespaceUri to find {@link XmlParser} for. * @return {@link XmlParser} if found, otherwise null. * @since GemFire 8.1 */ // UnitTest CacheXmlParser.testGetDelegate() private XmlParser getDelegate(final String namespaceUri) { XmlParser delegate = delegates.get(namespaceUri); if (null == delegate) { try { final ServiceLoaderserviceLoader = ServiceLoader.load(XmlParser.class, ClassPathLoader.getLatestAsClassLoader()); for (final XmlParser xmlParser : serviceLoader) { if (xmlParser.getNamspaceUri().equals(namespaceUri)) { delegate = xmlParser; delegate.setStack(stack); delegate.setDocumentLocator(documentLocator); delegates.put(xmlParser.getNamspaceUri(), xmlParser); break; } } } catch (final Exception e) { logger.error(e.getMessage(), e); } } return delegate; } private void startPdx(Attributes atts) { String readSerialized = atts.getValue(READ_SERIALIZED); if (readSerialized != null) { cache.setPdxReadSerialized(Boolean.parseBoolean(readSerialized)); } String ignoreUnreadFields = atts.getValue(IGNORE_UNREAD_FIELDS); if (ignoreUnreadFields != null) { cache.setPdxIgnoreUnreadFields(Boolean.parseBoolean(ignoreUnreadFields)); } String persistent = atts.getValue(PERSISTENT); if (persistent != null) { cache.setPdxPersistent(Boolean.parseBoolean(persistent)); } String diskStoreName = atts.getValue(DISK_STORE_NAME); if (diskStoreName != null) { cache.setPdxDiskStore(diskStoreName); } } /** * When a client-subscription
element is first encountered, create a new * {@link ClientSubscriptionConfig } to store theeviction-policy
, **
capacity
andoverflow-directory
, then pass these values to Bridge * Server * * @since GemFire 5.7 */ private void startClientHaQueue(Attributes atts) { ClientHaQueueCreation clientHaQueue = new ClientHaQueueCreation(); String haEvictionPolicy = atts.getValue(CLIENT_SUBSCRIPTION_EVICTION_POLICY); if (haEvictionPolicy != null) { clientHaQueue.setEvictionPolicy(haEvictionPolicy); } String haCapacity = atts.getValue(CLIENT_SUBSCRIPTION_CAPACITY); if (haCapacity != null) { clientHaQueue.setCapacity(Integer.parseInt(haCapacity)); } String diskStoreName = atts.getValue(DISK_STORE_NAME); if (diskStoreName != null) { clientHaQueue.setDiskStoreName(diskStoreName); } else { String haOverflowDirectory = atts.getValue(OVERFLOW_DIRECTORY); if (haOverflowDirectory != null) { clientHaQueue.setOverflowDirectory(haOverflowDirectory); } } this.stack.push(clientHaQueue); } /** * Add a marker string to look for when in endPartitionProperties * * @param atts * @param localOrGlobal either the string LOCAL_PROPERTIES or GLOBAL_PROPERTIES */ private void startPartitionProperties(Attributes atts, String localOrGlobal) { stack.push(localOrGlobal); } private void startDiskDir(Attributes atts) { String size = atts.getValue(DIR_SIZE); Integer diskSize = null; if (size == null) { diskSize = Integer.valueOf(DiskStoreFactory.DEFAULT_DISK_DIR_SIZE); } else { diskSize = Integer.valueOf(size); } stack.push(diskSize); } private void startDiskWriteAttributes(Attributes atts) { String roll = atts.getValue(ROLL_OPLOG); if (roll == null) { roll = "true"; // because it defaults to true } String maxOp = atts.getValue(MAX_OPLOG_SIZE); int maxOplogSize = 0; if (maxOp != null) { maxOplogSize = parseInt(maxOp); } else { maxOplogSize = DiskWriteAttributesImpl.getDefaultMaxOplogSize(); } stack.push(roll); stack.push(Integer.valueOf(maxOplogSize)); } public void endElement(String namespaceURI, String localName, String qName) throws SAXException { try { // logger.debug("endElement namespaceURI=" + namespaceURI // + "; localName = " + localName + 
"; qName = " + qName); if (qName.equals(CACHE)) { endCache(); } else if (qName.equals(CLIENT_CACHE)) { endClientCache(); } else if (qName.equals(BRIDGE_SERVER)) { endCacheServer(); } else if (qName.equals(CACHE_SERVER)) { endCacheServer(); } else if (qName.equals(LOAD_PROBE)) { endLoadProbe(); } else if (qName.equals(CLIENT_SUBSCRIPTION)) { endClientHaQueue(); } else if (qName.equals(CONNECTION_POOL)) { endPool(); } else if (qName.equals(DYNAMIC_REGION_FACTORY)) { endDynamicRegionFactory(); } else if (qName.equals(GATEWAY_SENDER)) { endSerialGatewaySender(); } else if (qName.equals(GATEWAY_RECEIVER)) { endGatewayReceiver(); } else if (qName.equals(GATEWAY_EVENT_FILTER)) { endGatewayEventFilter(); } else if (qName.equals(GATEWAY_EVENT_SUBSTITUTION_FILTER)) { endGatewayEventSubstitutionFilter(); } else if (qName.equals(GATEWAY_TRANSPORT_FILTER)) { endGatewayTransportFilter(); } else if (qName.equals(ASYNC_EVENT_QUEUE)) { endAsyncEventQueue(); } else if (qName.equals(REGION)) { endRegion(); } else if (qName.equals(GATEWAY_CONFLICT_RESOLVER)) { endGatewayConflictResolver(); } else if (qName.equals(VM_ROOT_REGION)) { endRegion(); } else if (qName.equals(REGION_ATTRIBUTES)) { endRegionAttributes(); } else if (qName.equals(DISK_STORE)) { endDiskStore(); } else if (qName.equals(KEY_CONSTRAINT)) { endKeyConstraint(); } else if (qName.equals(VALUE_CONSTRAINT)) { endValueConstraint(); } else if (qName.equals(REGION_TIME_TO_LIVE)) { endRegionTimeToLive(); } else if (qName.equals(REGION_IDLE_TIME)) { endRegionIdleTime(); } else if (qName.equals(ENTRY_TIME_TO_LIVE)) { endEntryTimeToLive(); } else if (qName.equals(ENTRY_IDLE_TIME)) { endEntryIdleTime(); } else if (qName.equals(CUSTOM_EXPIRY)) { endCustomExpiry(); } else if (qName.equals(DISK_WRITE_ATTRIBUTES)) { endDiskWriteAttributes(); } else if (qName.equals(SYNCHRONOUS_WRITES)) { } else if (qName.equals(ASYNCHRONOUS_WRITES)) { } else if (qName.equals(DISK_DIRS)) { endDiskDirs(); } else if (qName.equals(DISK_DIR)) { 
endDiskDir(); } else if (qName.equals(GROUP)) { endGroup(); } else if (qName.equals(PARTITION_ATTRIBUTES)) { endPartitionAttributes(); } else if (qName.equals(FIXED_PARTITION_ATTRIBUTES)) { endFixedPartitionAttributes(); } else if (qName.equals(LOCAL_PROPERTIES)) { endPartitionProperites(LOCAL_PROPERTIES); } else if (qName.equals(GLOBAL_PROPERTIES)) { endPartitionProperites(GLOBAL_PROPERTIES); } else if (qName.equals(MEMBERSHIP_ATTRIBUTES)) { endMembershipAttributes(); } else if (qName.equals(REQUIRED_ROLE)) { endRequiredRole(); } else if (qName.equals(EXPIRATION_ATTRIBUTES)) { } else if (qName.equals(CUSTOM_EXPIRY)) { endCustomExpiry(); } else if (qName.equals(SUBSCRIPTION_ATTRIBUTES)) { } else if (qName.equals(ENTRY)) { endEntry(); } else if (qName.equals(CLASS_NAME)) { endClassName(); } else if (qName.equals(PARAMETER)) { endParameter(); } else if (qName.equals(CACHE_LOADER)) { endCacheLoader(); } else if (qName.equals(CACHE_WRITER)) { endCacheWriter(); } else if (qName.equals(EVICTION_ATTRIBUTES)) { } else if (qName.equals(LRU_ENTRY_COUNT)) { // internal to eviction-attributes } else if (qName.equals(LRU_MEMORY_SIZE)) { endLRUMemorySize(); // internal to eviction-attributes } else if (qName.equals(LRU_HEAP_PERCENTAGE)) { endLRUHeapPercentage(); // internal to eviction-attributes } else if (qName.equals(CACHE_LISTENER)) { endCacheListener(); } else if (qName.equals(ASYNC_EVENT_LISTENER)) { endAsyncEventListener(); } else if (qName.equals(KEY)) { } else if (qName.equals(VALUE)) { } else if (qName.equals(STRING)) { endString(); } else if (qName.equals(DECLARABLE)) { endDeclarable(); } else if (qName.equals(FUNCTIONAL)) { } else if (qName.equals(INDEX)) { endIndex(); } else if (qName.equals(PRIMARY_KEY)) { } else if (qName.equals(TRANSACTION_MANAGER)) { endCacheTransactionManager(); } else if (qName.equals(TRANSACTION_LISTENER)) { endTransactionListener(); } else if (qName.equals(TRANSACTION_WRITER)) { endTransactionWriter(); } else if (qName.equals(JNDI_BINDINGS)) 
{ } else if (qName.equals(JNDI_BINDING)) { // Asif Pop the BindingCreation object BindingCreation bc = (BindingCreation) this.stack.pop(); JNDIInvoker.mapDatasource(bc.getGFSpecificMap(), bc.getVendorSpecificList()); } else if (qName.equals(CONFIG_PROPERTY_BINDING)) { } else if (qName.equals(CONFIG_PROPERTY_NAME)) { String name = null; if (this.stack.peek() instanceof StringBuffer) // Pop the config-property-name element value from the stack. name = ((StringBuffer) this.stack.pop()).toString(); BindingCreation bc = (BindingCreation) this.stack.peek(); List vsList = bc.getVendorSpecificList(); ConfigProperty cp = (ConfigProperty) vsList.get(vsList.size() - 1); if (name == null) { String excep = LocalizedStrings.CacheXmlParser_EXCEPTION_IN_PARSING_ELEMENT_0_THIS_IS_A_REQUIRED_FIELD .toLocalizedString(qName); throw new CacheXmlException(excep); } else { // set the name. cp.setName(name); } } else if (qName.equals(CONFIG_PROPERTY_VALUE)) { String value = null; // Pop the config-property-value element value from the stack. if (this.stack.peek() instanceof StringBuffer) value = ((StringBuffer) this.stack.pop()).toString(); BindingCreation bc = (BindingCreation) this.stack.peek(); List vsList = bc.getVendorSpecificList(); ConfigProperty cp = (ConfigProperty) vsList.get(vsList.size() - 1); // Set the value to the ConfigProperty Data Object. 
cp.setValue(value); } else if (qName.equals(CONFIG_PROPERTY_TYPE)) { String type = null; if (this.stack.peek() instanceof StringBuffer) type = ((StringBuffer) this.stack.pop()).toString(); BindingCreation bc = (BindingCreation) this.stack.peek(); List vsList = bc.getVendorSpecificList(); ConfigProperty cp = (ConfigProperty) vsList.get(vsList.size() - 1); if (type == null) { String excep = LocalizedStrings.CacheXmlParser_EXCEPTION_IN_PARSING_ELEMENT_0_THIS_IS_A_REQUIRED_FIELD .toLocalizedString(qName); throw new CacheXmlException(excep); } else { cp.setType(type); } } else if (qName.equals(LRU_MEMORY_SIZE)) { // internal to eviction-attributes // Visit startLRUMemorySize() to know the begining // of lru-memory-size eviction configuration endLRUMemorySize(); } else if (qName.equals(LOCATOR)) { // nothing needed } else if (qName.equals(SERVER)) { // nothing needed } else if (qName.equals(PARTITION_RESOLVER)) { endPartitionResolver(); } else if (qName.equals(PARTITION_LISTENER)) { endPartitionListener(); } else if (qName.equals(FUNCTION)) { endFunctionName(); } else if (qName.equals(FUNCTION_SERVICE)) { endFunctionService(); } else if (qName.equals(TOP_SERIALIZER_REGISTRATION)) { endSerializerRegistration(); } else if (qName.equals(INITIALIZER)) { endInitializer(); } else if (qName.equals(SERIALIZER_REGISTRATION)) { endSerializer(); } else if (qName.equals(INSTANTIATOR_REGISTRATION)) { endInstantiator(); } else if (qName.equals(RESOURCE_MANAGER)) { endResourceManager(); } else if (qName.equals(BACKUP)) { endBackup(); } else if (qName.equals(PDX)) { // nothing needed } else if (qName.equals(PDX_SERIALIZER)) { endPdxSerializer(); } else if (qName.equals(COMPRESSOR)) { endCompressor(); } else { final XmlParser delegate = getDelegate(namespaceURI); if (null == delegate) { throw new CacheXmlException( LocalizedStrings.CacheXmlParser_UNKNOWN_XML_ELEMENT_0.toLocalizedString(qName)); } delegate.endElement(namespaceURI, localName, qName); } } catch (CacheException ex) { throw 
new SAXException(
          LocalizedStrings.CacheXmlParser_A_CACHEEXCEPTION_WAS_THROWN_WHILE_PARSING_XML
              .toLocalizedString(),
          ex);
    }
  }

  /**
   * Completes a {@code gateway-transport-filter} element: builds the declared object (via
   * {@code createDeclarable()}) and registers it on whichever gateway factory is currently on top
   * of the parse stack.
   */
  private void endGatewayTransportFilter() {
    Declarable d = createDeclarable();
    if (!(d instanceof GatewayTransportFilter)) {
      throw new CacheXmlException(LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_1
          .toLocalizedString(new Object[] {d.getClass().getName(), "GatewayTransportFilter"}));
    }
    Object a = stack.peek();
    if (a instanceof GatewaySenderFactory) {
      GatewaySenderFactory senderFactory = (GatewaySenderFactory) a;
      senderFactory.addGatewayTransportFilter((GatewayTransportFilter) d);
    } else if (a instanceof GatewayReceiverFactory) {
      GatewayReceiverFactory receiverFactory = (GatewayReceiverFactory) a;
      receiverFactory.addGatewayTransportFilter((GatewayTransportFilter) d);
    } else {
      // The element is only valid nested inside gateway-sender or gateway-receiver.
      throw new CacheXmlException(
          LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_GATEWAYSENDER_OR_GATEWAYRECEIVER
              .toLocalizedString(GATEWAY_TRANSPORT_FILTER));
    }
  }

  /**
   * Completes a {@code gateway-event-filter} element: builds the declared
   * {@link GatewayEventFilter} and adds it to the sender factory or async-event-queue creation on
   * top of the parse stack.
   */
  private void endGatewayEventFilter() {
    Declarable d = createDeclarable();
    if (!(d instanceof GatewayEventFilter)) {
      throw new CacheXmlException(LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_1
          .toLocalizedString(new Object[] {d.getClass().getName(), "GatewayEventFilter"}));
    }
    Object obj = stack.peek();
    if (obj instanceof GatewaySenderFactory) {
      GatewaySenderFactory senderFactory = (GatewaySenderFactory) obj;
      senderFactory.addGatewayEventFilter((GatewayEventFilter) d);
    } else if (obj instanceof AsyncEventQueueCreation) {
      AsyncEventQueueCreation asyncEventQueueCreation = (AsyncEventQueueCreation) obj;
      asyncEventQueueCreation.addGatewayEventFilter((GatewayEventFilter) d);
    } else {
      // Event filters are only valid under gateway-sender or async-event-queue.
      throw new CacheXmlException(
          LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_GATEWAY_SENDER_OR_ASYNC_EVENT_QUEUE
              .toLocalizedString("GatewayEventFilter"));
    }
  }

  /**
   * Completes a {@code gateway-event-substitution-filter} element: builds the declared filter and
   * installs it on the sender factory or async-event-queue creation on top of the parse stack.
   */
  private void endGatewayEventSubstitutionFilter() {
    Declarable d = createDeclarable();
    if (!(d instanceof
GatewayEventSubstitutionFilter)) {
      throw new CacheXmlException(
          LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_1.toLocalizedString(
              new Object[] {d.getClass().getName(), "GatewayEventSubstitutionFilter"}));
    }
    Object obj = stack.peek();
    if (obj instanceof GatewaySenderFactory) {
      GatewaySenderFactory senderFactory = (GatewaySenderFactory) obj;
      senderFactory.setGatewayEventSubstitutionFilter((GatewayEventSubstitutionFilter) d);
    } else if (obj instanceof AsyncEventQueueCreation) {
      AsyncEventQueueCreation asyncEventQueueCreation = (AsyncEventQueueCreation) obj;
      asyncEventQueueCreation.setGatewayEventSubstitutionFilter((GatewayEventSubstitutionFilter) d);
    } else {
      // Substitution filters are only valid under gateway-sender or async-event-queue.
      throw new CacheXmlException(
          LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_GATEWAY_SENDER_OR_ASYNC_EVENT_QUEUE
              .toLocalizedString("GatewayEventSubstitutionFilter"));
    }
  }

  /**
   * Returns the {@link GatewaySenderFactory} on top of the parse stack, or throws a
   * {@link CacheXmlException} if the top of the stack is not a sender factory (i.e. the dependent
   * element appears outside a {@code gateway-sender}).
   *
   * @param dependentElement name of the element being parsed, used in the error message
   */
  private GatewaySenderFactory peekGatewaySender(String dependentElement) {
    Object a = stack.peek();
    if (!(a instanceof GatewaySenderFactory)) {
      throw new CacheXmlException(
          LocalizedStrings.CacheXmlParser_A_0_MUST_BE_DEFINED_IN_THE_CONTEXT_OF_GATEWAY_SENDER
              .toLocalizedString(dependentElement));
    }
    return (GatewaySenderFactory) a;
  }

  /**
   * Completes a {@code pdx-serializer} element: the declared class must implement
   * {@link PdxSerializer}, and is installed on the cache being created.
   */
  private void endPdxSerializer() {
    Declarable d = createDeclarable();
    if (!(d instanceof PdxSerializer)) {
      throw new CacheXmlException(
          LocalizedStrings.CacheXmlParser_A_0_IS_NOT_AN_INSTANCE_OF_A_PDX_SERIALIZER
              .toLocalizedString(d.getClass().getName()));
    }
    PdxSerializer serializer = (PdxSerializer) d;
    this.cache.setPdxSerializer(serializer);
  }

  // Nothing to do when an initializer element opens; its state accumulates on the stack
  // and is consumed by endInitializer().
  private void startInitializer() {}

  /**
   * Completes an {@code initializer} element: pops any {@link Parameter}s and then the class name
   * off the stack, instantiates the named class (which must be a {@link Declarable}), and
   * registers it together with its parameters as the cache initializer.
   */
  private void endInitializer() {
    Properties props = new Properties();
    Object top = stack.pop();
    // Parameters (if any) sit above the class-name string on the stack.
    while (top instanceof Parameter) {
      Parameter param = (Parameter) top;
      props.put(param.getName(), param.getValue());
      top = stack.pop();
    }
    Assert.assertTrue(top instanceof String);
    String className = (String) top;
    Object o;
    try {
      Class c = InternalDataSerializer.getCachedClass(className);
      o =
c.newInstance();
    } catch (Exception ex) {
      throw new CacheXmlException(
          LocalizedStrings.CacheXmlParser_WHILE_INSTANTIATING_A_0.toLocalizedString(className), ex);
    }
    if (!(o instanceof Declarable)) {
      throw new CacheXmlException(
          LocalizedStrings.CacheXmlParser_CLASS_0_IS_NOT_AN_INSTANCE_OF_DECLARABLE
              .toLocalizedString(className));
    }
    Declarable d = (Declarable) o;
    this.cache.setInitializer(d, props);
  }

  /**
   * Do nothing
   *
   * @since GemFire 5.7
   */
  private void endClientHaQueue() {}

  /**
   * Process either the {@code local-properties} or {@code global-properties} for a
   * {@link org.apache.geode.internal.cache.PartitionedRegion}
   *
   * @param globalOrLocal either the string {@link CacheXml#LOCAL_PROPERTIES} or
   *        {@link CacheXml#GLOBAL_PROPERTIES}
   */
  private void endPartitionProperites(String globalOrLocal) {
    Properties props = new Properties();
    Object top = stack.pop();
    // Pop parameters until the sentinel string (the element name itself) is reached.
    while (!top.equals(globalOrLocal)) {
      if (!(top instanceof Parameter)) {
        throw new CacheXmlException(
            LocalizedStrings.CacheXmlParser_ONLY_A_PARAMETER_IS_ALLOWED_IN_THE_CONTEXT_OF_0
                .toLocalizedString(globalOrLocal));
      }
      Parameter param = (Parameter) top;
      props.put(param.getName(), param.getValue());
      top = stack.pop();
    }
    if (globalOrLocal.equals(GLOBAL_PROPERTIES)) {
      PartitionAttributesImpl pai = peekPartitionAttributesImpl(GLOBAL_PROPERTIES);
      pai.setGlobalProperties(props);
    } else if (globalOrLocal.equals(LOCAL_PROPERTIES)) {
      PartitionAttributesImpl pai = peekPartitionAttributesImpl(LOCAL_PROPERTIES);
      pai.setLocalProperties(props);
    } else {
      Assert.assertTrue(false, "Argument globalOrLocal has unexpected value " + globalOrLocal);
    }
  }

  public void characters(char[] ch, int start, int length) throws SAXException {
    // This method needs to handle XML chunking, so it uses a
    // StringBuffer to uniquely identify previous calls and will
    // append to the existing StringBuffer for each subsequent call
    Object o = null;
    try {
      o = stack.peek();
    } catch (EmptyStackException firstTime) {
      // No entries on the stack, this is the first element that
      // performs any stack operations, initialize a StringBuffer (see
      // finally block)
    } finally {
      StringBuffer chars = null;
      if (o instanceof StringBuffer) {
        // A previous characters() chunk for this element is on the stack: append to it.
        chars = (StringBuffer) o;
        chars.append(ch, start, length);
        logger.trace(LogMarker.CACHE_XML_PARSER, LocalizedMessage.create(
            LocalizedStrings.CacheXmlParser_XML_PARSER_CHARACTERS_APPENDED_CHARACTER_DATA_0,
            chars));
      } else {
        // First chunk for this element: start a fresh buffer and push it.
        chars = new StringBuffer(length);
        chars.append(ch, start, length);
        stack.push(chars);
        logger.trace(LogMarker.CACHE_XML_PARSER, LocalizedMessage.create(
LocalizedStrings.CacheXmlParser_XML_PARSER_CHARACTERS_NEW_CHARACTER_DATA_0, chars));
      }
    }
  }

  ////////// Inherited methods that don't do anything //////////

  // Note: unlike the no-op callbacks below, this one does record the document locator.
  @Override
  public void setDocumentLocator(Locator locator) {
    this.documentLocator = locator;
  }

  public void startDocument() throws SAXException {}

  public void endDocument() throws SAXException {}

  public void startPrefixMapping(String prefix, String uri) throws SAXException {}

  public void endPrefixMapping(String prefix) throws SAXException {}

  public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {}

  public void processingInstruction(String target, String data) throws SAXException {}

  public void skippedEntity(String name) throws SAXException {}

  /**
   * Copies every attribute of a jndi-name / datasource element into the given map as
   * qName-to-value pairs.
   *
   * @param atts attributes of the jndi-name element and datasource-related information
   * @param gfSpecific map that receives each attribute's qName/value pair
   */
  private void mapJNDI(Attributes atts, Map gfSpecific) {
    int attsLen = atts.getLength();
    String key = "";
    String value = "";
    // put attributes into a Map
    for (int i = 0; i < attsLen; i++) {
      key = atts.getQName(i);
      value = atts.getValue(key);
      gfSpecific.put(key, value);
    }
  }

  /////////////////////// Inner Classes ///////////////////////

  /**
   * Class that delegates all of the methods of a {@link org.xml.sax.helpers.DefaultHandler} to a
   * {@link CacheXmlParser} that implements all of the methods of {@code DefaultHandler}, but is
   * not a {@code DefaultHandler}.
   */
  static class DefaultHandlerDelegate extends DefaultHandler2 {
    /** The {@code CacheXmlParser} that does the real work */
    private CacheXmlParser handler;

    /**
     * Creates a new {@code DefaultHandlerDelegate} that delegates to the given
     * {@code CacheXmlParser}.
     */
    public DefaultHandlerDelegate(CacheXmlParser handler) {
      this.handler = handler;
    }

    @Override
    public InputSource resolveEntity(String publicId, String systemId)
        throws SAXException, IOException {
      return handler.resolveEntity(publicId, systemId);
    }

    @Override
    public InputSource resolveEntity(String name, String publicId, String baseURI, String systemId)
        throws SAXException, IOException {
      return handler.resolveEntity(name, publicId, baseURI, systemId);
    }

    @Override
    public void setDocumentLocator(Locator locator) {
      handler.setDocumentLocator(locator);
    }

    @Override
    public void startDocument() throws SAXException {
      handler.startDocument();
    }

    @Override
    public void endDocument() throws SAXException {
      handler.endDocument();
    }

    @Override
    public void startPrefixMapping(String prefix, String uri) throws SAXException {
      handler.startPrefixMapping(prefix, uri);
    }

    @Override
    public void endPrefixMapping(String prefix) throws SAXException {
      handler.endPrefixMapping(prefix);
    }

    @Override
    public void startElement(String uri, String localName, String qName, Attributes attributes)
        throws SAXException {
      handler.startElement(uri, localName, qName, attributes);
    }

    @Override
    public void endElement(String uri, String localName, String qName) throws SAXException {
      handler.endElement(uri, localName, qName);
    }

    @Override
    public void characters(char[] ch, int start, int length) throws SAXException {
      handler.characters(ch, start, length);
    }

    @Override
    public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
      handler.ignorableWhitespace(ch, start, length);
    }

    @Override
    public void processingInstruction(String target, String data) throws SAXException {
      handler.processingInstruction(target, data);
    }

    @Override
    public void skippedEntity(String name) throws SAXException {
      handler.skippedEntity(name);
    }

    @Override
    public void warning(SAXParseException e) throws SAXException {
      handler.warning(e);
    }

    @Override
    public void error(SAXParseException e) throws SAXException {
      handler.error(e);
    }

    @Override
    public void fatalError(SAXParseException e) throws SAXException {
      handler.fatalError(e);
    }
  }

  /**
   * Represents a parameter used to initialize a {@link Declarable}
   */
  static class Parameter {
    /** The name of the parameter */
    private String name;

    /** The value of the parameter */
    private Object value;

    /**
     * Creates a new {@code Parameter} with the given name and value.
     */
    public Parameter(String name, Object value) {
      this.name = name;
      this.value = value;
    }

    public String getName() {
      return this.name;
    }

    public Object getValue() {
      return this.value;
    }
  }
}