/*
* Copyright (c) 2007-2009, James Leigh All rights reserved.
* Copyright (c) 2011 Talis Inc., Some rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the openrdf.org nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
package org.openrdf.repository.object.compiler;
import info.aduna.io.FileUtil;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.lang.reflect.Method;
import java.lang.reflect.UndeclaredThrowableException;
import java.net.ConnectException;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.openrdf.annotations.Iri;
import org.openrdf.model.Model;
import org.openrdf.model.Namespace;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.model.vocabulary.OWL;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.repository.object.compiler.model.RDFClass;
import org.openrdf.repository.object.compiler.model.RDFOntology;
import org.openrdf.repository.object.compiler.model.RDFProperty;
import org.openrdf.repository.object.compiler.source.ClassPathBuilder;
import org.openrdf.repository.object.compiler.source.JavaCompiler;
import org.openrdf.repository.object.exceptions.ObjectStoreConfigException;
import org.openrdf.repository.object.managers.LiteralManager;
import org.openrdf.repository.object.managers.RoleMapper;
import org.openrdf.repository.object.managers.helpers.RoleClassLoader;
import org.openrdf.repository.object.vocabulary.MSG;
import org.openrdf.rio.RDFFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Converts OWL ontologies into Java source code.
*
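* <p>
* A minimal usage sketch. The constructor and method names shown below are
* assumptions inferred from this class's fields and collaborators
* ({@link RoleMapper}, {@link LiteralManager}, {@link Model}), not a
* confirmed API:
*
* <pre>
* RoleMapper mapper = ...;          // role/concept registry (placeholder)
* LiteralManager literals = ...;    // RDF datatype to Java class mapping (placeholder)
* OWLCompiler compiler = new OWLCompiler(mapper, literals);  // assumed constructor
* compiler.setModel(ontologyModel);                          // assumed setter for the parsed OWL statements
* ClassLoader generated = compiler.createJar(new File("ontology.jar")); // assumed entry point
* </pre>
*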
* @author James Leigh
*
*/
public class OWLCompiler {
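// Resource names of the class-list files packaged alongside the generated code.
// Each file lists the fully qualified names of the generated annotation, behaviour,
// concept and datatype classes (and the compiled ontologies) so they can be
// discovered at runtime, e.g. by RoleClassLoader and LiteralManager.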
private static final String META_INF_ANNOTATIONS = "META-INF/org.openrdf.annotations";
private static final String META_INF_BEHAVIOURS = "META-INF/org.openrdf.behaviours";
private static final String META_INF_CONCEPTS = "META-INF/org.openrdf.concepts";
private static final String META_INF_DATATYPES = "META-INF/org.openrdf.datatypes";
private static final String META_INF_ONTOLOGIES = "META-INF/org.openrdf.ontologies";
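/**
 * Queued task that generates the Java annotation source for a single RDF
 * property and records the resulting class name.
 */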
private class AnnotationBuilder implements Runnable {
private final RDFProperty bean;
private final List<String> content;
private final File target;
AnnotationBuilder(File target, List<String> content, RDFProperty bean) {
this.target = target;
this.content = content;
this.bean = bean;
}
public void run() {
try {
bean.generateAnnotationCode(target, resolver);
URI uri = bean.getURI();
String pkg = resolver.getPackageName(uri);
String className = resolver.getSimpleName(uri);
if (pkg != null) {
className = pkg + '.' + className;
}
synchronized (content) {
logger.debug("Saving {}", className);
content.add(className);
annotations.add(className);
}
} catch (Exception exc) {
logger.error("Error processing {}", bean);
if (exception == null) {
exception = exc;
}
}
}
}
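/**
 * Queued task that generates the Java concept (interface) source for a
 * single RDF class and records the resulting class name; anonymous classes
 * that declare nothing of their own are generated but not registered as
 * concepts.
 */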
private class ConceptBuilder implements Runnable {
private final RDFClass bean;
private final List<String> content;
private final File target;
ConceptBuilder(File target, List<String> content, RDFClass bean) {
this.target = target;
this.content = content;
this.bean = bean;
}
public void run() {
try {
bean.generateSourceCode(target, resolver);
URI uri = bean.getURI();
String pkg = resolver.getPackageName(uri);
String className = resolver.getSimpleName(uri);
if (pkg != null) {
className = pkg + '.' + className;
}
boolean anon = resolver.isAnonymous(uri)
&& bean.isEmpty(resolver);
synchronized (content) {
logger.debug("Saving {}", className);
content.add(className);
if (!anon) {
concepts.add(className);
}
}
} catch (Exception exc) {
logger.error("Error processing {}", bean);
if (exception == null) {
exception = exc;
}
}
}
}
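/**
 * Queued task that maps an RDF datatype onto an existing Java class via
 * owl:equivalentClass when possible, and otherwise generates a new datatype
 * class and records its name.
 */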
private final class DatatypeBuilder implements Runnable {
private final RDFClass bean;
private final List<String> content;
private final File target;
DatatypeBuilder(List<String> content, RDFClass bean, File target) {
this.content = content;
this.bean = bean;
this.target = target;
}
public void run() {
try {
for (RDFClass equivalentRdfClass : bean.getRDFClasses(OWL.EQUIVALENTCLASS)) {
Class<?> equivalentJavaClass = literals.findClass(equivalentRdfClass.getURI());
if (equivalentJavaClass != null) {
String equivalentJavaClassname = equivalentJavaClass.getName();
List<URI> uris = datatypes.get(equivalentJavaClassname);
if (uris == null) {
uris = new ArrayList<URI>();
uris.add(equivalentRdfClass.getURI());
datatypes.put(equivalentJavaClassname, uris);
}
uris.add(bean.getURI());
literals.addDatatype(equivalentJavaClass, bean.getURI());
return;
}
}
bean.generateSourceCode(target, resolver);
String pkg = resolver.getPackageName(bean.getURI());
String className = resolver.getSimpleName(bean.getURI());
if (pkg != null) {
className = pkg + '.' + className;
}
synchronized (content) {
logger.debug("Saving {}", className);
content.add(className);
datatypes.put(className, null);
}
} catch (Exception exc) {
logger.error("Error processing {}", bean);
if (exception == null) {
exception = exc;
}
}
}
}
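/** URI prefix used for resources that name Java classes directly. */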
private static final String JAVA_NS = "java:";
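/**
 * Prefers the current thread's context class loader, falling back to the
 * class loader that loaded OWLCompiler itself.
 */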
private static ClassLoader findClassLoader() {
ClassLoader ccl = Thread.currentThread().getContextClassLoader();
if (ccl == null)
return OWLCompiler.class.getClassLoader();
return ccl;
}
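/**
 * Worker loop that drains builder tasks from the queue and runs them; the
 * helper instance itself acts as a poison pill: when it is taken from the
 * queue, the loop stops.
 */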
Runnable helper = new Runnable() {
public void run() {
try {
for (Runnable r = queue.take(); r != helper; r = queue.take()) {
r.run();
}
} catch (InterruptedException e) {
logger.error(e.toString(), e);
}
}
};
final Logger logger = LoggerFactory.getLogger(OWLCompiler.class);
BlockingQueue<Runnable> queue = new LinkedBlockingQueue<Runnable>();
private String[] baseClasses = new String[0];
Set<String> annotations = new TreeSet<String>();
Set<String> concepts = new TreeSet<String>();
/** Java datatype class name -> RDF datatype URIs mapped onto it (null when the class was freshly generated). */
Map<String, List<URI>> datatypes = new HashMap<String, List<URI>>();
Exception exception;
LiteralManager literals;
private RoleMapper mapper;
private String memPrefix;
private Model model;
/** context -> prefix -> namespace */
private Collection