Further examples of RDF generation
ajs6f committed Feb 5, 2015
1 parent 229f6de commit 6e0b21e
Showing 17 changed files with 114 additions and 225 deletions.
@@ -18,7 +18,6 @@

import static com.hp.hpl.jena.rdf.model.ModelFactory.createDefaultModel;
import static com.hp.hpl.jena.vocabulary.RDF.type;
import static java.util.Arrays.asList;
import static javax.ws.rs.core.HttpHeaders.CACHE_CONTROL;
import static javax.ws.rs.core.MediaType.APPLICATION_OCTET_STREAM_TYPE;
import static javax.ws.rs.core.Response.ok;
@@ -45,7 +44,6 @@
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.function.Predicate;
@@ -142,8 +142,7 @@ public int compareTo(final PreferTag otherTag) {
public boolean equals(final Object obj) {
if ((obj != null) && (obj instanceof PreferTag)) {
return getTag().equals(((PreferTag) obj).getTag());
} else {
return false;
}
return false;
}
}
@@ -20,12 +20,10 @@
import static com.hp.hpl.jena.graph.NodeFactory.createURI;
import static com.hp.hpl.jena.graph.Triple.create;
import static com.hp.hpl.jena.rdf.model.ResourceFactory.createTypedLiteral;
import static java.util.Arrays.stream;
import static java.util.Objects.nonNull;
import static java.util.Spliterator.IMMUTABLE;
import static java.util.Spliterators.spliteratorUnknownSize;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.StreamSupport.stream;
import static org.fcrepo.kernel.FedoraJcrTypes.ROOT;
import static org.fcrepo.kernel.RdfLexicon.HAS_FIXITY_CHECK_COUNT;
import static org.fcrepo.kernel.RdfLexicon.HAS_FIXITY_ERROR_COUNT;
@@ -15,30 +15,35 @@
*/
package org.fcrepo.kernel.impl.rdf.impl;

import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.rdf.model.Resource;

import org.fcrepo.kernel.models.FedoraResource;
import org.fcrepo.kernel.exception.RepositoryRuntimeException;
import org.fcrepo.kernel.identifiers.IdentifierConverter;

import org.slf4j.Logger;

import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.nodetype.NodeType;
import java.util.Iterator;
import java.util.Set;

import static com.google.common.base.Throwables.propagate;
import java.util.Arrays;
import java.util.function.Function;
import java.util.stream.Stream;

import static com.hp.hpl.jena.graph.NodeFactory.createURI;
import static com.hp.hpl.jena.graph.Triple.create;
import static com.hp.hpl.jena.vocabulary.RDF.type;
import static java.util.function.Function.identity;
import static org.fcrepo.kernel.impl.rdf.JcrRdfTools.getRDFNamespaceForJcrNamespace;
import static org.slf4j.LoggerFactory.getLogger;

/**
* Construct rdf:type triples for primary type, mixins, and their supertypes.
*
* @author cabeer
* @author ajs6f
* @since 10/1/14
*/
public class TypeRdfContext extends NodeRdfContext {
@@ -56,70 +61,34 @@ public TypeRdfContext(final FedoraResource resource,
throws RepositoryException {
super(resource, idTranslator);

//include rdf:type for primaryType, mixins, and their supertypes
concatRdfTypes();
final Node node = resource().getNode();
final NodeType primaryNodeType = node.getPrimaryNodeType();
final NodeType[] mixins = node.getMixinNodeTypes();
final Stream<NodeType> allTypes =
Stream.of(Stream.of(primaryNodeType), Stream.of(primaryNodeType.getSupertypes()), Stream.of(mixins),
Stream.of(mixins).flatMap(t -> Arrays.stream(t.getSupertypes()))).flatMap(identity());
concat(allTypes.map(nodetype2triple));
}

private void concatRdfTypes() throws RepositoryException {
final ImmutableList.Builder<NodeType> nodeTypesB = ImmutableList.<NodeType>builder();

final NodeType primaryNodeType = resource().getNode().getPrimaryNodeType();
nodeTypesB.add(primaryNodeType);

if (primaryNodeType != null && primaryNodeType.getSupertypes() != null) {
final Set<NodeType> primarySupertypes = ImmutableSet.<NodeType>builder()
.add(primaryNodeType.getSupertypes()).build();
nodeTypesB.addAll(primarySupertypes);
}

final NodeType[] mixinNodeTypesArr = resource().getNode().getMixinNodeTypes();

if (mixinNodeTypesArr != null) {
final Set<NodeType> mixinNodeTypes = ImmutableSet.<NodeType>builder().add(mixinNodeTypesArr).build();
nodeTypesB.addAll(mixinNodeTypes);

final ImmutableSet.Builder<NodeType> mixinSupertypes = ImmutableSet.<NodeType>builder();
for (final NodeType mixinNodeType : mixinNodeTypes) {
mixinSupertypes.addAll(ImmutableSet.<NodeType>builder().add(mixinNodeType.getSupertypes()).build());
}

nodeTypesB.addAll(mixinSupertypes.build());
private final Function<NodeType, Triple> nodetype2triple =
nodeType -> {
final String fullTypeName = nodeType.getName();
LOGGER.trace("Translating JCR mixin name: {}", fullTypeName);
final String prefix = fullTypeName.split(":")[0];
final String typeName = fullTypeName.split(":")[1];
final String namespace = getJcrUri(prefix);
LOGGER.trace("with JCR namespace: {}", namespace);
final com.hp.hpl.jena.graph.Node rdfType =
createURI(getRDFNamespaceForJcrNamespace(namespace) + typeName);
LOGGER.trace("into RDF resource: {}", rdfType);
return create(subject(), type.asNode(), rdfType);
};

private String getJcrUri(final String prefix) {
try {
return resource().getNode().getSession().getWorkspace().getNamespaceRegistry().getURI(prefix);
} catch (final RepositoryException e) {
throw new RepositoryRuntimeException(e);
}

final ImmutableList<NodeType> nodeTypes = nodeTypesB.build();
final Iterator<NodeType> nodeTypesIt = nodeTypes.iterator();

concat(Iterators.transform(nodeTypesIt, nodetype2triple()));
}

private Function<NodeType, Triple> nodetype2triple() {
return new Function<NodeType, Triple>() {

@Override
public Triple apply(final NodeType nodeType) {
try {
final String fullTypeName = nodeType.getName();
LOGGER.trace("Translating JCR mixin name: {}", fullTypeName);
final String prefix = fullTypeName.split(":")[0];
final String typeName = fullTypeName.split(":")[1];
final String namespace = getJcrUri(prefix);
LOGGER.trace("with JCR namespace: {}", namespace);
final com.hp.hpl.jena.graph.Node rdfType =
createURI(getRDFNamespaceForJcrNamespace(namespace)
+ typeName);
LOGGER.trace("into RDF resource: {}", rdfType);
return create(subject(), type.asNode(), rdfType);
} catch (final RepositoryException e) {
throw propagate(e);
}
}

};
}

private String getJcrUri(final String prefix) throws RepositoryException {
return resource().getNode().getSession().getWorkspace().getNamespaceRegistry()
.getURI(prefix);
}

}
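For readers skimming the diff: the new TypeRdfContext above replaces the Guava-based concatRdfTypes() helper with a single Stream pipeline that flattens the primary type, its supertypes, the mixins, and the mixins' supertypes, then maps each type to an rdf:type triple. Below is a minimal, standalone sketch of that flattening idiom only; the TypeLike interface and the class and method names are hypothetical stand-ins for javax.jcr.nodetype.NodeType, and the real class maps each type to a Jena Triple rather than collecting names.

import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class TypeFlatteningSketch {

    // Hypothetical stand-in for javax.jcr.nodetype.NodeType.
    interface TypeLike {
        String getName();
        TypeLike[] getSupertypes();
    }

    // Flatten the primary type, its supertypes, the mixins, and the mixins' supertypes
    // into one Stream, mirroring the Stream.of(...).flatMap(identity()) composition above.
    static List<String> allTypeNames(final TypeLike primary, final TypeLike[] mixins) {
        final Stream<TypeLike> allTypes = Stream.of(
                Stream.of(primary),
                Stream.of(primary.getSupertypes()),
                Stream.of(mixins),
                Stream.of(mixins).flatMap(m -> Arrays.stream(m.getSupertypes())))
            .flatMap(Function.identity());
        // The real class maps each type to an rdf:type Triple; here we just collect names.
        return allTypes.map(TypeLike::getName).collect(Collectors.toList());
    }
}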
@@ -18,9 +18,6 @@
import static com.hp.hpl.jena.graph.NodeFactory.createLiteral;
import static com.hp.hpl.jena.graph.Triple.create;
import static com.hp.hpl.jena.rdf.model.ResourceFactory.createTypedLiteral;
import static java.util.Arrays.stream;
import static java.util.Spliterators.spliteratorUnknownSize;
import static java.util.stream.StreamSupport.stream;
import static org.fcrepo.kernel.RdfLexicon.CREATED_DATE;
import static org.fcrepo.kernel.RdfLexicon.HAS_VERSION;
import static org.fcrepo.kernel.RdfLexicon.HAS_VERSION_LABEL;
@@ -43,7 +40,6 @@
import org.fcrepo.kernel.models.FedoraResource;
import org.fcrepo.kernel.exception.RepositoryRuntimeException;
import org.fcrepo.kernel.identifiers.IdentifierConverter;
import org.fcrepo.kernel.impl.utils.Streams;
import org.fcrepo.kernel.utils.iterators.RdfStream;

import com.hp.hpl.jena.graph.Triple;
@@ -59,8 +55,6 @@
*/
public class VersionsRdfContext extends NodeRdfContext {

private final VersionHistory versionHistory;

private final Logger LOGGER = getLogger(VersionsRdfContext.class);

/**
@@ -74,40 +68,37 @@ public VersionsRdfContext(final FedoraResource resource,
final IdentifierConverter<Resource, FedoraResource> idTranslator)
throws RepositoryException {
super(resource, idTranslator);
this.versionHistory = resource.getVersionHistory();
final Iterator<Version> allVersions = versionHistory.getAllVersions();
final Iterator<Version> allVersions = resource().getVersionHistory().getAllVersions();
final Stream<Version> versionsStream = fromIterator(allVersions);
concat(versionsStream.flatMap(version2triples));
}

private final Function<Version, Stream<Triple>> version2triples =
new Function<Version, Stream<Triple>>() {

@Override
public Stream<Triple> apply(final Version version) {

try {
/* Discard jcr:rootVersion */
if (version.getName().equals(versionHistory.getRootVersion().getName())) {
LOGGER.trace("Skipped root version from triples");
return new RdfStream();
}

final Node frozenNode = version.getFrozenNode();
final com.hp.hpl.jena.graph.Node versionSubject =
nodeToResource(translator()).convert(frozenNode).asNode();

final RdfStream results =
new RdfStream(create(subject(), HAS_VERSION.asNode(), versionSubject),
create(versionSubject, CREATED_DATE.asNode(),
createTypedLiteral(version.getCreated()).asNode()));
results.concat(Arrays.stream(versionHistory.getVersionLabels(version)).map(
label -> create(versionSubject, HAS_VERSION_LABEL.asNode(), createLiteral(label))));
return results;

} catch (final RepositoryException e) {
throw new RepositoryRuntimeException(e);
}
version -> {

try {
final VersionHistory versionHistory = resource().getVersionHistory();
/* Discard jcr:rootVersion */
if (version.getName().equals(versionHistory.getRootVersion().getName())) {
LOGGER.trace("Skipped root version from triples");
return new RdfStream();
}
};

final Node frozenNode = version.getFrozenNode();
final com.hp.hpl.jena.graph.Node versionSubject =
nodeToResource(translator()).convert(frozenNode).asNode();

final RdfStream results =
new RdfStream(create(subject(), HAS_VERSION.asNode(), versionSubject),
create(versionSubject, CREATED_DATE.asNode(),
createTypedLiteral(version.getCreated()).asNode()));
results.concat(Arrays.stream(versionHistory.getVersionLabels(version)).map(
label -> create(versionSubject, HAS_VERSION_LABEL.asNode(), createLiteral(label))));
return results;

} catch (final RepositoryException e) {
throw new RepositoryRuntimeException(e);
}

};
}
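The fromIterator(allVersions) call above adapts the JCR version iterator to a java.util.stream.Stream. The commit touches the usual JDK machinery for this (java.util.Spliterator.IMMUTABLE, Spliterators.spliteratorUnknownSize, StreamSupport.stream), so a plausible shape for such an adapter is sketched below; the class and method names are illustrative assumptions, not necessarily the fcrepo helper itself.

import java.util.Iterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import static java.util.Spliterator.IMMUTABLE;

public final class IteratorStreams {

    private IteratorStreams() {
        // static utility, not instantiable
    }

    // Wrap an Iterator of unknown size (e.g. VersionHistory#getAllVersions()) in a
    // sequential Stream, marking the spliterator IMMUTABLE.
    public static <T> Stream<T> fromIterator(final Iterator<T> iterator) {
        return StreamSupport.stream(
                Spliterators.spliteratorUnknownSize(iterator, IMMUTABLE), false);
    }
}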
@@ -67,20 +67,13 @@ public ItemDefinitionToTriples(final Node context) {

@Override
public RdfStream apply(final T input) {

try {
final Node propertyDefinitionNode = getResource(input).asNode();

LOGGER.trace("Adding triples for nodeType: {} with child nodes: {}", context.getURI(),
propertyDefinitionNode.getURI());

return new RdfStream(
create(propertyDefinitionNode, type.asNode(), Property.asNode()),
create(propertyDefinitionNode, domain.asNode(), context),
create(propertyDefinitionNode, label.asNode(), createLiteral(input.getName())));
} catch (final RepositoryException e) {
throw new RepositoryRuntimeException(e);
}
}

/**
@@ -124,9 +117,8 @@ public static Resource getResource(final NodeType nodeType) {
*
* @param itemDefinition
* @return a resource for the given ItemDefinition
* @throws javax.jcr.RepositoryException
*/
public static Resource getResource(final ItemDefinition itemDefinition) throws RepositoryException {
public static Resource getResource(final ItemDefinition itemDefinition) {
return getResource((Namespaced) itemDefinition);
}
}
@@ -19,10 +19,8 @@
import org.fcrepo.kernel.utils.iterators.RdfStream;
import org.slf4j.Logger;

import javax.jcr.RepositoryException;
import javax.jcr.nodetype.NodeDefinition;
import javax.jcr.nodetype.NodeType;
import static com.google.common.base.Throwables.propagate;
import static com.hp.hpl.jena.graph.Triple.create;
import static com.hp.hpl.jena.vocabulary.RDFS.range;
import static org.slf4j.LoggerFactory.getLogger;
@@ -47,32 +45,26 @@ public NodeDefinitionToTriples(final Node domain) {
@Override
public RdfStream apply(final NodeDefinition input) {

try {
final Node propertyDefinitionNode = getResource(input).asNode();

final Node propertyDefinitionNode = getResource(input).asNode();
final NodeType[] requiredPrimaryTypes = input.getRequiredPrimaryTypes();

final NodeType[] requiredPrimaryTypes = input.getRequiredPrimaryTypes();

if (requiredPrimaryTypes.length > 1) {
// TODO we can express this as an OWL unionOf. But should we?
LOGGER.trace(
"Skipping RDFS:range for {} with multiple primary types",
propertyDefinitionNode.getName());
} else if (requiredPrimaryTypes.length == 1) {
LOGGER.trace("Adding RDFS:range for {} with primary types {}",
input.getName(),
requiredPrimaryTypes[0].getName());
return new RdfStream(create(propertyDefinitionNode, range
.asNode(), getResource(requiredPrimaryTypes[0])
.asNode())).concat(super.apply(input));
} else {
LOGGER.trace("Skipping RDFS:range for {} with no required primary types");
}
return super.apply(input);

} catch (final RepositoryException e) {
throw propagate(e);
if (requiredPrimaryTypes.length > 1) {
// TODO we can express this as an OWL unionOf. But should we?
LOGGER.trace(
"Skipping RDFS:range for {} with multiple primary types",
propertyDefinitionNode.getName());
} else if (requiredPrimaryTypes.length == 1) {
LOGGER.trace("Adding RDFS:range for {} with primary types {}",
input.getName(),
requiredPrimaryTypes[0].getName());
return new RdfStream(create(propertyDefinitionNode, range
.asNode(), getResource(requiredPrimaryTypes[0])
.asNode())).concat(super.apply(input));
} else {
LOGGER.trace("Skipping RDFS:range for {} with no required primary types");
}
return super.apply(input);

}
}
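Taken together, these diffs follow one migration pattern: anonymous com.google.common.base.Function implementations fed to Guava's Iterators are replaced by java.util.function.Function lambdas and method references composed directly into Stream pipelines, and checked RepositoryExceptions are wrapped in RepositoryRuntimeException rather than rethrown via Guava's propagate. A small before/after sketch of that pattern, using purely illustrative names:

import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class FunctionMigrationSketch {

    // Before (Guava style), roughly:
    //
    //     final com.google.common.base.Function<String, Integer> length =
    //             new com.google.common.base.Function<String, Integer>() {
    //                 @Override
    //                 public Integer apply(final String input) {
    //                     return input.length();
    //                 }
    //             };
    //     Iterators.transform(names, length);
    //
    // After (JDK 8 style): a lambda or method reference used directly in a Stream pipeline.

    private static final Function<String, Integer> LENGTH = String::length;

    public static List<Integer> lengths(final Stream<String> names) {
        return names.map(LENGTH).collect(Collectors.toList());
    }
}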
