Fix for FCREPO-1640
ajs6f committed Jul 17, 2015
1 parent aadf58d commit 8b27b56
Showing 11 changed files with 109 additions and 198 deletions.
@@ -40,7 +40,6 @@
import static org.fcrepo.kernel.RdfLexicon.LDP_NAMESPACE;
import static org.fcrepo.kernel.RdfLexicon.isManagedNamespace;
import static org.fcrepo.kernel.impl.rdf.ManagedRdf.isManagedTriple;

import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
@@ -99,7 +98,6 @@
import org.apache.jena.riot.Lang;
import org.glassfish.jersey.media.multipart.ContentDisposition;
import org.jvnet.hk2.annotations.Optional;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.hp.hpl.jena.graph.Triple;
@@ -177,19 +175,20 @@ protected Response getContent(final String rangeValue,

} else {
rdfStream.concat(getResourceTriples());

if (prefer != null) {
prefer.getReturn().addResponseHeaders(servletResponse);
}

}
servletResponse.addHeader("Vary", "Accept, Range, Accept-Encoding, Accept-Language");

return Response.ok(rdfStream).build();
return ok(rdfStream).build();
}

protected RdfStream getResourceTriples() {

// use the thing described, not the description, for the subject of descriptive triples
if (resource() instanceof NonRdfSourceDescription) {
resource = ((NonRdfSourceDescription) resource()).getDescribedResource();
}
final PreferTag returnPreference;

if (prefer != null && prefer.hasReturn()) {
@@ -204,12 +203,8 @@ protected RdfStream getResourceTriples() {

final RdfStream rdfStream = new RdfStream();

final Predicate<Triple> tripleFilter;
if (ldpPreferences.prefersServerManaged()) {
tripleFilter = x -> true;
} else {
tripleFilter = IS_MANAGED_TYPE.or(isManagedTriple::apply).negate();
}
final Predicate<Triple> tripleFilter = ldpPreferences.prefersServerManaged() ? x -> true :
IS_MANAGED_TYPE.or(isManagedTriple::apply).negate();

if (ldpPreferences.prefersServerManaged()) {
rdfStream.concat(getTriples(LdpRdfContext.class));
@@ -240,17 +235,6 @@
rdfStream.concat(getTriples(LdpIsMemberOfRdfContext.class));
}

// Include binary properties if this is a binary description
if (resource() instanceof NonRdfSourceDescription) {
final FedoraResource described = ((NonRdfSourceDescription) resource()).getDescribedResource();
rdfStream.concat(filter(described.getTriples(translator(), ImmutableList.of(TypeRdfContext.class,
PropertiesRdfContext.class,
ContentRdfContext.class)), tripleFilter::test));
if (ldpPreferences.prefersServerManaged()) {
rdfStream.concat(getTriples(described,LdpRdfContext.class));
}
}

// Embed all hash and blank nodes
rdfStream.concat(filter(getTriples(HashRdfContext.class), tripleFilter::test));
rdfStream.concat(filter(getTriples(BlankNodeRdfContext.class), tripleFilter::test));
@@ -393,7 +377,6 @@ protected FedoraResource resource() {
if (resource == null) {
resource = getResourceFromPath(externalPath());
}

return resource;
}

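A side note on the tripleFilter change in getResourceTriples() above: the branching assignment collapses into one conditional expression over composed predicates. Below is a minimal, self-contained sketch of that Java 8 idiom, with hypothetical stand-in predicates rather than the fcrepo IS_MANAGED_TYPE and isManagedTriple constants.

import java.util.function.Predicate;

public class TripleFilterSketch {
    public static void main(final String[] args) {
        // Hypothetical stand-ins for IS_MANAGED_TYPE and isManagedTriple::apply.
        final Predicate<String> isManagedType = s -> s.startsWith("managed-type:");
        final Predicate<String> isManagedTriple = s -> s.startsWith("managed-triple:");

        // Analogous to ldpPreferences.prefersServerManaged().
        final boolean prefersServerManaged = false;

        // Either pass everything through, or drop anything matching a managed predicate.
        final Predicate<String> tripleFilter = prefersServerManaged ? x -> true :
                isManagedType.or(isManagedTriple).negate();

        System.out.println(tripleFilter.test("managed-type:foo"));  // false
        System.out.println(tripleFilter.test("user-triple:bar"));   // true
    }
}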
@@ -16,7 +16,6 @@
package org.fcrepo.http.api;

import static com.google.common.base.Predicates.containsPattern;
import static com.google.common.collect.Iterables.any;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Lists.transform;

@@ -98,10 +97,8 @@
import org.slf4j.Logger;
import org.springframework.mock.web.MockHttpServletResponse;

import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.RDFNode;
@@ -369,13 +366,7 @@ public void testGet() throws Exception {

final RdfStream entity = (RdfStream) actual.getEntity();
final Model model = entity.asModel();
final List<String> rdfNodes = Lists.transform(Lists.newArrayList(model.listObjects()),
new Function<RDFNode, String>() {
@Override
public String apply(final RDFNode input) {
return input.toString();
}
});
final List<String> rdfNodes = transform(newArrayList(model.listObjects()), RDFNode::toString);
assertTrue("Expected RDF contexts missing", rdfNodes.containsAll(ImmutableSet.of(
"class org.fcrepo.kernel.impl.rdf.impl.LdpContainerRdfContext",
"class org.fcrepo.kernel.impl.rdf.impl.LdpIsMemberOfRdfContext",
@@ -399,13 +390,7 @@ public void testGetWithObject() throws Exception {

final RdfStream entity = (RdfStream) actual.getEntity();
final Model model = entity.asModel();
final List<String> rdfNodes = Lists.transform(Lists.newArrayList(model.listObjects()),
new Function<RDFNode, String>() {
@Override
public String apply(final RDFNode input) {
return input.toString();
}
});
final List<String> rdfNodes = transform(newArrayList(model.listObjects()), RDFNode::toString);
assertTrue("Expected RDF contexts missing", rdfNodes.containsAll(ImmutableSet.of(
"class org.fcrepo.kernel.impl.rdf.impl.LdpContainerRdfContext",
"class org.fcrepo.kernel.impl.rdf.impl.LdpIsMemberOfRdfContext",
@@ -460,13 +445,7 @@ public void testGetWithObjectPreferMinimal() throws Exception {

final RdfStream entity = (RdfStream) actual.getEntity();
final Model model = entity.asModel();
final List<String> rdfNodes = Lists.transform(Lists.newArrayList(model.listObjects()),
new Function<RDFNode, String>() {
@Override
public String apply(final RDFNode input) {
return input.toString();
}
});
final List<String> rdfNodes = transform(newArrayList(model.listObjects()), RDFNode::toString);
assertTrue("Expected RDF contexts missing", rdfNodes.containsAll(ImmutableSet.of(
"class org.fcrepo.kernel.impl.rdf.impl.TypeRdfContext",
"class org.fcrepo.kernel.impl.rdf.impl.PropertiesRdfContext"
@@ -491,13 +470,7 @@ public void testGetWithObjectOmitContainment() throws Exception {

final RdfStream entity = (RdfStream) actual.getEntity();
final Model model = entity.asModel();
final List<String> rdfNodes = Lists.transform(Lists.newArrayList(model.listObjects()),
new Function<RDFNode, String>() {
@Override
public String apply(final RDFNode input) {
return input.toString();
}
});
final List<String> rdfNodes = transform(newArrayList(model.listObjects()), RDFNode::toString);
assertTrue("Should include membership contexts",
rdfNodes.contains("class org.fcrepo.kernel.impl.rdf.impl.LdpContainerRdfContext"));

@@ -516,13 +489,7 @@ public void testGetWithObjectOmitMembership() throws Exception {

final RdfStream entity = (RdfStream) actual.getEntity();
final Model model = entity.asModel();
final List<String> rdfNodes = Lists.transform(Lists.newArrayList(model.listObjects()),
new Function<RDFNode, String>() {
@Override
public String apply(final RDFNode input) {
return input.toString();
}
});
final List<String> rdfNodes = transform(newArrayList(model.listObjects()), RDFNode::toString);
assertFalse("Should not include membership contexts",
rdfNodes.contains("class org.fcrepo.kernel.impl.rdf.impl.LdpContainerRdfContext"));
assertFalse("Should not include membership contexts",
@@ -542,17 +509,11 @@ public void testGetWithObjectIncludeReferences() throws ParseException, IOExcept

final RdfStream entity = (RdfStream) actual.getEntity();
final Model model = entity.asModel();
final List<String> rdfNodes = transform(newArrayList(model.listObjects()),
new Function<RDFNode, String>() {

@Override
public String apply(final RDFNode input) {
return input.toString();
}
});
final List<String> rdfNodes = transform(newArrayList(model.listObjects()), RDFNode::toString);
log.debug("Received RDF nodes: {}", rdfNodes);
final String referencesContextClassName = ReferencesRdfContext.class.getName();
assertTrue("Should include references contexts", any(rdfNodes, containsPattern(referencesContextClassName)));
assertTrue("Should include references contexts",
rdfNodes.stream().anyMatch(containsPattern(referencesContextClassName)::apply));
}

@Test
@@ -590,7 +551,7 @@ public void testGetWithExternalMessageBinary() throws Exception {
}

@Test
public void testGetWithBinaryDescription() throws Exception {
public void testGetWithBinaryDescription() throws RepositoryException, IOException {
final NonRdfSourceDescription mockResource
= (NonRdfSourceDescription)setResource(NonRdfSourceDescription.class);
when(mockResource.getDescribedResource()).thenReturn(mockBinary);
@@ -607,15 +568,9 @@ public void testGetWithBinaryDescription() throws Exception {
mockResponse.getHeaders("Link")
.contains("<" + idTranslator.toDomain(binaryPath) + ">; rel=\"describes\""));

final RdfStream entity = (RdfStream) actual.getEntity();
final Model model = entity.asModel();
final List<String> rdfNodes = Lists.transform(Lists.newArrayList(model.listObjects()),
new Function<RDFNode, String>() {
@Override
public String apply(final RDFNode input) {
return input.toString();
}
});
final Model model = ((RdfStream) actual.getEntity()).asModel();
final List<String> rdfNodes = transform(newArrayList(model.listObjects()), RDFNode::toString);
log.info("Found RDF objects\n{}", rdfNodes);
assertTrue("Expected RDF contexts missing", rdfNodes.containsAll(ImmutableSet.of(
"class org.fcrepo.kernel.impl.rdf.impl.LdpContainerRdfContext",
"class org.fcrepo.kernel.impl.rdf.impl.LdpIsMemberOfRdfContext",
@@ -624,8 +579,7 @@ public String apply(final RDFNode input) {
"class org.fcrepo.kernel.impl.rdf.impl.PropertiesRdfContext",
"class org.fcrepo.kernel.impl.rdf.impl.ChildrenRdfContext",
"class org.fcrepo.kernel.impl.rdf.impl.AclRdfContext",
"class org.fcrepo.kernel.impl.rdf.impl.ParentRdfContext",
"child:properties"
"class org.fcrepo.kernel.impl.rdf.impl.ParentRdfContext"
)));

}
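The recurring test change above replaces Guava's anonymous Function<RDFNode, String> with a method reference handed to the statically imported transform and newArrayList. A minimal sketch of the same pattern, using plain integers in place of the tests' Jena RDFNode objects:

import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Lists.transform;

import java.util.List;

public class TransformSketch {
    public static void main(final String[] args) {
        final List<Integer> numbers = newArrayList(1, 2, 3);
        // Guava's Function has a single abstract method, so a Java 8 method reference
        // can stand in for the anonymous Function<RDFNode, String> classes removed above.
        final List<String> asStrings = transform(numbers, Object::toString);
        System.out.println(asStrings); // [1, 2, 3]
    }
}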
@@ -91,6 +91,7 @@
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.net.URI;
import java.text.ParseException;
@@ -386,13 +387,15 @@ public void testGetNonRDFSourceDescription() throws IOException {
final String id = getRandomUniqueId();
createDatastream(id, "x", "some content");
try (final CloseableHttpResponse response = execute(getDSDescMethod(id, "x"));
final CloseableGraphStore graphStore = getGraphStore(response)) {
final String dsSubject = serverAddress + id + "/x";
final Node fedoraNonRdfSourceDescription = createURI(REPOSITORY_NAMESPACE + "NonRdfSourceDescription");
assertTrue("Description should be a fedora:NonRdfSourceDescription", graphStore.contains(ANY,
createURI(dsSubject + "/" + FCR_METADATA), rdfType, fedoraNonRdfSourceDescription));
assertTrue("Binary should be a ldp:NonRDFSource", graphStore.contains(ANY,
createURI(dsSubject), rdfType, NON_RDF_SOURCE.asNode()));
final CloseableGraphStore graph = getGraphStore(response)) {
final Node correctDSSubject = createURI(serverAddress + id + "/x");
assertTrue("Binary should be a ldp:NonRDFSource", graph.contains(ANY,
correctDSSubject, rdfType, NON_RDF_SOURCE.asNode()));
// every triple in the response should have a subject of the actual resource described
logger.info("Found graph:\n{}", graph);
graph.find().forEachRemaining(quad -> {
assertEquals("Found a triple with incorrect subject!", correctDSSubject, quad.getSubject());
});
}
}

@@ -516,31 +519,26 @@ public void testPatchBinaryDescriptionWithBinaryProperties() throws IOException
}

@Test
public void testPatchBinaryNameAndType() throws Exception {
public void testPatchBinaryNameAndType() throws IOException {
final String pid = getRandomUniqueId();

createDatastream(pid, "x", "some content");

final String location = serverAddress + pid + "/x/fcr:metadata";
final HttpPatch patch = new HttpPatch(location);
patch.addHeader("Content-Type", "application/sparql-update");
final BasicHttpEntity e = new BasicHttpEntity();
final String sparql = "INSERT DATA { <" + serverAddress + pid + "/x> <" + HAS_MIME_TYPE + "> \"text/plain\""
+ " . <" + serverAddress + pid + "/x> <" + HAS_ORIGINAL_NAME + "> \"x.txt\" }";
e.setContent(new ByteArrayInputStream(sparql.getBytes()));
patch.setEntity(e);
final HttpResponse response = client.execute(patch);
assertEquals(NO_CONTENT.getStatusCode(), response.getStatusLine()
.getStatusCode());

final GraphStore graphStore = getGraphStore(new HttpGet(location));
assertTrue(graphStore.contains(ANY, createURI(serverAddress + pid + "/x"),
HAS_MIME_TYPE.asNode(), createLiteral("text/plain")));
assertTrue(graphStore.contains(ANY, createURI(serverAddress + pid + "/x"),
HAS_ORIGINAL_NAME.asNode(), createLiteral("x.txt")));
assertFalse("Should not contain old mime type property",
graphStore.contains(ANY, createURI(serverAddress + pid + "/x"),
createURI(REPOSITORY_NAMESPACE + "mimeType"), ANY));
patch.setEntity(new StringEntity("INSERT DATA { <" + serverAddress + pid + "/x> <" + HAS_MIME_TYPE +
"> \"text/plain\"" + " . <" + serverAddress + pid + "/x> <" + HAS_ORIGINAL_NAME + "> \"x.txt\" }"));
try (final CloseableHttpResponse response = client.execute(patch)) {
assertEquals(NO_CONTENT.getStatusCode(), getStatus(response));
try (final CloseableGraphStore graphStore = getGraphStore(new HttpGet(location))) {
final Node subject = createURI(serverAddress + pid + "/x");
assertTrue(graphStore.contains(ANY, subject, HAS_MIME_TYPE.asNode(), createLiteral("text/plain")));
assertTrue(graphStore.contains(ANY, subject, HAS_ORIGINAL_NAME.asNode(), createLiteral("x.txt")));
assertFalse("Should not contain old mime type property", graphStore.contains(ANY,
subject, createURI(REPOSITORY_NAMESPACE + "mimeType"), ANY));
}
}
}

@Test
@@ -1208,7 +1206,7 @@ public void testUpdateBinaryWithoutContentType() throws IOException {
}

@Test
public void testRoundTripReplaceGraphForDatastream() throws IOException {
public void testRoundTripReplaceGraphForDatastreamDescription() throws IOException {
final String id = getRandomUniqueId();
final String subjectURI = serverAddress + id + "/ds1";
createDatastream(id, "ds1", "some-content");
@@ -1217,14 +1215,17 @@ public void testRoundTripReplaceGraphForDatastream() throws IOException {
getObjMethod.addHeader("Accept", "text/turtle");
final Model model = createDefaultModel();
try (final CloseableHttpResponse getResponse = execute(getObjMethod)) {
model.read(getResponse.getEntity().getContent(), subjectURI, "TURTLE");
final String graph = EntityUtils.toString(getResponse.getEntity());
logger.trace("Got serialized object graph for testRoundTripReplaceGraphForDatastream():\n {}", graph);
try (final StringReader r = new StringReader(graph)) {
model.read(r, subjectURI, "TURTLE");
}
}

final HttpPut replaceMethod = new HttpPut(subjectURI + "/" + FCR_METADATA);
try (final StringWriter w = new StringWriter()) {
model.write(w, "N-TRIPLE");
replaceMethod.setEntity(new ByteArrayEntity(w.toString().getBytes()));
logger.trace("Retrieved object graph for testRoundTripReplaceGraphForDatastream():\n {}", w);
replaceMethod.setEntity(new StringEntity(w.toString()));
logger.trace("Transmitting object graph for testRoundTripReplaceGraphForDatastream():\n {}", w);
}
replaceMethod.addHeader("Content-Type", "application/n-triples");
assertEquals(NO_CONTENT.getStatusCode(), getStatus(replaceMethod));
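On the testPatchBinaryNameAndType rewrite above: the BasicHttpEntity plus ByteArrayInputStream wiring gives way to a single StringEntity carrying the SPARQL Update body. A minimal sketch of issuing such a PATCH with Apache HttpClient, against a hypothetical resource URI and an example predicate rather than the test's HAS_MIME_TYPE constant:

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPatch;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class SparqlPatchSketch {
    public static void main(final String[] args) throws Exception {
        // Hypothetical fcr:metadata URI; the test builds it from serverAddress + pid + "/x/fcr:metadata".
        final String location = "http://localhost:8080/rest/example/x/fcr:metadata";
        final HttpPatch patch = new HttpPatch(location);
        patch.addHeader("Content-Type", "application/sparql-update");
        // StringEntity replaces the manual BasicHttpEntity + ByteArrayInputStream wiring.
        patch.setEntity(new StringEntity(
                "INSERT DATA { <http://localhost:8080/rest/example/x> "
                        + "<http://example.org/hasMimeType> \"text/plain\" }"));
        try (final CloseableHttpClient client = HttpClients.createDefault();
                final CloseableHttpResponse response = client.execute(patch)) {
            System.out.println(response.getStatusLine()); // expect 204 No Content on success
        }
    }
}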
@@ -464,7 +464,7 @@ public void testDatastreamAutoMixinAndRevert() throws IOException {
try (final CloseableGraphStore updatedDSProperties =
getContent(serverAddress + pid + "/" + dsid + "/fcr:metadata")) {
assertTrue("Node must have versionable mixin.", updatedDSProperties.contains(ANY,
createURI(serverAddress + pid + "/" + dsid + "/fcr:metadata"), type.asNode(),
createURI(serverAddress + pid + "/" + dsid), type.asNode(),
createURI(MIX_NAMESPACE + "versionable")));
}
// update the content
@@ -27,7 +27,6 @@

import org.fcrepo.kernel.identifiers.IdentifierConverter;
import org.fcrepo.kernel.impl.rdf.impl.mappings.PropertyToTriple;
import org.fcrepo.kernel.models.FedoraBinary;
import org.fcrepo.kernel.models.FedoraResource;

import org.slf4j.Logger;
@@ -71,13 +70,7 @@ private Iterator<Triple> triplesFromProperties(final FedoraResource n)
throws RepositoryException {
LOGGER.trace("Creating triples for node: {}", n);

final Iterator<Property> allProperties;
if (n instanceof FedoraBinary) {
final FedoraResource description = ((FedoraBinary)n).getDescription();
allProperties = Iterators.concat(n.getNode().getProperties(), description.getNode().getProperties());
} else {
allProperties = n.getNode().getProperties();
}
final Iterator<Property> allProperties = n.getNode().getProperties();

final UnmodifiableIterator<Property> properties =
Iterators.filter(allProperties, isInternalProperty.negate().and(IS_NOT_UUID)::test);
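The PropertiesRdfContext simplification above leaves a single Iterators.filter call whose predicate is composed with java.util.function.Predicate and adapted to Guava through a ::test method reference. A minimal sketch of that adaptation, with hypothetical property-name strings standing in for JCR Property objects:

import com.google.common.collect.Iterators;
import com.google.common.collect.UnmodifiableIterator;

import java.util.Iterator;
import java.util.function.Predicate;

public class PropertyFilterSketch {
    public static void main(final String[] args) {
        // Hypothetical stand-ins for the node's property iterator and the predicates above.
        final Iterator<String> allProperties = Iterators.forArray("jcr:uuid", "dc:title", "jcr:created");
        final Predicate<String> isInternalProperty = s -> s.startsWith("jcr:");
        final Predicate<String> isNotUuid = s -> !"jcr:uuid".equals(s);

        // A java.util Predicate composition is handed to Guava's filter through ::test,
        // mirroring isInternalProperty.negate().and(IS_NOT_UUID)::test above.
        final UnmodifiableIterator<String> properties =
                Iterators.filter(allProperties, isInternalProperty.negate().and(isNotUuid)::test);
        properties.forEachRemaining(System.out::println); // prints only dc:title
    }
}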
