001/* 002 * Copyright 2019 DuraSpace, Inc. 003 * 004 * Licensed under the Apache License, Version 2.0 (the "License"); 005 * you may not use this file except in compliance with the License. 006 * You may obtain a copy of the License at 007 * 008 * http://www.apache.org/licenses/LICENSE-2.0 009 * 010 * Unless required by applicable law or agreed to in writing, software 011 * distributed under the License is distributed on an "AS IS" BASIS, 012 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 013 * See the License for the specific language governing permissions and 014 * limitations under the License. 015 */ 016 017package org.fcrepo.migration.handlers.ocfl; 018 019import at.favre.lib.bytes.Bytes; 020import com.google.common.base.Preconditions; 021import com.google.common.base.Strings; 022import com.hp.hpl.jena.datatypes.xsd.XSDDatatype; 023import com.hp.hpl.jena.rdf.model.Model; 024import com.hp.hpl.jena.rdf.model.ModelFactory; 025import org.apache.commons.codec.digest.DigestUtils; 026import org.apache.commons.io.IOUtils; 027import org.apache.commons.lang3.StringUtils; 028import org.apache.tika.config.TikaConfig; 029import org.apache.tika.detect.Detector; 030import org.apache.tika.io.TikaInputStream; 031import org.apache.tika.metadata.Metadata; 032import org.apache.tika.mime.MimeType; 033import org.apache.tika.mime.MimeTypeException; 034import org.apache.tika.mime.MimeTypes; 035import org.fcrepo.migration.DatastreamVersion; 036import org.fcrepo.migration.FedoraObjectVersionHandler; 037import org.fcrepo.migration.MigrationType; 038import org.fcrepo.migration.ObjectVersionReference; 039import org.fcrepo.migration.ObjectInfo; 040import org.fcrepo.migration.ContentDigest; 041import org.fcrepo.storage.ocfl.InteractionModel; 042import org.fcrepo.storage.ocfl.OcflObjectSession; 043import org.fcrepo.storage.ocfl.OcflObjectSessionFactory; 044import org.fcrepo.storage.ocfl.ResourceHeaders; 045import org.fcrepo.storage.ocfl.ResourceHeadersVersion; 
import org.slf4j.Logger;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.slf4j.LoggerFactory.getLogger;

/**
 * Writes a Fedora object as a single ArchiveGroup.
 * <p>
 * All datastreams and object metadata from a fcrepo3 object are persisted to a
 * single OCFL object (ArchiveGroup in fcrepo6 parlance).
 * </p>
 * <p>
 * The contents of each datastream are written verbatim. No attempt is made to
 * re-write the RELS-EXT to replace subjects and objects with their LDP
 * counterparts.
077 * </p> 078 * <p> 079 * Note: fedora-specific OCFL serialization features (such as redirects, 080 * container metadata, etc) is not fully defined yet, so are not included here 081 * 082 * @author apb@jhu.edu 083 */ 084public class ArchiveGroupHandler implements FedoraObjectVersionHandler { 085 086 private static final Logger LOGGER = getLogger(ArchiveGroupHandler.class); 087 088 private static final String FCREPO_ROOT = "info:fedora/"; 089 090 private static final Map<String, String> externalHandlingMap = Map.of( 091 "E", "proxy", 092 "R", "redirect" 093 ); 094 095 private static final String INLINE_XML = "X"; 096 097 private static final String DS_INACTIVE = "I"; 098 private static final String DS_DELETED = "D"; 099 100 private static final String OBJ_STATE_PROP = "info:fedora/fedora-system:def/model#state"; 101 private static final String OBJ_INACTIVE = "Inactive"; 102 private static final String OBJ_DELETED = "Deleted"; 103 104 private final OcflObjectSessionFactory sessionFactory; 105 private final boolean addDatastreamExtensions; 106 private final boolean deleteInactive; 107 private final boolean foxmlFile; 108 private final MigrationType migrationType; 109 private final String user; 110 private final String idPrefix; 111 private final Detector mimeDetector; 112 private final boolean disableChecksumValidation; 113 114 /** 115 * Create an ArchiveGroupHandler, 116 * 117 * @param sessionFactory 118 * OCFL session factory 119 * @param migrationType 120 * the type of migration to do 121 * @param addDatastreamExtensions 122 * true if datastreams should be written with file extensions 123 * @param deleteInactive 124 * true if inactive objects and datastreams should be migrated as deleted 125 * @param foxmlFile 126 * true if foxml file should be migrated as a whole file, instead of creating property files 127 * @param user 128 * the username to associated with the migrated resources 129 * @param idPrefix 130 * the prefix to add to the Fedora 3 pid (default 
"info:fedora/", like Fedora 3) 131 * @param disableChecksumValidation 132 * if true, migrator should not try to verify that the datastream content matches Fedora 3 checksums 133 */ 134 public ArchiveGroupHandler(final OcflObjectSessionFactory sessionFactory, 135 final MigrationType migrationType, 136 final boolean addDatastreamExtensions, 137 final boolean deleteInactive, 138 final boolean foxmlFile, 139 final String user, 140 final String idPrefix, 141 final boolean disableChecksumValidation) { 142 this.sessionFactory = Preconditions.checkNotNull(sessionFactory, "sessionFactory cannot be null"); 143 this.migrationType = Preconditions.checkNotNull(migrationType, "migrationType cannot be null"); 144 this.addDatastreamExtensions = addDatastreamExtensions; 145 this.deleteInactive = deleteInactive; 146 this.foxmlFile = foxmlFile; 147 this.user = Preconditions.checkNotNull(Strings.emptyToNull(user), "user cannot be blank"); 148 this.idPrefix = idPrefix; 149 this.disableChecksumValidation = disableChecksumValidation; 150 try { 151 this.mimeDetector = new TikaConfig().getDetector(); 152 } catch (Exception e) { 153 throw new RuntimeException(e); 154 } 155 } 156 157 @Override 158 public void processObjectVersions(final Iterable<ObjectVersionReference> versions, final ObjectInfo objectInfo) { 159 // We use the PID to identify the OCFL object 160 final String objectId = objectInfo.getPid(); 161 final String f6ObjectId = idPrefix + objectId; 162 163 // We need to manually keep track of the datastream creation dates 164 final Map<String, String> dsCreateDates = new HashMap<>(); 165 166 String objectState = null; 167 final Map<String, String> datastreamStates = new HashMap<>(); 168 169 for (var ov : versions) { 170 final OcflObjectSession session = sessionFactory.newSession(f6ObjectId); 171 172 if (ov.isFirstVersion()) { 173 if (session.containsResource(f6ObjectId)) { 174 throw new RuntimeException(f6ObjectId + " already exists!"); 175 } 176 objectState = getObjectState(ov, 
objectId); 177 // Object properties are written only once (as fcrepo3 object properties were unversioned). 178 if (foxmlFile) { 179 try (InputStream is = Files.newInputStream(objectInfo.getFoxmlPath())) { 180 final var foxmlDsId = f6ObjectId + "/FOXML"; 181 final var headers = createHeaders(foxmlDsId, f6ObjectId, 182 InteractionModel.NON_RDF).build(); 183 session.writeResource(headers, is); 184 //mark FOXML as a deleted datastream so it gets deleted in handleDeletedResources() 185 datastreamStates.put(foxmlDsId, DS_DELETED); 186 } catch (IOException io) { 187 LOGGER.error("error writing " + objectId + " FOXML file to " + f6ObjectId + ": " + io); 188 throw new UncheckedIOException(io); 189 } 190 } else { 191 writeObjectFiles(objectId, f6ObjectId, ov, session); 192 } 193 } 194 195 // Write datastreams and their metadata 196 for (var dv : ov.listChangedDatastreams()) { 197 final var mimeType = resolveMimeType(dv); 198 final String dsId = dv.getDatastreamInfo().getDatastreamId(); 199 final String f6DsId = resolveF6DatastreamId(dsId, f6ObjectId, mimeType); 200 final var datastreamFilename = lastPartFromId(f6DsId); 201 202 if (dv.isFirstVersionIn(ov.getObject())) { 203 dsCreateDates.put(dsId, dv.getCreated()); 204 datastreamStates.put(f6DsId, dv.getDatastreamInfo().getState()); 205 } 206 final var createDate = dsCreateDates.get(dsId); 207 208 final var datastreamHeaders = createDatastreamHeaders(dv, f6DsId, f6ObjectId, 209 datastreamFilename, mimeType, createDate); 210 211 if (externalHandlingMap.containsKey(dv.getDatastreamInfo().getControlGroup())) { 212 InputStream content = null; 213 // for plain OCFL migrations, write a file containing the external/redirect URL 214 if (migrationType == MigrationType.PLAIN_OCFL) { 215 content = IOUtils.toInputStream(dv.getExternalOrRedirectURL()); 216 } 217 session.writeResource(datastreamHeaders, content); 218 } else { 219 try (var contentStream = dv.getContent()) { 220 writeDatastreamContent(dv, datastreamHeaders, contentStream, 
session); 221 } catch (final IOException e) { 222 throw new UncheckedIOException(e); 223 } 224 } 225 226 if (!foxmlFile) { 227 writeDescriptionFiles(f6DsId, datastreamFilename, createDate, datastreamHeaders, dv, session); 228 } 229 } 230 231 LOGGER.debug("Committing object <{}>", f6ObjectId); 232 233 session.versionCreationTimestamp(OffsetDateTime.parse(ov.getVersionDate())); 234 session.commit(); 235 } 236 237 handleDeletedResources(f6ObjectId, objectState, datastreamStates); 238 } 239 240 private boolean fedora3DigestValid(final ContentDigest f3Digest) { 241 return f3Digest != null && StringUtils.isNotBlank(f3Digest.getType()) && 242 StringUtils.isNotBlank(f3Digest.getDigest()); 243 } 244 245 private void writeDatastreamContent(final DatastreamVersion dv, 246 final ResourceHeaders datastreamHeaders, 247 final InputStream contentStream, 248 final OcflObjectSession session) throws IOException { 249 if (disableChecksumValidation) { 250 session.writeResource(datastreamHeaders, contentStream); 251 return; 252 } 253 final var f3Digest = dv.getContentDigest(); 254 final var ocflObjectId = session.ocflObjectId(); 255 final var datastreamId = dv.getDatastreamInfo().getDatastreamId(); 256 final var datastreamControlGroup = dv.getDatastreamInfo().getControlGroup(); 257 if (fedora3DigestValid(f3Digest)) { 258 try (var digestStream = new DigestInputStream(contentStream, 259 MessageDigest.getInstance(f3Digest.getType()))) { 260 session.writeResource(datastreamHeaders, digestStream); 261 final var expectedDigest = f3Digest.getDigest(); 262 final var actualDigest = Bytes.wrap(digestStream.getMessageDigest().digest()).encodeHex(); 263 if (!actualDigest.equalsIgnoreCase(expectedDigest)) { 264 final var msg = String.format("%s/%s: digest %s doesn't match expected digest %s", 265 ocflObjectId, datastreamId, actualDigest, expectedDigest); 266 throw new RuntimeException(msg); 267 } 268 } catch (final NoSuchAlgorithmException e) { 269 final var msg = String.format("%s/%s: no digest 
algorithm %s. Writing resource & continuing.", 270 ocflObjectId, datastreamId, f3Digest.getType()); 271 LOGGER.warn(msg); 272 session.writeResource(datastreamHeaders, contentStream); 273 } 274 } else { 275 if (datastreamControlGroup.equalsIgnoreCase("M")) { 276 final var msg = String.format("%s/%s: missing/invalid digest. Writing resource & continuing.", 277 ocflObjectId, datastreamId); 278 LOGGER.warn(msg); 279 } 280 session.writeResource(datastreamHeaders, contentStream); 281 } 282 } 283 284 private void handleDeletedResources(final String f6ObjectId, 285 final String objectState, 286 final Map<String, String> datastreamStates) { 287 final OcflObjectSession session = sessionFactory.newSession(f6ObjectId); 288 289 try { 290 final var now = OffsetDateTime.now(); 291 final var hasDeletes = new AtomicBoolean(false); 292 293 if (OBJ_DELETED.equals(objectState) || (deleteInactive && OBJ_INACTIVE.equals(objectState))) { 294 hasDeletes.set(true); 295 296 datastreamStates.keySet().forEach(f6DsId -> { 297 deleteDatastream(f6DsId, now.toInstant(), session); 298 }); 299 300 if (migrationType == MigrationType.PLAIN_OCFL) { 301 deleteOcflMigratedResource(f6ObjectId, InteractionModel.BASIC_CONTAINER, session); 302 } else { 303 deleteF6MigratedResource(f6ObjectId, now.toInstant(), session); 304 } 305 } else { 306 datastreamStates.forEach((f6DsId, state) -> { 307 if (DS_DELETED.equals(state) || (deleteInactive && DS_INACTIVE.equals(state))) { 308 hasDeletes.set(true); 309 deleteDatastream(f6DsId, now.toInstant(), session); 310 } 311 }); 312 } 313 314 if (hasDeletes.get()) { 315 session.versionCreationTimestamp(now); 316 session.commit(); 317 } else { 318 session.abort(); 319 } 320 } catch (RuntimeException e) { 321 session.abort(); 322 throw e; 323 } 324 } 325 326 private void writeObjectFiles(final String pid, 327 final String f6ObjectId, 328 final ObjectVersionReference ov, 329 final OcflObjectSession session) { 330 final var objectHeaders = createObjectHeaders(f6ObjectId, ov); 
331 final var content = getObjTriples(ov, pid); 332 session.writeResource(objectHeaders, content); 333 } 334 335 private void writeDescriptionFiles(final String f6Dsid, 336 final String datastreamFilename, 337 final String createDate, 338 final ResourceHeaders datastreamHeaders, 339 final DatastreamVersion dv, 340 final OcflObjectSession session) { 341 final var descriptionHeaders = createDescriptionHeaders(f6Dsid, 342 datastreamFilename, 343 datastreamHeaders); 344 session.writeResource(descriptionHeaders, getDsTriples(dv, f6Dsid, createDate)); 345 } 346 347 private String f6DescriptionId(final String f6ResourceId) { 348 return f6ResourceId + "/fcr:metadata"; 349 } 350 351 private String lastPartFromId(final String id) { 352 return id.substring(id.lastIndexOf('/') + 1); 353 } 354 355 private String resolveF6DatastreamId(final String datastreamId, final String f6ObjectId, final String mimeType) { 356 var id = f6ObjectId + "/" + datastreamId; 357 358 if (addDatastreamExtensions && !Strings.isNullOrEmpty(mimeType)) { 359 id += getExtension(mimeType); 360 } 361 362 return id; 363 } 364 365 private ResourceHeaders.Builder createHeaders(final String id, 366 final String parentId, 367 final InteractionModel model) { 368 final var headers = ResourceHeaders.builder(); 369 headers.withHeadersVersion(ResourceHeadersVersion.V1_0); 370 headers.withId(id); 371 headers.withParent(parentId); 372 headers.withInteractionModel(model.getUri()); 373 return headers; 374 } 375 376 private ResourceHeaders createObjectHeaders(final String f6ObjectId, final ObjectVersionReference ov) { 377 final var headers = createHeaders(f6ObjectId, FCREPO_ROOT, InteractionModel.BASIC_CONTAINER); 378 headers.withArchivalGroup(true); 379 headers.withObjectRoot(true); 380 headers.withLastModifiedBy(user); 381 headers.withCreatedBy(user); 382 383 ov.getObjectProperties().listProperties().forEach(p -> { 384 if (p.getName().contains("lastModifiedDate")) { 385 final var lastModified = 
Instant.parse(p.getValue()); 386 headers.withLastModifiedDate(lastModified); 387 headers.withMementoCreatedDate(lastModified); 388 headers.withStateToken(DigestUtils.md5Hex( 389 String.valueOf(lastModified.toEpochMilli())).toUpperCase()); 390 } else if (p.getName().contains("createdDate")) { 391 headers.withCreatedDate(Instant.parse(p.getValue())); 392 } 393 }); 394 395 return headers.build(); 396 } 397 398 private ResourceHeaders createDatastreamHeaders(final DatastreamVersion dv, 399 final String f6DsId, 400 final String f6ObjectId, 401 final String filename, 402 final String mime, 403 final String createDate) { 404 final var lastModified = Instant.parse(dv.getCreated()); 405 final var headers = createHeaders(f6DsId, f6ObjectId, InteractionModel.NON_RDF); 406 headers.withArchivalGroupId(f6ObjectId); 407 headers.withFilename(filename); 408 headers.withCreatedDate(Instant.parse(createDate)); 409 headers.withLastModifiedDate(lastModified); 410 headers.withLastModifiedBy(user); 411 headers.withCreatedBy(user); 412 headers.withMementoCreatedDate(lastModified); 413 414 if (externalHandlingMap.containsKey(dv.getDatastreamInfo().getControlGroup())) { 415 headers.withExternalHandling( 416 externalHandlingMap.get(dv.getDatastreamInfo().getControlGroup())); 417 headers.withExternalUrl(dv.getExternalOrRedirectURL()); 418 } 419 420 headers.withArchivalGroup(false); 421 headers.withObjectRoot(false); 422 if (dv.getSize() > -1 && !INLINE_XML.equals(dv.getDatastreamInfo().getControlGroup())) { 423 headers.withContentSize(dv.getSize()); 424 } 425 426 if (dv.getContentDigest() != null && !Strings.isNullOrEmpty(dv.getContentDigest().getDigest())) { 427 final var digest = dv.getContentDigest(); 428 final var digests = new ArrayList<URI>(); 429 digests.add(URI.create("urn:" + digest.getType().toLowerCase() + ":" + digest.getDigest().toLowerCase())); 430 headers.withDigests(digests); 431 } 432 433 headers.withMimeType(mime); 434 headers.withStateToken(DigestUtils.md5Hex( 435 
String.valueOf(lastModified.toEpochMilli())).toUpperCase()); 436 437 return headers.build(); 438 } 439 440 private ResourceHeaders createDescriptionHeaders(final String f6DsId, 441 final String filename, 442 final ResourceHeaders datastreamHeaders) { 443 final var id = f6DescriptionId(f6DsId); 444 final var headers = createHeaders(id, f6DsId, InteractionModel.NON_RDF_DESCRIPTION); 445 446 headers.withArchivalGroupId(datastreamHeaders.getArchivalGroupId()); 447 headers.withFilename(filename); 448 headers.withCreatedDate(datastreamHeaders.getCreatedDate()); 449 headers.withLastModifiedDate(datastreamHeaders.getLastModifiedDate()); 450 headers.withCreatedBy(datastreamHeaders.getCreatedBy()); 451 headers.withLastModifiedBy(datastreamHeaders.getLastModifiedBy()); 452 headers.withMementoCreatedDate(datastreamHeaders.getMementoCreatedDate()); 453 454 headers.withArchivalGroup(false); 455 headers.withObjectRoot(false); 456 headers.withStateToken(datastreamHeaders.getStateToken()); 457 458 return headers.build(); 459 } 460 461 private String resolveMimeType(final DatastreamVersion dv) { 462 String mime = dv.getMimeType(); 463 464 if (Strings.isNullOrEmpty(mime)) { 465 final var meta = new Metadata(); 466 meta.set(Metadata.RESOURCE_NAME_KEY, dv.getDatastreamInfo().getDatastreamId()); 467 try (var content = TikaInputStream.get(dv.getContent())) { 468 mime = mimeDetector.detect(content, meta).toString(); 469 } catch (IOException e) { 470 throw new UncheckedIOException(e); 471 } 472 } 473 474 return mime; 475 } 476 477 private void deleteDatastream(final String id, 478 final Instant lastModified, 479 final OcflObjectSession session) { 480 if (migrationType == MigrationType.PLAIN_OCFL) { 481 deleteOcflMigratedResource(id, InteractionModel.NON_RDF, session); 482 deleteOcflMigratedResource(f6DescriptionId(id), InteractionModel.NON_RDF_DESCRIPTION, session); 483 } else { 484 deleteF6MigratedResource(id, lastModified, session); 485 deleteF6MigratedResource(f6DescriptionId(id), 
lastModified, session); 486 } 487 } 488 489 private void deleteF6MigratedResource(final String id, 490 final Instant lastModified, 491 final OcflObjectSession session) { 492 LOGGER.debug("Deleting resource {}", id); 493 final var headers = session.readHeaders(id); 494 session.deleteContentFile(ResourceHeaders.builder(headers) 495 .withDeleted(true) 496 .withLastModifiedDate(lastModified) 497 .withMementoCreatedDate(lastModified) 498 .build()); 499 } 500 501 private void deleteOcflMigratedResource(final String id, 502 final InteractionModel interactionModel, 503 final OcflObjectSession session) { 504 LOGGER.debug("Deleting resource {}", id); 505 session.deleteContentFile(ResourceHeaders.builder() 506 .withId(id) 507 .withInteractionModel(interactionModel.getUri()) 508 .build()); 509 } 510 511 private String getObjectState(final ObjectVersionReference ov, final String pid) { 512 return ov.getObjectProperties().listProperties().stream() 513 .filter(prop -> OBJ_STATE_PROP.equals(prop.getName())) 514 .findFirst() 515 .orElseThrow(() -> new IllegalStateException(String.format("Object %s is missing state information", 516 pid))) 517 .getValue(); 518 } 519 520 // Get object-level triples 521 private static InputStream getObjTriples(final ObjectVersionReference o, final String pid) { 522 final ByteArrayOutputStream out = new ByteArrayOutputStream(); 523 final Model triples = ModelFactory.createDefaultModel(); 524 final String uri = "info:fedora/" + pid; 525 526 o.getObjectProperties().listProperties().forEach(p -> { 527 if (p.getName().contains("Date")) { 528 addDateLiteral(triples, uri, p.getName(), p.getValue()); 529 } else { 530 addStringLiteral(triples, uri, p.getName(), p.getValue()); 531 } 532 }); 533 534 triples.write(out, "N-TRIPLES"); 535 return new ByteArrayInputStream(out.toByteArray()); 536 } 537 538 // Get datastream-level triples 539 private InputStream getDsTriples(final DatastreamVersion dv, 540 final String f6DsId, 541 final String createDate) { 542 final 
ByteArrayOutputStream out = new ByteArrayOutputStream(); 543 final Model triples = ModelFactory.createDefaultModel(); 544 545 if (migrationType == MigrationType.PLAIN_OCFL) { 546 // These triples are server managed in F6 547 addDateLiteral(triples, 548 f6DsId, 549 "http://fedora.info/definitions/v4/repository#created", 550 createDate); 551 addDateLiteral(triples, 552 f6DsId, 553 "http://fedora.info/definitions/v4/repository#lastModified", 554 dv.getCreated()); 555 addStringLiteral(triples, 556 f6DsId, 557 "http://purl.org/dc/terms/identifier", 558 dv.getDatastreamInfo().getDatastreamId()); 559 addStringLiteral(triples, 560 f6DsId, 561 "http://www.ebu.ch/metadata/ontologies/ebucore/ebucore#hasMimeType", 562 dv.getMimeType()); 563 addLongLiteral(triples, 564 f6DsId, 565 "http://www.loc.gov/premis/rdf/v1#size", 566 dv.getSize()); 567 568 if (dv.getContentDigest() != null) { 569 addStringLiteral(triples, 570 f6DsId, 571 "http://www.loc.gov/premis/rdf/v1#hasMessageDigest", 572 "urn:" + dv.getContentDigest().getType().toLowerCase() + ":" + 573 dv.getContentDigest().getDigest().toLowerCase()); 574 } 575 } 576 577 addStringLiteral(triples, 578 f6DsId, 579 "http://purl.org/dc/terms/title", 580 dv.getLabel()); 581 addStringLiteral(triples, 582 f6DsId, 583 "http://fedora.info/definitions/1/0/access/objState", 584 dv.getDatastreamInfo().getState()); 585 addStringLiteral(triples, 586 f6DsId, 587 "http://www.loc.gov/premis/rdf/v1#formatDesignation", 588 dv.getFormatUri()); 589 590 triples.write(out, "N-TRIPLES"); 591 return new ByteArrayInputStream(out.toByteArray()); 592 } 593 594 private static void addStringLiteral(final Model m, 595 final String s, 596 final String p, 597 final String o) { 598 if (o != null) { 599 m.add(m.createResource(s), m.createProperty(p), o); 600 } 601 } 602 603 private static void addDateLiteral(final Model m, 604 final String s, 605 final String p, 606 final String date) { 607 if (date != null) { 608 m.addLiteral(m.createResource(s), 609 
m.createProperty(p), 610 m.createTypedLiteral(date, XSDDatatype.XSDdateTime)); 611 } 612 } 613 614 private static void addLongLiteral(final Model m, 615 final String s, 616 final String p, 617 final long number) { 618 if (number != -1) { 619 m.addLiteral(m.createResource(s), 620 m.createProperty(p), 621 m.createTypedLiteral(number, XSDDatatype.XSDlong)); 622 } 623 } 624 625 /** 626 * @param mime any mimetype as String 627 * @return extension associated with arg mime, return includes '.' in extension (.txt). 628 * ..Empty String if unrecognized mime 629 */ 630 private static String getExtension(final String mime) { 631 final MimeTypes allTypes = MimeTypes.getDefaultMimeTypes(); 632 MimeType type; 633 try { 634 type = allTypes.forName(mime); 635 } catch (final MimeTypeException e) { 636 type = null; 637 } 638 639 if (type != null) { 640 return type.getExtension(); 641 } 642 643 LOGGER.warn("No mimetype found for '{}'", mime); 644 return ""; 645 } 646 647}