Work in progress for PSD metadata support:
- Refactored metadata support
- Moved standard metadata support (EXIF, IPTC & XMP) to separate module
- Changes to PSD metadata implementation
This commit is contained in:
parent effd80d42f
commit aad80d043f
31
twelvemonkeys-imageio/metadata/pom.xml
Normal file
@ -0,0 +1,31 @@
|
||||
<?xml version="1.0"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>com.twelvemonkeys.imageio</groupId>
|
||||
<artifactId>twelvemonkeys-imageio-metadata</artifactId>
|
||||
<version>2.3-SNAPSHOT</version>
|
||||
<name>TwelveMonkeys ImageIO Metadata</name>
|
||||
<description>
|
||||
ImageIO metadata module.
|
||||
</description>
|
||||
|
||||
<parent>
|
||||
<artifactId>twelvemonkeys-imageio</artifactId>
|
||||
<groupId>com.twelvemonkeys</groupId>
|
||||
<version>2.3-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.twelvemonkeys.imageio</groupId>
|
||||
<artifactId>twelvemonkeys-imageio-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.twelvemonkeys.imageio</groupId>
|
||||
<artifactId>twelvemonkeys-imageio-core</artifactId>
|
||||
<classifier>tests</classifier>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
@ -0,0 +1,116 @@
|
||||
package com.twelvemonkeys.imageio.metadata;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* AbstractDirectory
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: AbstractDirectory.java,v 1.0 Nov 11, 2009 5:31:04 PM haraldk Exp$
|
||||
*/
|
||||
public abstract class AbstractDirectory implements Directory {
|
||||
// A linked hashmap or a stable bag structure might also work..
|
||||
private final List<Entry> mEntries = new ArrayList<Entry>();
|
||||
|
||||
protected AbstractDirectory(final Collection<? extends Entry> pEntries) {
|
||||
if (pEntries != null) {
|
||||
mEntries.addAll(pEntries);
|
||||
}
|
||||
}
|
||||
|
||||
public Entry getEntryById(final Object pIdentifier) {
|
||||
for (Entry entry : this) {
|
||||
if (entry.getIdentifier().equals(pIdentifier)) {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public Entry getEntryByName(final String pName) {
|
||||
for (Entry entry : this) {
|
||||
if (entry.getFieldName().equals(pName)) {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public Iterator<Entry> iterator() {
|
||||
return mEntries.iterator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws {@code UnsupportedOperationException} if this directory is read-only.
|
||||
*
|
||||
* @throws UnsupportedOperationException if this directory is read-only.
|
||||
* @see #isReadOnly()
|
||||
*/
|
||||
protected final void assertMutable() {
|
||||
if (isReadOnly()) {
|
||||
throw new UnsupportedOperationException("Directory is read-only");
|
||||
}
|
||||
}
|
||||
|
||||
public boolean add(final Entry pEntry) {
|
||||
assertMutable();
|
||||
|
||||
// TODO: Replace if entry is already present?
|
||||
// Some directories may need special ordering, or may/may not support multiple entries for certain ids...
|
||||
return mEntries.add(pEntry);
|
||||
}
|
||||
|
||||
public boolean remove(final Object pEntry) {
|
||||
assertMutable();
|
||||
|
||||
return mEntries.remove(pEntry);
|
||||
}
|
||||
|
||||
public int size() {
|
||||
return mEntries.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* This implementation returns {@code true}.
|
||||
* Subclasses should override this method, if the directory is mutable.
|
||||
*
|
||||
* @return {@code true}
|
||||
*/
|
||||
public boolean isReadOnly() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/// Standard object support
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return mEntries.hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object pOther) {
|
||||
if (this == pOther) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (pOther == null || getClass() != pOther.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Safe cast, as it must be a subclass for the classes to be equal
|
||||
AbstractDirectory other = (AbstractDirectory) pOther;
|
||||
|
||||
return mEntries.equals(other.mEntries);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("%s%s", getClass().getSimpleName(), mEntries.toString());
|
||||
}
|
||||
}
|
@ -0,0 +1,99 @@
|
||||
package com.twelvemonkeys.imageio.metadata;
|
||||
|
||||
import com.twelvemonkeys.lang.Validate;
|
||||
|
||||
import java.lang.reflect.Array;
|
||||
|
||||
/**
|
||||
* AbstractEntry
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: AbstractEntry.java,v 1.0 Nov 12, 2009 12:43:13 AM haraldk Exp$
|
||||
*/
|
||||
public abstract class AbstractEntry implements Entry {
|
||||
|
||||
private final Object mIdentifier;
|
||||
private final Object mValue; // TODO: Might need to be mutable..
|
||||
|
||||
protected AbstractEntry(final Object pIdentifier, final Object pValue) {
|
||||
Validate.notNull(pIdentifier, "identifier");
|
||||
|
||||
mIdentifier = pIdentifier;
|
||||
mValue = pValue;
|
||||
}
|
||||
|
||||
public Object getIdentifier() {
|
||||
return mIdentifier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns {@code null}, meaning unknown or undefined.
|
||||
*
|
||||
* @return {@code null}.
|
||||
*/
|
||||
public String getFieldName() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public Object getValue() {
|
||||
return mValue;
|
||||
}
|
||||
|
||||
public String getValueAsString() {
|
||||
return String.valueOf(mValue);
|
||||
}
|
||||
|
||||
public String getTypeName() {
|
||||
if (mValue == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return mValue.getClass().getSimpleName();
|
||||
}
|
||||
|
||||
public int valueCount() {
|
||||
// TODO: Collection support?
|
||||
if (mValue != null && mValue.getClass().isArray()) {
|
||||
return Array.getLength(mValue);
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
/// Object
|
||||
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return mIdentifier.hashCode() + 31 * (mValue == null ? 0 : mValue.hashCode());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object pOther) {
|
||||
if (this == pOther) {
|
||||
return true;
|
||||
}
|
||||
if (!(pOther instanceof AbstractEntry)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AbstractEntry other = (AbstractEntry) pOther;
|
||||
|
||||
return mIdentifier.equals(other.mIdentifier) && (
|
||||
mValue == null && other.mValue == null || mValue != null && mValue.equals(other.mValue)
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
String name = getFieldName();
|
||||
String nameStr = name != null ? "/" + name + "" : "";
|
||||
|
||||
String type = getTypeName();
|
||||
String typeStr = type != null ? " (" + type + ")" : "";
|
||||
|
||||
return String.format("%s%s: %s%s", getIdentifier(), nameStr, getValueAsString(), typeStr);
|
||||
}
|
||||
}
|
@ -0,0 +1,33 @@
|
||||
package com.twelvemonkeys.imageio.metadata;
|
||||
|
||||
/**
|
||||
* Directory
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: Directory.java,v 1.0 Nov 11, 2009 4:20:58 PM haraldk Exp$
|
||||
*/
|
||||
public interface Directory extends Iterable<Entry> {
|
||||
// TODO: Spec when more entries exist? Or make Entry support multi-values!?
|
||||
// For multiple entries with same id in directory, the first entry (using the order from the stream) will be returned
|
||||
Entry getEntryById(Object pIdentifier);
|
||||
|
||||
Entry getEntryByName(String pName);
|
||||
|
||||
// Iterator over the entries best matching the given identifier/qualifier/language:
|
||||
//Iterator<Entry> getBestEntries(Object pIdentifier, Object pQualifier, String pLanguage);
|
||||
|
||||
|
||||
/// Collection-like API
|
||||
// TODO: addOrReplaceIfPresent... (trouble for multi-values) Or mutable entries?
|
||||
// boolean replace(Entry pEntry)??
|
||||
// boolean contains(Object pIdentifier)?
|
||||
|
||||
boolean add(Entry pEntry);
|
||||
|
||||
boolean remove(Object pEntry); // Object in case we retro-fit Collection/Map..
|
||||
|
||||
int size();
|
||||
|
||||
boolean isReadOnly();
|
||||
}
|
@ -0,0 +1,39 @@
|
||||
package com.twelvemonkeys.imageio.metadata;
|
||||
|
||||
/**
|
||||
* Entry
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: Entry.java,v 1.0 Nov 11, 2009 4:21:08 PM haraldk Exp$
|
||||
*/
|
||||
public interface Entry {
|
||||
// "tag" identifier from spec
|
||||
Object getIdentifier();
|
||||
|
||||
// Human readable "tag" (field) name from spec
|
||||
String getFieldName();
|
||||
|
||||
// The internal "tag" value as stored in the stream, may be a Directory
|
||||
Object getValue();
|
||||
|
||||
// Human readable "tag" value
|
||||
String getValueAsString();
|
||||
|
||||
//void setValue(Object pValue); // TODO: qualifiers...
|
||||
|
||||
// Optional, implementation/spec specific type, describing the object returned from getValue
|
||||
String getTypeName();
|
||||
|
||||
// TODO: Or something like getValue(qualifierType, qualifierValue) + getQualifiers()/getQualifierValues
|
||||
// TODO: The problem with current model is getEntry() which only has single value support
|
||||
|
||||
// Optional, xml:lang-support
|
||||
//String getLanguage();
|
||||
|
||||
// Optional, XMP alt-support. TODO: Do we need both?
|
||||
//Object getQualifier();
|
||||
|
||||
// For arrays only
|
||||
int valueCount();
|
||||
}
|
@ -0,0 +1,15 @@
|
||||
package com.twelvemonkeys.imageio.metadata;
|
||||
|
||||
import javax.imageio.stream.ImageInputStream;
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* MetadataReader
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: MetadataReader.java,v 1.0 Nov 13, 2009 8:38:11 PM haraldk Exp$
|
||||
*/
|
||||
public abstract class MetadataReader {
|
||||
public abstract Directory read(ImageInputStream pInput) throws IOException;
|
||||
}
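
To show how the pieces introduced so far fit together: a MetadataReader parses an ImageInputStream into a Directory, which is an Iterable of Entry values. Below is a minimal usage sketch, using the EXIFReader added further down in this commit; the file name is a placeholder, and any ImageInputStream positioned at a TIFF/EXIF header would work the same way.

import com.twelvemonkeys.imageio.metadata.Directory;
import com.twelvemonkeys.imageio.metadata.Entry;
import com.twelvemonkeys.imageio.metadata.exif.EXIFReader;

import javax.imageio.ImageIO;
import javax.imageio.stream.ImageInputStream;
import java.io.File;
import java.io.IOException;

public class MetadataReaderUsage {
    public static void main(String[] pArgs) throws IOException {
        // "exif.tif" is a placeholder file name
        ImageInputStream input = ImageIO.createImageInputStream(new File("exif.tif"));
        try {
            Directory directory = new EXIFReader().read(input);

            for (Entry entry : directory) {   // Directory extends Iterable<Entry>
                System.out.println(entry);    // e.g. "274: 1 (SHORT)"
            }

            Entry orientation = directory.getEntryById(274); // 0x0112 Orientation
            System.out.println(orientation != null ? orientation.getValue() : "no orientation tag");
        }
        finally {
            input.close();
        }
    }
}
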
|
@ -0,0 +1,47 @@
|
||||
package com.twelvemonkeys.imageio.metadata.exif;
|
||||
|
||||
/**
|
||||
* EXIF
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: EXIF.java,v 1.0 Nov 11, 2009 5:36:04 PM haraldk Exp$
|
||||
*/
|
||||
interface EXIF {
|
||||
/*
|
||||
1 = BYTE 8-bit unsigned integer.
|
||||
2 = ASCII 8-bit byte that contains a 7-bit ASCII code; the last byte
|
||||
must be NUL (binary zero).
|
||||
3 = SHORT 16-bit (2-byte) unsigned integer.
|
||||
4 = LONG 32-bit (4-byte) unsigned integer.
|
||||
5 = RATIONAL Two LONGs: the first represents the numerator of a
|
||||
fraction; the second, the denominator.
|
||||
|
||||
TIFF 6.0 and above:
|
||||
6 = SBYTE An 8-bit signed (twos-complement) integer.
|
||||
7 = UNDEFINED An 8-bit byte that may contain anything, depending on
|
||||
the definition of the field.
|
||||
8 = SSHORT A 16-bit (2-byte) signed (twos-complement) integer.
|
||||
9 = SLONG A 32-bit (4-byte) signed (twos-complement) integer.
|
||||
10 = SRATIONAL Two SLONGs: the first represents the numerator of a
|
||||
fraction, the second the denominator.
|
||||
11 = FLOAT Single precision (4-byte) IEEE format.
|
||||
12 = DOUBLE Double precision (8-byte) IEEE format.
|
||||
*/
|
||||
|
||||
static int EXIF_IFD = 0x8769;
|
||||
|
||||
static String[] TYPE_NAMES = {
|
||||
"BYTE", "ASCII", "SHORT", "LONG", "RATIONAL",
|
||||
|
||||
"SBYTE", "UNDEFINED", "SSHORT", "SLONG", "SRATIONAL", "FLOAT", "DOUBLE",
|
||||
};
|
||||
|
||||
static int[] TYPE_LENGTHS = {
|
||||
1, 1, 2, 4, 8,
|
||||
|
||||
1, 1, 2, 4, 8, 4, 8,
|
||||
};
|
||||
|
||||
int TIFF_MAGIC = 42;
|
||||
}
|
@ -0,0 +1,20 @@
|
||||
package com.twelvemonkeys.imageio.metadata.exif;
|
||||
|
||||
import com.twelvemonkeys.imageio.metadata.AbstractDirectory;
|
||||
import com.twelvemonkeys.imageio.metadata.Entry;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* EXIFDirectory
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: EXIFDirectory.java,v 1.0 Nov 11, 2009 5:02:59 PM haraldk Exp$
|
||||
*/
|
||||
final class EXIFDirectory extends AbstractDirectory {
|
||||
EXIFDirectory(final Collection<? extends Entry> pEntries) {
|
||||
super(pEntries);
|
||||
}
|
||||
}
|
@ -0,0 +1,30 @@
|
||||
package com.twelvemonkeys.imageio.metadata.exif;
|
||||
|
||||
import com.twelvemonkeys.imageio.metadata.AbstractEntry;
|
||||
|
||||
/**
|
||||
* EXIFEntry
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: EXIFEntry.java,v 1.0 Nov 13, 2009 5:47:35 PM haraldk Exp$
|
||||
*/
|
||||
final class EXIFEntry extends AbstractEntry {
|
||||
final private short mType;
|
||||
|
||||
EXIFEntry(final Object pIdentifier, final Object pValue, final short pType) {
|
||||
super(pIdentifier, pValue);
|
||||
mType = pType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getFieldName() {
|
||||
// TODO: Need tons of constants... ;-)
|
||||
return super.getFieldName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTypeName() {
|
||||
return EXIF.TYPE_NAMES[mType - 1];
|
||||
}
|
||||
}
|
@ -0,0 +1,216 @@
|
||||
package com.twelvemonkeys.imageio.metadata.exif;
|
||||
|
||||
import com.twelvemonkeys.imageio.metadata.Directory;
|
||||
import com.twelvemonkeys.imageio.metadata.Entry;
|
||||
import com.twelvemonkeys.imageio.metadata.MetadataReader;
|
||||
import com.twelvemonkeys.lang.StringUtil;
|
||||
|
||||
import javax.imageio.IIOException;
|
||||
import javax.imageio.stream.ImageInputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteOrder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* EXIFReader
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: EXIFReader.java,v 1.0 Nov 13, 2009 5:42:51 PM haraldk Exp$
|
||||
*/
|
||||
public final class EXIFReader extends MetadataReader {
|
||||
|
||||
@Override
|
||||
public Directory read(final ImageInputStream pInput) throws IOException {
|
||||
byte[] bom = new byte[2];
|
||||
pInput.readFully(bom);
|
||||
if (bom[0] == 'I' && bom[1] == 'I') {
|
||||
pInput.setByteOrder(ByteOrder.LITTLE_ENDIAN);
|
||||
}
|
||||
else if (!(bom[0] == 'M' && bom[1] == 'M')) {
|
||||
throw new IIOException(String.format("Invalid byte order marker '%s'", StringUtil.decode(bom, 0, bom.length, "ASCII")));
|
||||
}
|
||||
|
||||
int magic = pInput.readUnsignedShort();
|
||||
if (magic != EXIF.TIFF_MAGIC) {
|
||||
throw new IIOException(String.format("Wrong TIFF magic in EXIF data: %04x, expected: %04x", magic, EXIF.TIFF_MAGIC));
|
||||
}
|
||||
|
||||
long directoryOffset = pInput.readUnsignedInt();
|
||||
|
||||
return readDirectory(pInput, directoryOffset);
|
||||
}
|
||||
|
||||
private EXIFDirectory readDirectory(final ImageInputStream pInput, final long pOffset) throws IOException {
|
||||
List<Entry> entries = new ArrayList<Entry>();
|
||||
|
||||
pInput.seek(pOffset);
|
||||
int entryCount = pInput.readUnsignedShort();
|
||||
|
||||
for (int i = 0; i < entryCount; i++) {
|
||||
entries.add(readEntry(pInput));
|
||||
}
|
||||
|
||||
long nextOffset = pInput.readUnsignedInt();
|
||||
|
||||
if (nextOffset != 0) {
|
||||
EXIFDirectory next = readDirectory(pInput, nextOffset);
|
||||
|
||||
for (Entry entry : next) {
|
||||
entries.add(entry);
|
||||
}
|
||||
}
|
||||
|
||||
return new EXIFDirectory(entries);
|
||||
}
|
||||
|
||||
private EXIFEntry readEntry(final ImageInputStream pInput) throws IOException {
|
||||
int tagId = pInput.readUnsignedShort();
|
||||
|
||||
short type = pInput.readShort();
|
||||
int count = pInput.readInt(); // Number of values
|
||||
|
||||
Object value;
|
||||
|
||||
// TODO: Handle other sub-IFDs
|
||||
// GPS IFD: 0x8825, Interoperability IFD: 0xA005
|
||||
if (tagId == EXIF.EXIF_IFD) {
|
||||
long offset = pInput.readUnsignedInt();
|
||||
pInput.mark();
|
||||
|
||||
try {
|
||||
value = readDirectory(pInput, offset);
|
||||
}
|
||||
finally {
|
||||
pInput.reset();
|
||||
}
|
||||
}
|
||||
else {
|
||||
int valueLength = getValueLength(type, count);
|
||||
|
||||
if (valueLength > 0 && valueLength <= 4) {
|
||||
value = readValueInLine(pInput, type, count);
|
||||
pInput.skipBytes(4 - valueLength);
|
||||
}
|
||||
else {
|
||||
long valueOffset = pInput.readUnsignedInt(); // Value doesn't fit in the 4-byte field, so this is an offset to the value data
|
||||
value = readValue(pInput, valueOffset, type, count);
|
||||
}
|
||||
}
|
||||
|
||||
return new EXIFEntry(tagId, value, type);
|
||||
}
|
||||
|
||||
private Object readValue(final ImageInputStream pInput, final long pOffset, final short pType, final int pCount) throws IOException {
|
||||
long pos = pInput.getStreamPosition();
|
||||
try {
|
||||
pInput.seek(pOffset);
|
||||
return readValueInLine(pInput, pType, pCount);
|
||||
}
|
||||
finally {
|
||||
pInput.seek(pos);
|
||||
}
|
||||
}
|
||||
|
||||
private Object readValueInLine(final ImageInputStream pInput, final short pType, final int pCount) throws IOException {
|
||||
return readValueDirect(pInput, pType, pCount);
|
||||
}
|
||||
|
||||
private static Object readValueDirect(final ImageInputStream pInput, final short pType, final int pCount) throws IOException {
|
||||
switch (pType) {
|
||||
case 2:
|
||||
// TODO: This might be UTF-8 or ISO-8859-1, even though the spec says ASCII
|
||||
byte[] ascii = new byte[pCount];
|
||||
pInput.readFully(ascii);
|
||||
return StringUtil.decode(ascii, 0, ascii.length, "UTF-8"); // UTF-8 is ASCII compatible
|
||||
case 1:
|
||||
if (pCount == 1) {
|
||||
return pInput.readUnsignedByte();
|
||||
}
|
||||
case 6:
|
||||
if (pCount == 1) {
|
||||
return pInput.readByte();
|
||||
}
|
||||
case 7:
|
||||
byte[] bytes = new byte[pCount];
|
||||
pInput.readFully(bytes);
|
||||
return bytes;
|
||||
case 3:
|
||||
if (pCount == 1) {
|
||||
return pInput.readUnsignedShort();
|
||||
}
|
||||
case 8:
|
||||
if (pCount == 1) {
|
||||
return pInput.readShort();
|
||||
}
|
||||
|
||||
short[] shorts = new short[pCount];
|
||||
pInput.readFully(shorts, 0, shorts.length);
|
||||
return shorts;
|
||||
case 4:
|
||||
if (pCount == 1) {
|
||||
return pInput.readUnsignedInt();
|
||||
}
|
||||
case 9:
|
||||
if (pCount == 1) {
|
||||
return pInput.readInt();
|
||||
}
|
||||
|
||||
int[] ints = new int[pCount];
|
||||
pInput.readFully(ints, 0, ints.length);
|
||||
return ints;
|
||||
case 11:
|
||||
if (pCount == 1) {
|
||||
return pInput.readFloat();
|
||||
}
|
||||
|
||||
float[] floats = new float[pCount];
|
||||
pInput.readFully(floats, 0, floats.length);
|
||||
return floats;
|
||||
case 12:
|
||||
if (pCount == 1) {
|
||||
return pInput.readDouble();
|
||||
}
|
||||
|
||||
double[] doubles = new double[pCount];
|
||||
pInput.readFully(doubles, 0, doubles.length);
|
||||
return doubles;
|
||||
|
||||
// TODO: Consider using a Rational class
|
||||
case 5:
|
||||
if (pCount == 1) {
|
||||
return pInput.readUnsignedInt() / (double) pInput.readUnsignedInt();
|
||||
}
|
||||
|
||||
double[] rationals = new double[pCount];
|
||||
for (int i = 0; i < rationals.length; i++) {
|
||||
rationals[i] = pInput.readUnsignedInt() / (double) pInput.readUnsignedInt();
|
||||
}
|
||||
|
||||
return rationals;
|
||||
case 10:
|
||||
if (pCount == 1) {
|
||||
return pInput.readInt() / (double) pInput.readInt();
|
||||
}
|
||||
|
||||
double[] srationals = new double[pCount];
|
||||
for (int i = 0; i < srationals.length; i++) {
|
||||
srationals[i] = pInput.readInt() / (double) pInput.readInt();
|
||||
}
|
||||
|
||||
return srationals;
|
||||
|
||||
default:
|
||||
throw new IIOException(String.format("Unknown EXIF type '%s'", pType));
|
||||
}
|
||||
}
|
||||
|
||||
private int getValueLength(final int pType, final int pCount) {
|
||||
if (pType > 0 && pType <= EXIF.TYPE_LENGTHS.length) {
|
||||
return EXIF.TYPE_LENGTHS[pType - 1] * pCount;
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
|
||||
}
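
A note on the in-line versus offset decision in readEntry above: a TIFF/EXIF IFD entry is always 12 bytes (2-byte tag, 2-byte type, 4-byte count, 4-byte value-or-offset field), and the value is stored in-line exactly when TYPE_LENGTHS[type - 1] * count fits in the last 4 bytes. A small self-contained sketch of that arithmetic, with the lengths copied from the EXIF interface above:

public class EXIFValueLayout {
    // Copied from the EXIF interface in this commit (lengths in bytes, indexed by type - 1)
    static final int[] TYPE_LENGTHS = {1, 1, 2, 4, 8, 1, 1, 2, 4, 8, 4, 8};

    static boolean isStoredInline(int pType, int pCount) {
        int length = TYPE_LENGTHS[pType - 1] * pCount;
        return length > 0 && length <= 4; // same test as EXIFReader.readEntry
    }

    public static void main(String[] pArgs) {
        System.out.println(isStoredInline(3, 1));  // SHORT x 1  = 2 bytes  -> true (in-line)
        System.out.println(isStoredInline(2, 20)); // ASCII x 20 = 20 bytes -> false (offset)
        System.out.println(isStoredInline(5, 1));  // RATIONAL   = 8 bytes  -> false (always via offset)
    }
}
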
|
@ -0,0 +1,130 @@
|
||||
package com.twelvemonkeys.imageio.metadata.iptc;
|
||||
|
||||
/**
|
||||
* IPTC
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: IPTC.java,v 1.0 Nov 11, 2009 6:20:21 PM haraldk Exp$
|
||||
*/
|
||||
public interface IPTC {
|
||||
static final int ENVELOPE_RECORD = 1 << 8;
|
||||
static final int APPLICATION_RECORD = 2 << 8;
|
||||
|
||||
static final int TAG_CODED_CHARACTER_SET = ENVELOPE_RECORD | 90;
|
||||
|
||||
/** 2:00 Record Version (mandatory) */
|
||||
public static final int TAG_RECORD_VERSION = APPLICATION_RECORD; // 0x0200
|
||||
|
||||
/** 2:03 Object Type Reference */
|
||||
public static final int TAG_OBJECT_TYPE_REFERENCE = APPLICATION_RECORD | 3;
|
||||
/** 2:04 Object Attribute Reference (repeatable) */
|
||||
public static final int TAG_OBJECT_ATTRIBUTE_REFERENCE = APPLICATION_RECORD | 4;
|
||||
/** 2:05 Object Name */
|
||||
public static final int TAG_OBJECT_NAME = APPLICATION_RECORD | 5; // 0x0205
|
||||
/** 2:07 Edit Status */
|
||||
public static final int TAG_EDIT_STATUS = APPLICATION_RECORD | 7;
|
||||
/** 2:08 Editorial Update */
|
||||
public static final int TAG_EDITORIAL_UPDATE = APPLICATION_RECORD | 8;
|
||||
/** 2:10 Urgency */
|
||||
public static final int TAG_URGENCY = APPLICATION_RECORD | 10;
|
||||
/** 2:12 Subject Reference (repeatable) */
|
||||
public static final int TAG_SUBJECT_REFERENCE = APPLICATION_RECORD | 12;
|
||||
/** 2:15 Category */
|
||||
public static final int TAG_CATEGORY = APPLICATION_RECORD | 15; // 0x020f
|
||||
/** 2:20 Supplemental Category (repeatable) */
|
||||
public static final int TAG_SUPPLEMENTAL_CATEGORIES = APPLICATION_RECORD | 20;
|
||||
/** 2:22 Fixture Identifier */
|
||||
public static final int TAG_FIXTURE_IDENTIFIER = APPLICATION_RECORD | 22;
|
||||
/** 2:25 Keywords (repeatable) */
|
||||
public static final int TAG_KEYWORDS = APPLICATION_RECORD | 25;
|
||||
/** 2:26 Content Location Code (repeatable) */
|
||||
public static final int TAG_CONTENT_LOCATION_CODE = APPLICATION_RECORD | 26;
|
||||
/** 2:27 Content Location Name (repeatable) */
|
||||
public static final int TAG_CONTENT_LOCATION_NAME = APPLICATION_RECORD | 27;
|
||||
/** 2:30 Release Date */
|
||||
public static final int TAG_RELEASE_DATE = APPLICATION_RECORD | 30;
|
||||
/** 2:35 Release Time */
|
||||
public static final int TAG_RELEASE_TIME = APPLICATION_RECORD | 35;
|
||||
/** 2:37 Expiration Date */
|
||||
public static final int TAG_EXPIRATION_DATE = APPLICATION_RECORD | 37;
|
||||
/** 2:38 Expiration Time */
|
||||
public static final int TAG_EXPIRATION_TIME = APPLICATION_RECORD | 38;
|
||||
/** 2:40 Special Instructions */
|
||||
public static final int TAG_SPECIAL_INSTRUCTIONS = APPLICATION_RECORD | 40; // 0x0228
|
||||
/** 2:42 Action Advised (1: Kill, 2: Replace, 3: Append, 4: Reference) */
|
||||
public static final int TAG_ACTION_ADVICED = APPLICATION_RECORD | 42;
|
||||
/** 2:45 Reference Service (repeatable in triplets with 2:47 and 2:50) */
|
||||
public static final int TAG_REFERENCE_SERVICE = APPLICATION_RECORD | 45;
|
||||
/** 2:47 Reference Date (mandatory if 2:45 present) */
|
||||
public static final int TAG_REFERENCE_DATE = APPLICATION_RECORD | 47;
|
||||
/** 2:50 Reference Number (mandatory if 2:45 present) */
|
||||
public static final int TAG_REFERENCE_NUMBER = APPLICATION_RECORD | 50;
|
||||
/** 2:55 Date Created */
|
||||
public static final int TAG_DATE_CREATED = APPLICATION_RECORD | 55; // 0x0237
|
||||
/** 2:60 Time Created */
|
||||
public static final int TAG_TIME_CREATED = APPLICATION_RECORD | 60;
|
||||
/** 2:62 Digital Creation Date */
|
||||
public static final int TAG_DIGITAL_CREATION_DATE = APPLICATION_RECORD | 62;
|
||||
/** 2:63 Digital Creation Time */
|
||||
public static final int TAG_DIGITAL_CREATION_TIME = APPLICATION_RECORD | 63;
|
||||
/** 2:65 Originating Program */
|
||||
public static final int TAG_ORIGINATING_PROGRAM = APPLICATION_RECORD | 65;
|
||||
/** 2:70 Program Version (only valid if 2:65 present) */
|
||||
public static final int TAG_PROGRAM_VERSION = APPLICATION_RECORD | 70;
|
||||
/** 2:75 Object Cycle (a: morning, p: evening, b: both) */
|
||||
public static final int TAG_OBJECT_CYCLE = APPLICATION_RECORD | 75;
|
||||
/** 2:80 By-line (repeatable) */
|
||||
public static final int TAG_BY_LINE = APPLICATION_RECORD | 80; // 0x0250
|
||||
/** 2:85 By-line Title (repeatable) */
|
||||
public static final int TAG_BY_LINE_TITLE = APPLICATION_RECORD | 85; // 0x0255
|
||||
/** 2:90 City */
|
||||
public static final int TAG_CITY = APPLICATION_RECORD | 90; // 0x025a
|
||||
/** 2:92 Sub-location */
|
||||
public static final int TAG_SUB_LOCATION = APPLICATION_RECORD | 92;
|
||||
/** 2:95 Province/State */
|
||||
public static final int TAG_PROVINCE_OR_STATE = APPLICATION_RECORD | 95; // 0x025f
|
||||
/** 2:100 Country/Primary Location Code */
|
||||
public static final int TAG_COUNTRY_OR_PRIMARY_LOCATION_CODE = APPLICATION_RECORD | 100;
|
||||
/** 2:101 Country/Primary Location Name */
|
||||
public static final int TAG_COUNTRY_OR_PRIMARY_LOCATION = APPLICATION_RECORD | 101; // 0x0265
|
||||
/** 2:103 Original Transmission Reference */
|
||||
public static final int TAG_ORIGINAL_TRANSMISSION_REFERENCE = APPLICATION_RECORD | 103; // 0x0267
|
||||
/** 2:105 Headline */
|
||||
public static final int TAG_HEADLINE = APPLICATION_RECORD | 105; // 0x0269
|
||||
/** 2:110 Credit */
|
||||
public static final int TAG_CREDIT = APPLICATION_RECORD | 110; // 0x026e
|
||||
/** 2:115 Source */
|
||||
public static final int TAG_SOURCE = APPLICATION_RECORD | 115; // 0x0273
|
||||
/** 2:116 Copyright Notice */
|
||||
public static final int TAG_COPYRIGHT_NOTICE = APPLICATION_RECORD | 116; // 0x0274
|
||||
/** 2:118 Contact */
|
||||
public static final int TAG_CONTACT = APPLICATION_RECORD | 118;
|
||||
/** 2:120 Caption/Abstract */
|
||||
public static final int TAG_CAPTION = APPLICATION_RECORD | 120; // 0x0278
|
||||
/** 2:122 Writer/Editor (repeatable) */
|
||||
public static final int TAG_WRITER = APPLICATION_RECORD | 122; // 0x027a
|
||||
/** 2:125 Rasterized Caption (binary data) */
|
||||
public static final int TAG_RASTERIZED_CATPTION = APPLICATION_RECORD | 125;
|
||||
/** 2:130 Image Type */
|
||||
public static final int TAG_IMAGE_TYPE = APPLICATION_RECORD | 130;
|
||||
/** 2:131 Image Orientation */
|
||||
public static final int TAG_IMAGE_ORIENTATION = APPLICATION_RECORD | 131;
|
||||
/** 2:135 Language Identifier */
|
||||
public static final int TAG_LANGUAGE_IDENTIFIER = APPLICATION_RECORD | 135;
|
||||
|
||||
// TODO: 2:150-2:154 Audio
|
||||
|
||||
// TODO: Should we expose this field?
|
||||
/**
|
||||
* 2:199 JobMinder Assignment Data (Custom IPTC field).
|
||||
* A common custom IPTC field used by a now discontinued application called JobMinder.
|
||||
*
|
||||
* @see <a href="http://www.jobminder.net/">JobMinder Homepage</a>
|
||||
*/
|
||||
static final int CUSTOM_TAG_JOBMINDER_ASSIGNMENT_DATA = APPLICATION_RECORD | 199;
|
||||
|
||||
// TODO: Other custom fields in 155-200 range?
|
||||
|
||||
// TODO: 2:200-2:202 Object Preview Data
|
||||
}
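
The constants above pack the IPTC record number into the high byte and the dataset number into the low byte, so 2:05 Object Name becomes (2 << 8) | 5 = 0x0205. A small sketch of that encoding, matching the >> 8 / & 0xff arithmetic used when IPTC entries are printed later in this commit:

public class IPTCTagIdExample {
    static int tagId(int pRecord, int pDataSet) {
        return pRecord << 8 | pDataSet; // record number in high byte, dataset number in low byte
    }

    public static void main(String[] pArgs) {
        int objectName = tagId(2, 5); // 2:05 Object Name == IPTC.TAG_OBJECT_NAME
        System.out.printf("0x%04x%n", objectName);                         // 0x0205
        System.out.println((objectName >> 8) + ":" + (objectName & 0xff)); // 2:5
    }
}
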
|
@ -0,0 +1,19 @@
|
||||
package com.twelvemonkeys.imageio.metadata.iptc;
|
||||
|
||||
import com.twelvemonkeys.imageio.metadata.AbstractDirectory;
|
||||
import com.twelvemonkeys.imageio.metadata.Entry;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
/**
|
||||
* IPTCDirectory
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: IPTCDirectory.java,v 1.0 Nov 11, 2009 5:02:59 PM haraldk Exp$
|
||||
*/
|
||||
final class IPTCDirectory extends AbstractDirectory {
|
||||
IPTCDirectory(final Collection<? extends Entry> pEntries) {
|
||||
super(pEntries);
|
||||
}
|
||||
}
|
@ -0,0 +1,16 @@
|
||||
package com.twelvemonkeys.imageio.metadata.iptc;
|
||||
|
||||
import com.twelvemonkeys.imageio.metadata.AbstractEntry;
|
||||
|
||||
/**
|
||||
* IPTCEntry
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: IPTCEntry.java,v 1.0 Nov 13, 2009 8:57:04 PM haraldk Exp$
|
||||
*/
|
||||
class IPTCEntry extends AbstractEntry {
|
||||
public IPTCEntry(int pTagId, Object pValue) {
|
||||
super(pTagId, pValue);
|
||||
}
|
||||
}
|
@ -0,0 +1,149 @@
|
||||
package com.twelvemonkeys.imageio.metadata.iptc;
|
||||
|
||||
import com.twelvemonkeys.imageio.metadata.Directory;
|
||||
import com.twelvemonkeys.imageio.metadata.Entry;
|
||||
import com.twelvemonkeys.imageio.metadata.MetadataReader;
|
||||
import com.twelvemonkeys.lang.StringUtil;
|
||||
|
||||
import javax.imageio.IIOException;
|
||||
import javax.imageio.stream.ImageInputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.CharBuffer;
|
||||
import java.nio.charset.CharacterCodingException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.CharsetDecoder;
|
||||
import java.nio.charset.CodingErrorAction;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* IPTCReader
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: IPTCReader.java,v 1.0 Nov 13, 2009 8:37:23 PM haraldk Exp$
|
||||
*/
|
||||
public class IPTCReader extends MetadataReader {
|
||||
private static final int ENCODING_UNKNOWN = -1;
|
||||
private static final int ENCODING_UNSPECIFIED = 0;
|
||||
private static final int ENCODING_UTF_8 = 0x1b2547;
|
||||
|
||||
private int mEncoding = ENCODING_UNSPECIFIED;
|
||||
|
||||
|
||||
@Override
|
||||
public Directory read(final ImageInputStream pInput) throws IOException {
|
||||
final List<Entry> entries = new ArrayList<Entry>();
|
||||
|
||||
// 0x1c identifies start of a tag
|
||||
while (pInput.read() == 0x1c) {
|
||||
int tagId = pInput.readShort();
|
||||
int tagByteCount = pInput.readUnsignedShort();
|
||||
|
||||
Entry entry = readEntry(pInput, tagId, tagByteCount);
|
||||
if (entry != null) {
|
||||
entries.add(entry);
|
||||
}
|
||||
}
|
||||
|
||||
return new IPTCDirectory(entries);
|
||||
}
|
||||
|
||||
private Entry readEntry(final ImageInputStream pInput, final int pTagId, final int pLength) throws IOException {
|
||||
Object value = null;
|
||||
|
||||
switch (pTagId) {
|
||||
case IPTC.TAG_CODED_CHARACTER_SET:
|
||||
// TODO: Mapping from ISO 646 to Java supported character sets?
|
||||
// TODO: Move somewhere else?
|
||||
mEncoding = parseEncoding(pInput, pLength);
|
||||
return null;
|
||||
case IPTC.TAG_RECORD_VERSION:
|
||||
// A single unsigned short value
|
||||
value = pInput.readUnsignedShort();
|
||||
break;
|
||||
// case IPTC.TAG_RELEASE_DATE:
|
||||
// case IPTC.TAG_EXPIRATION_DATE:
|
||||
// case IPTC.TAG_REFERENCE_DATE:
|
||||
// case IPTC.TAG_DATE_CREATED:
|
||||
// case IPTC.TAG_DIGITAL_CREATION_DATE:
|
||||
// // Date object
|
||||
// Date date = parseISO8601DatePart(pInput, tagByteCount);
|
||||
// if (date != null) {
|
||||
// directory.setDate(tagIdentifier, date);
|
||||
// return;
|
||||
// }
|
||||
// case IPTC.TAG_RELEASE_TIME:
|
||||
// case IPTC.TAG_EXPIRATION_TIME:
|
||||
// case IPTC.TAG_TIME_CREATED:
|
||||
// case IPTC.TAG_DIGITAL_CREATION_TIME:
|
||||
// // NOTE: Spec says fields should be sent in order, so this is okay
|
||||
// date = getDateForTime(directory, tagIdentifier);
|
||||
//
|
||||
// Date time = parseISO8601TimePart(pInput, tagByteCount, date);
|
||||
// if (time != null) {
|
||||
// directory.setDate(tagIdentifier, time);
|
||||
// return;
|
||||
// }
|
||||
//
|
||||
default:
|
||||
// Skip non-Application fields, as they are typically not human readable
|
||||
if ((pTagId & 0xff00) != IPTC.APPLICATION_RECORD) {
|
||||
pInput.skipBytes(pLength);
|
||||
return null;
|
||||
}
|
||||
|
||||
// fall through
|
||||
}
|
||||
|
||||
// If we don't have a value, treat it as a string
|
||||
if (value == null) {
|
||||
if (pLength < 1) {
|
||||
value = "(No value)";
|
||||
}
|
||||
else {
|
||||
value = parseString(pInput, pLength);
|
||||
}
|
||||
}
|
||||
|
||||
return new IPTCEntry(pTagId, value);
|
||||
}
|
||||
|
||||
private int parseEncoding(final ImageInputStream pInput, final int tagByteCount) throws IOException {
    if (tagByteCount != 3) {
        // Unknown or unsupported encoding declaration; skip it to stay aligned with the stream
        pInput.skipBytes(tagByteCount);
        return ENCODING_UNKNOWN;
    }

    return (pInput.readUnsignedByte() << 16 | pInput.readUnsignedByte() << 8 | pInput.readUnsignedByte()) == ENCODING_UTF_8
            ? ENCODING_UTF_8 : ENCODING_UNKNOWN;
}
|
||||
|
||||
// TODO: Pass encoding as parameter? Use if specified
|
||||
private String parseString(final ImageInputStream pInput, final int pLength) throws IOException {
|
||||
byte[] data = new byte[pLength];
|
||||
pInput.readFully(data);
|
||||
|
||||
// NOTE: The IPTC specification says character data should use ISO 646 or ISO 2022 encoding.
|
||||
// UTF-8 contains all 646 characters, but not 2022.
|
||||
// This is however close to what libiptcdata does, see: http://libiptcdata.sourceforge.net/docs/iptc-i18n.html
|
||||
Charset charset = Charset.forName("UTF-8");
|
||||
CharsetDecoder decoder = charset.newDecoder();
|
||||
|
||||
try {
|
||||
// First try to decode using UTF-8 (which seems to be the de-facto standard)
|
||||
// Will fail fast on illegal UTF-8-sequences
|
||||
CharBuffer chars = decoder.onMalformedInput(CodingErrorAction.REPORT)
|
||||
.onUnmappableCharacter(CodingErrorAction.REPORT)
|
||||
.decode(ByteBuffer.wrap(data));
|
||||
return chars.toString();
|
||||
}
|
||||
catch (CharacterCodingException notUTF8) {
|
||||
if (mEncoding == ENCODING_UTF_8) {
|
||||
throw new IIOException("Wrong encoding of IPTC data, explicitly set to UTF-8 in DataSet 1:90", notUTF8);
|
||||
}
|
||||
|
||||
// Fall back to use ISO-8859-1
|
||||
// This will not fail, but may create wrong fallback characters
|
||||
return StringUtil.decode(data, 0, data.length, "ISO8859_1");
|
||||
}
|
||||
}
|
||||
|
||||
}
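
For reference, the reader above consumes the raw IIM wire format, where every dataset is: a 0x1C marker byte, a record number byte, a dataset number byte, a two-byte big-endian length, and then the data itself. A minimal round-trip sketch with a hand-built 2:05 Object Name dataset (the sample value is made up for illustration):

import com.twelvemonkeys.imageio.metadata.Directory;
import com.twelvemonkeys.imageio.metadata.iptc.IPTCReader;

import javax.imageio.stream.MemoryCacheImageInputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class IPTCRoundTrip {
    public static void main(String[] pArgs) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);

        // 2:05 Object Name, value "Test" (marker, record, dataset, length, data)
        out.writeByte(0x1c);
        out.writeByte(2);
        out.writeByte(5);
        byte[] value = "Test".getBytes("UTF-8");
        out.writeShort(value.length);
        out.write(value);

        Directory directory = new IPTCReader().read(
                new MemoryCacheImageInputStream(new ByteArrayInputStream(bytes.toByteArray()))
        );
        System.out.println(directory); // prints "IPTCDirectory[517: Test (String)]" (0x0205 == 517)
    }
}
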
|
@ -0,0 +1,127 @@
|
||||
package com.twelvemonkeys.imageio.metadata.xmp;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.charset.UnsupportedCharsetException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Random;
|
||||
|
||||
/**
|
||||
* XMPScannerTestCase
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: XMPScannerTestCase.java,v 1.0 Nov 13, 2009 3:59:43 PM haraldk Exp$
|
||||
*/
|
||||
public class XMPScannerTestCase extends TestCase {
|
||||
|
||||
static final String XMP =
|
||||
"<?xpacket begin=\"\uFEFF\" id=\"W5M0MpCehiHzreSzNTczkc9d\"?>" +
|
||||
"<x:xmpmeta xmlns:x=\"adobe:ns:meta/\" x:xmptk=\"Adobe XMP Core 4.1-c036 46.276720, Fri Nov 13 2009 15:59:43 \">\n"+
|
||||
" <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\n"+
|
||||
" <rdf:Description rdf:about=\"\"\n"+
|
||||
" xmlns:photoshop=\"http://ns.adobe.com/photoshop/1.0/\">\n"+
|
||||
" <photoshop:Source>twelvemonkeys.com</photoshop:Source>\n"+
|
||||
" </rdf:Description>\n"+
|
||||
" <rdf:Description rdf:about=\"\"\n"+
|
||||
" xmlns:dc=\"http://purl.org/dc/elements/1.1/\">\n"+
|
||||
" <dc:format>application/vnd.adobe.photoshop</dc:format>\n"+
|
||||
" </rdf:Description>\n"+
|
||||
" </rdf:RDF>\n"+
|
||||
"</x:xmpmeta>" +
|
||||
"<?xpacket end=\"w\"?>";
|
||||
|
||||
final Random mRandom = new Random(4934638567l);
|
||||
|
||||
private InputStream createRandomStream(final int pLength) {
|
||||
byte[] bytes = new byte[pLength];
|
||||
mRandom.nextBytes(bytes);
|
||||
return new ByteArrayInputStream(bytes);
|
||||
}
|
||||
|
||||
private InputStream createXMPStream(final String pXMP, final String pCharsetName) {
|
||||
try {
|
||||
return new SequenceInputStream(
|
||||
Collections.enumeration(
|
||||
Arrays.asList(
|
||||
createRandomStream(79),
|
||||
new ByteArrayInputStream(pXMP.getBytes(pCharsetName)),
|
||||
createRandomStream(31)
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
catch (UnsupportedEncodingException e) {
|
||||
UnsupportedCharsetException uce = new UnsupportedCharsetException(pCharsetName);
|
||||
uce.initCause(e);
|
||||
throw uce;
|
||||
}
|
||||
}
|
||||
|
||||
public void testScanForUTF8() throws IOException {
|
||||
InputStream stream = createXMPStream(XMP, "UTF-8");
|
||||
|
||||
Reader reader = XMPScanner.scanForXMPPacket(stream);
|
||||
|
||||
assertNotNull(reader);
|
||||
}
|
||||
|
||||
public void testScanForUTF8singleQuote() throws IOException {
|
||||
InputStream stream = createXMPStream(XMP.replace("\"", "'"), "UTF-8");
|
||||
|
||||
Reader reader = XMPScanner.scanForXMPPacket(stream);
|
||||
|
||||
assertNotNull(reader);
|
||||
}
|
||||
|
||||
public void testScanForUTF16BE() throws IOException {
|
||||
InputStream stream = createXMPStream(XMP, "UTF-16BE");
|
||||
|
||||
Reader reader = XMPScanner.scanForXMPPacket(stream);
|
||||
|
||||
assertNotNull(reader);
|
||||
}
|
||||
|
||||
public void testScanForUTF16BEsingleQuote() throws IOException {
|
||||
InputStream stream = createXMPStream(XMP.replace("\"", "'"), "UTF-16BE");
|
||||
|
||||
Reader reader = XMPScanner.scanForXMPPacket(stream);
|
||||
|
||||
assertNotNull(reader);
|
||||
}
|
||||
|
||||
public void testScanForUTF16LE() throws IOException {
|
||||
InputStream stream = createXMPStream(XMP, "UTF-16LE");
|
||||
|
||||
Reader reader = XMPScanner.scanForXMPPacket(stream);
|
||||
|
||||
assertNotNull(reader);
|
||||
}
|
||||
|
||||
public void testScanForUTF16LEsingleQuote() throws IOException {
|
||||
InputStream stream = createXMPStream(XMP.replace("\"", "'"), "UTF-16LE");
|
||||
|
||||
Reader reader = XMPScanner.scanForXMPPacket(stream);
|
||||
|
||||
assertNotNull(reader);
|
||||
}
|
||||
|
||||
// TODO: The default Java installation on OS X doesn't seem to have UTF-32 installed. Hmmm..
|
||||
// public void testUTF32BE() throws IOException {
|
||||
// InputStream stream = createXMPStream("UTF-32BE");
|
||||
//
|
||||
// Reader reader = XMPScanner.scanForXMPPacket(stream);
|
||||
//
|
||||
// assertNotNull(reader);
|
||||
// }
|
||||
//
|
||||
// public void testUTF32LE() throws IOException {
|
||||
// InputStream stream = createXMPStream("UTF-32LE");
|
||||
//
|
||||
// Reader reader = XMPScanner.scanForXMPPacket(stream);
|
||||
//
|
||||
// assertNotNull(reader);
|
||||
// }
|
||||
}
|
@ -29,6 +29,7 @@
|
||||
<modules>
|
||||
<!-- Support -->
|
||||
<module>core</module>
|
||||
<module>metadata</module>
|
||||
|
||||
<!-- Stand-alone readers/writers -->
|
||||
<module>ico</module>
|
||||
@ -96,6 +97,13 @@
|
||||
<classifier>tests</classifier>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.twelvemonkeys.imageio</groupId>
|
||||
<artifactId>twelvemonkeys-imageio-metadata</artifactId>
|
||||
<version>${imageio.core.version}</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</dependencyManagement>
|
||||
|
||||
|
@ -27,5 +27,9 @@
|
||||
<artifactId>twelvemonkeys-imageio-core</artifactId>
|
||||
<classifier>tests</classifier>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.twelvemonkeys.imageio</groupId>
|
||||
<artifactId>twelvemonkeys-imageio-metadata</artifactId>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
@ -0,0 +1,103 @@
|
||||
package com.twelvemonkeys.imageio.plugins.psd;
|
||||
|
||||
import org.w3c.dom.Node;
|
||||
|
||||
import javax.imageio.metadata.IIOInvalidTreeException;
|
||||
import javax.imageio.metadata.IIOMetadata;
|
||||
import javax.imageio.metadata.IIOMetadataFormatImpl;
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* AbstractMetadata
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: AbstractMetadata.java,v 1.0 Nov 13, 2009 1:02:12 AM haraldk Exp$
|
||||
*/
|
||||
abstract class AbstractMetadata extends IIOMetadata implements Cloneable {
|
||||
|
||||
protected AbstractMetadata(final boolean pStandardFormatSupported,
|
||||
final String pNativeFormatName, final String pNativeFormatClassName,
|
||||
final String[] pExtraFormatNames, final String[] pExtraFormatClassNames) {
|
||||
super(pStandardFormatSupported, pNativeFormatName, pNativeFormatClassName, pExtraFormatNames, pExtraFormatClassNames);
|
||||
}
|
||||
|
||||
/**
|
||||
* Default implementation returns {@code true}.
|
||||
* Mutable subclasses should override this method.
|
||||
*
|
||||
* @return {@code true}.
|
||||
*/
|
||||
@Override
|
||||
public boolean isReadOnly() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Node getAsTree(final String pFormatName) {
|
||||
validateFormatName(pFormatName);
|
||||
|
||||
if (pFormatName.equals(nativeMetadataFormatName)) {
|
||||
return getNativeTree();
|
||||
}
|
||||
else if (pFormatName.equals(IIOMetadataFormatImpl.standardMetadataFormatName)) {
|
||||
return getStandardTree();
|
||||
}
|
||||
|
||||
// TODO: What about extra formats??
|
||||
throw new AssertionError("Unreachable");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void mergeTree(final String pFormatName, final Node pRoot) throws IIOInvalidTreeException {
|
||||
assertMutable();
|
||||
|
||||
validateFormatName(pFormatName);
|
||||
|
||||
if (!pRoot.getNodeName().equals(nativeMetadataFormatName)) {
|
||||
throw new IIOInvalidTreeException("Root must be " + nativeMetadataFormatName, pRoot);
|
||||
}
|
||||
|
||||
Node node = pRoot.getFirstChild();
|
||||
while (node != null) {
|
||||
// TODO: Merge values from node into this
|
||||
|
||||
// Move to the next sibling
|
||||
node = node.getNextSibling();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reset() {
|
||||
assertMutable();
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that this meta data is mutable.
|
||||
*
|
||||
* @throws IllegalStateException if {@link #isReadOnly()} returns {@code true}.
|
||||
*/
|
||||
protected final void assertMutable() {
|
||||
if (isReadOnly()) {
|
||||
throw new IllegalStateException("Metadata is read-only");
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract Node getNativeTree();
|
||||
|
||||
protected final void validateFormatName(final String pFormatName) {
|
||||
String[] metadataFormatNames = getMetadataFormatNames();
|
||||
|
||||
if (metadataFormatNames != null) {
|
||||
for (String metadataFormatName : metadataFormatNames) {
|
||||
if (metadataFormatName.equals(pFormatName)) {
|
||||
return; // Found, we're ok!
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Bad format name: \"%s\". Expected one of %s", pFormatName, Arrays.toString(metadataFormatNames))
|
||||
);
|
||||
}
|
||||
}
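
To make the contract of the new base class concrete, here is a hypothetical minimal subclass (not part of this commit; the format name and node layout are invented). It only has to provide the native tree, since getAsTree, mergeTree, reset and the format-name validation are inherited:

package com.twelvemonkeys.imageio.plugins.psd; // AbstractMetadata is package-private

import com.twelvemonkeys.imageio.metadata.Directory;
import com.twelvemonkeys.imageio.metadata.Entry;

import org.w3c.dom.Node;

import javax.imageio.metadata.IIOMetadataNode;

// Hypothetical example class, for illustration only
final class SimpleDirectoryMetadata extends AbstractMetadata {
    private final Directory mDirectory;

    SimpleDirectoryMetadata(final Directory pDirectory) {
        // No standard format support, one (invented) native format, no extra formats
        super(false, "com_example_simple_1.0", null, null, null);
        mDirectory = pDirectory;
    }

    @Override
    protected Node getNativeTree() {
        IIOMetadataNode root = new IIOMetadataNode(nativeMetadataFormatName);

        for (Entry entry : mDirectory) {
            IIOMetadataNode node = new IIOMetadataNode("entry");
            node.setAttribute("id", String.valueOf(entry.getIdentifier()));
            node.setAttribute("value", entry.getValueAsString());
            root.appendChild(node);
        }

        return root;
    }
}
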
|
@ -34,11 +34,11 @@ import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* PSDAlhpaChannelInfo
|
||||
* PSDAlphaChannelInfo
|
||||
*
|
||||
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
|
||||
* @author last modified by $Author: haraldk$
|
||||
* @version $Id: PSDAlhpaChannelInfo.java,v 1.0 May 2, 2008 5:33:40 PM haraldk Exp$
|
||||
* @version $Id: PSDAlphaChannelInfo.java,v 1.0 May 2, 2008 5:33:40 PM haraldk Exp$
|
||||
*/
|
||||
class PSDAlphaChannelInfo extends PSDImageResource {
|
||||
List<String> mNames;
|
||||
@ -50,6 +50,7 @@ class PSDAlphaChannelInfo extends PSDImageResource {
|
||||
@Override
|
||||
protected void readData(final ImageInputStream pInput) throws IOException {
|
||||
mNames = new ArrayList<String>();
|
||||
|
||||
long left = mSize;
|
||||
while (left > 0) {
|
||||
String name = PSDUtil.readPascalString(pInput);
|
||||
|
@ -1,13 +1,11 @@
|
||||
package com.twelvemonkeys.imageio.plugins.psd;
|
||||
|
||||
import com.twelvemonkeys.imageio.util.IIOUtil;
|
||||
import com.twelvemonkeys.imageio.metadata.exif.EXIFReader;
|
||||
import com.twelvemonkeys.lang.StringUtil;
|
||||
|
||||
import javax.imageio.IIOException;
|
||||
import javax.imageio.stream.ImageInputStream;
|
||||
import javax.imageio.stream.MemoryCacheImageInputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteOrder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
@ -26,7 +24,8 @@ import java.util.List;
|
||||
*/
|
||||
final class PSDEXIF1Data extends PSDImageResource {
|
||||
// protected byte[] mData;
|
||||
protected Directory mDirectory;
|
||||
// protected Directory mDirectory;
|
||||
protected com.twelvemonkeys.imageio.metadata.Directory mDirectory;
|
||||
|
||||
PSDEXIF1Data(final short pId, final ImageInputStream pInput) throws IOException {
|
||||
super(pId, pInput);
|
||||
@ -36,24 +35,25 @@ final class PSDEXIF1Data extends PSDImageResource {
|
||||
protected void readData(final ImageInputStream pInput) throws IOException {
|
||||
// This is in essence an embedded TIFF file.
|
||||
// TODO: Extract TIFF parsing to more general purpose package
|
||||
// TODO: Instead, read the byte data, store for later parsing (or store offset, and read on request)
|
||||
MemoryCacheImageInputStream stream = new MemoryCacheImageInputStream(IIOUtil.createStreamAdapter(pInput, mSize));
|
||||
|
||||
byte[] bom = new byte[2];
|
||||
stream.readFully(bom);
|
||||
if (bom[0] == 'I' && bom[1] == 'I') {
|
||||
stream.setByteOrder(ByteOrder.LITTLE_ENDIAN);
|
||||
}
|
||||
else if (!(bom[0] == 'M' && bom[1] == 'M')) {
|
||||
throw new IIOException(String.format("Invalid byte order marker '%s'", StringUtil.decode(bom, 0, bom.length, "ASCII")));
|
||||
}
|
||||
|
||||
if (stream.readUnsignedShort() != 42) {
|
||||
throw new IIOException("Wrong TIFF magic in EXIF data.");
|
||||
}
|
||||
|
||||
long directoryOffset = stream.readUnsignedInt();
|
||||
mDirectory = Directory.read(stream, directoryOffset);
|
||||
// TODO: Instead, read the byte data, store for later parsing (or better yet, store offset, and read on request)
|
||||
mDirectory = new EXIFReader().read(pInput);
|
||||
// byte[] bom = new byte[2];
|
||||
// stream.readFully(bom);
|
||||
// if (bom[0] == 'I' && bom[1] == 'I') {
|
||||
// stream.setByteOrder(ByteOrder.LITTLE_ENDIAN);
|
||||
// }
|
||||
// else if (!(bom[0] == 'M' && bom[1] == 'M')) {
|
||||
// throw new IIOException(String.format("Invalid byte order marker '%s'", StringUtil.decode(bom, 0, bom.length, "ASCII")));
|
||||
// }
|
||||
//
|
||||
// if (stream.readUnsignedShort() != 42) {
|
||||
// throw new IIOException("Wrong TIFF magic in EXIF data.");
|
||||
// }
|
||||
//
|
||||
// long directoryOffset = stream.readUnsignedInt();
|
||||
//
|
||||
// // Read TIFF directory
|
||||
// mDirectory = Directory.read(stream, directoryOffset);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -78,11 +78,13 @@ final class PSDEXIF1Data extends PSDImageResource {
|
||||
|
||||
pInput.seek(pOffset);
|
||||
int entryCount = pInput.readUnsignedShort();
|
||||
|
||||
for (int i = 0; i < entryCount; i++) {
|
||||
directory.mEntries.add(Entry.read(pInput));
|
||||
}
|
||||
|
||||
long nextOffset = pInput.readUnsignedInt();
|
||||
|
||||
if (nextOffset != 0) {
|
||||
Directory next = Directory.read(pInput, nextOffset);
|
||||
directory.mEntries.addAll(next.mEntries);
|
||||
@ -91,9 +93,9 @@ final class PSDEXIF1Data extends PSDImageResource {
|
||||
return directory;
|
||||
}
|
||||
|
||||
public Entry get(int pTag) {
|
||||
public Entry get(int pTagId) {
|
||||
for (Entry entry : mEntries) {
|
||||
if (entry.mTag == pTag) {
|
||||
if (entry.mTagId == pTagId) {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
@ -127,7 +129,7 @@ final class PSDEXIF1Data extends PSDImageResource {
|
||||
1, 1, 2, 4, 8, 4, 8,
|
||||
};
|
||||
|
||||
private int mTag;
|
||||
final int mTagId;
|
||||
/*
|
||||
1 = BYTE 8-bit unsigned integer.
|
||||
2 = ASCII 8-bit byte that contains a 7-bit ASCII code; the last byte
|
||||
@ -153,19 +155,22 @@ final class PSDEXIF1Data extends PSDImageResource {
|
||||
private long mValueOffset;
|
||||
private Object mValue;
|
||||
|
||||
private Entry() {}
|
||||
private Entry(int pTagId) {
|
||||
mTagId = pTagId;
|
||||
}
|
||||
|
||||
public static Entry read(final ImageInputStream pInput) throws IOException {
|
||||
Entry entry = new Entry();
|
||||
Entry entry = new Entry(pInput.readUnsignedShort());
|
||||
|
||||
entry.mTag = pInput.readUnsignedShort();
|
||||
entry.mType = pInput.readShort();
|
||||
entry.mCount = pInput.readInt(); // Number of values
|
||||
|
||||
// TODO: Handle other sub-IFDs
|
||||
if (entry.mTag == EXIF_IFD) {
|
||||
// GPS IFD: 0x8825, Interoperability IFD: 0xA005
|
||||
if (entry.mTagId == EXIF_IFD) {
|
||||
long offset = pInput.readUnsignedInt();
|
||||
pInput.mark();
|
||||
|
||||
try {
|
||||
entry.mValue = Directory.read(pInput, offset);
|
||||
}
|
||||
@ -175,6 +180,7 @@ final class PSDEXIF1Data extends PSDImageResource {
|
||||
}
|
||||
else {
|
||||
int valueLength = entry.getValueLength();
|
||||
|
||||
if (valueLength > 0 && valueLength <= 4) {
|
||||
entry.readValueInLine(pInput);
|
||||
pInput.skipBytes(4 - valueLength);
|
||||
@ -299,22 +305,21 @@ final class PSDEXIF1Data extends PSDImageResource {
|
||||
return -1;
|
||||
}
|
||||
|
||||
private String getTypeName() {
|
||||
public final String getTypeName() {
|
||||
if (mType > 0 && mType <= TYPE_NAMES.length) {
|
||||
return TYPE_NAMES[mType - 1];
|
||||
}
|
||||
|
||||
return "Unknown type";
|
||||
}
|
||||
|
||||
// TODO: Tag names!
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("0x%04x: %s (%s, %d)", mTag, getValueAsString(), getTypeName(), mCount);
|
||||
public final Object getValue() {
|
||||
return mValue;
|
||||
}
|
||||
|
||||
public String getValueAsString() {
|
||||
public final String getValueAsString() {
|
||||
if (mValue instanceof String) {
|
||||
return String.format("\"%s\"", mValue);
|
||||
return String.format("%s", mValue);
|
||||
}
|
||||
|
||||
if (mValue != null && mValue.getClass().isArray()) {
|
||||
@ -338,5 +343,11 @@ final class PSDEXIF1Data extends PSDImageResource {
|
||||
|
||||
return String.valueOf(mValue);
|
||||
}
|
||||
|
||||
// TODO: Tag names!
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("0x%04x: %s (%s, %d)", mTagId, mType == 2 ? String.format("\"%s\"", mValue) : getValueAsString(), getTypeName(), mCount);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -48,16 +48,18 @@ class PSDGlobalLayerMask {
|
||||
final int mKind;
|
||||
|
||||
PSDGlobalLayerMask(final ImageInputStream pInput) throws IOException {
|
||||
mColorSpace = pInput.readUnsignedShort();
|
||||
mColorSpace = pInput.readUnsignedShort(); // Undocumented
|
||||
|
||||
mColor1 = pInput.readUnsignedShort();
|
||||
mColor2 = pInput.readUnsignedShort();
|
||||
mColor3 = pInput.readUnsignedShort();
|
||||
mColor4 = pInput.readUnsignedShort();
|
||||
|
||||
mOpacity = pInput.readUnsignedShort();
|
||||
mOpacity = pInput.readUnsignedShort(); // 0-100
|
||||
|
||||
mKind = pInput.readUnsignedByte();
|
||||
mKind = pInput.readUnsignedByte(); // 0: Selected (ie inverted), 1: Color protected, 128: Use value stored per layer
|
||||
|
||||
// TODO: Variable: Filler zeros
|
||||
|
||||
pInput.readByte(); // Pad
|
||||
}
|
||||
|
@ -1,5 +1,6 @@
|
||||
package com.twelvemonkeys.imageio.plugins.psd;
|
||||
|
||||
import com.twelvemonkeys.imageio.metadata.iptc.IPTCReader;
|
||||
import com.twelvemonkeys.lang.StringUtil;
|
||||
|
||||
import javax.imageio.IIOException;
|
||||
@ -7,8 +8,13 @@ import javax.imageio.stream.ImageInputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.CharBuffer;
|
||||
import java.nio.charset.*;
|
||||
import java.util.*;
|
||||
import java.nio.charset.CharacterCodingException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.CharsetDecoder;
|
||||
import java.nio.charset.CodingErrorAction;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* PSDIPTCData
|
||||
@ -20,7 +26,7 @@ import java.util.*;
|
||||
final class PSDIPTCData extends PSDImageResource {
|
||||
// TODO: Refactor to be more like PSDEXIF1Data...
|
||||
// TODO: Extract IPTC/EXIF/XMP metadata extraction/parsing to separate module(s)
|
||||
Directory mDirectory;
com.twelvemonkeys.imageio.metadata.Directory mDirectory;

PSDIPTCData(final short pId, final ImageInputStream pInput) throws IOException {
super(pId, pInput);
@@ -28,7 +34,8 @@ final class PSDIPTCData extends PSDImageResource {

@Override
protected void readData(final ImageInputStream pInput) throws IOException {
mDirectory = Directory.read(pInput, mSize);
// Read IPTC directory
mDirectory = new IPTCReader().read(pInput);
}

@Override
@@ -40,17 +47,37 @@ final class PSDIPTCData extends PSDImageResource {
}

static class Entry {
private int mTagId;
private String mValue;
final int mTagId;
private Object mValue;

public Entry(int pTagId, String pValue) {
public Entry(final int pTagId, final Object pValue) {
mTagId = pTagId;
mValue = pValue;
}

@Override
public String toString() {
return (mTagId >> 8) + ":" + (mTagId & 0xff) + ": " + mValue;
return String.format("%d:%d: %s", mTagId >> 8, mTagId & 0xff, mValue);
}

public final String getTypeName() {
// TODO: Should this really look like EXIF?
if (mTagId == IPTC.TAG_RECORD_VERSION) {
return "SHORT";
}
else if (mValue instanceof String) {
return "ASCII";
}

return "Unknown type";
}

public final String getValueAsString() {
return String.valueOf(mValue);
}

public final Object getValue() {
return mValue;
}
}

@@ -60,6 +87,7 @@ final class PSDIPTCData extends PSDImageResource {
private static final int ENCODING_UTF_8 = 0x1b2547;

private int mEncoding = ENCODING_UNSPECIFIED;

final List<Entry> mEntries = new ArrayList<Entry>();

private Directory() {}
@@ -69,6 +97,16 @@ final class PSDIPTCData extends PSDImageResource {
return "Directory" + mEntries.toString();
}

public Entry get(int pTagId) {
for (Entry entry : mEntries) {
if (entry.mTagId == pTagId) {
return entry;
}
}

return null;
}

public Iterator<Entry> iterator() {
return mEntries.iterator();
}
@@ -81,43 +119,33 @@ final class PSDIPTCData extends PSDImageResource {
// For each tag
while (pInput.getStreamPosition() < streamEnd) {
// Identifies start of a tag
byte b = pInput.readByte();
if (b != 0x1c) {
throw new IIOException("Corrupt IPTC stream segment");
byte marker = pInput.readByte();

if (marker != 0x1c) {
throw new IIOException(String.format("Corrupt IPTC stream segment, found 0x%02x (expected 0x1c)", marker));
}

// We need at least four bytes left to read a tag
if (pInput.getStreamPosition() + 4 >= streamEnd) {
break;
}

int directoryType = pInput.readUnsignedByte();
int tagType = pInput.readUnsignedByte();
int tagId = pInput.readShort();
int tagByteCount = pInput.readUnsignedShort();

if (pInput.getStreamPosition() + tagByteCount > streamEnd) {
throw new IIOException("Data for tag extends beyond end of IPTC segment: " + (tagByteCount + pInput.getStreamPosition() - streamEnd));
}

directory.processTag(pInput, directoryType, tagType, tagByteCount);
directory.readEntry(pInput, tagId, tagByteCount);
}

return directory;
}

private void processTag(ImageInputStream pInput, int directoryType, int tagType, int tagByteCount) throws IOException {
int tagIdentifier = (directoryType << 8) | tagType;
private void readEntry(final ImageInputStream pInput, final int pTagId, final int pLength) throws IOException {
Object value = null;

String str = null;
switch (tagIdentifier) {
switch (pTagId) {
case IPTC.TAG_CODED_CHARACTER_SET:
// TODO: Use this encoding!?
// TODO: Mapping from ISO 646 to Java supported character sets?
// TODO: Move somewhere else?
mEncoding = parseEncoding(pInput, tagByteCount);
mEncoding = parseEncoding(pInput, pLength);
return;
case IPTC.TAG_RECORD_VERSION:
// short
str = Integer.toString(pInput.readUnsignedShort());
// A single unsigned short value
value = pInput.readUnsignedShort();
break;
// case IPTC.TAG_RELEASE_DATE:
// case IPTC.TAG_EXPIRATION_DATE:
@@ -144,50 +172,26 @@ final class PSDIPTCData extends PSDImageResource {
// }
//
default:
// Skip non-Application fields, as they are typically not human readable
if ((pTagId & 0xff00) != IPTC.APPLICATION_RECORD) {
pInput.skipBytes(pLength);
return;
}

// fall through
}

// Skip non-Application fields, as they are typically not human readable
if (directoryType << 8 != IPTC.APPLICATION_RECORD) {
return;
}

// If we don't have a value, treat it as a string
if (str == null) {
if (tagByteCount < 1) {
str = "(No value)";
if (value == null) {
if (pLength < 1) {
value = "(No value)";
}
else {
str = String.format("\"%s\"", parseString(pInput, tagByteCount));
value = parseString(pInput, pLength);
}
}

mEntries.add(new Entry(tagIdentifier, str));

// if (directory.containsTag(tagIdentifier)) {
// // TODO: Does that REALLY help for performance?!
// // this fancy string[] business avoids using an ArrayList for performance reasons
// String[] oldStrings;
// String[] newStrings;
// try {
// oldStrings = directory.getStringArray(tagIdentifier);
// }
// catch (MetadataException e) {
// oldStrings = null;
// }
// if (oldStrings == null) {
// newStrings = new String[1];
// }
// else {
// newStrings = new String[oldStrings.length + 1];
// System.arraycopy(oldStrings, 0, newStrings, 0, oldStrings.length);
// }
// newStrings[newStrings.length - 1] = str;
// directory.setStringArray(tagIdentifier, newStrings);
// }
// else {
// directory.setString(tagIdentifier, str);
// }
mEntries.add(new Entry(pTagId, value));
}
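The read()/readEntry() pair above walks the raw IPTC IIM layout: each dataset starts with a 0x1C marker, followed by a record number, a dataset number and a big-endian length, and the record/dataset bytes are combined into a single tag id. The standalone sketch below only illustrates that layout; the class and method names are invented for illustration and are not part of this commit.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

class IptcDatasetSketch {
    static final class Dataset {
        final int tagId;
        final byte[] payload;

        Dataset(int tagId, byte[] payload) {
            this.tagId = tagId;
            this.payload = payload;
        }
    }

    static List<Dataset> parseDatasets(byte[] segment) throws IOException {
        List<Dataset> datasets = new ArrayList<Dataset>();
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(segment));

        while (in.available() > 0) {
            if (in.readUnsignedByte() != 0x1c) {
                throw new IOException("Missing 0x1C dataset marker");
            }

            int record = in.readUnsignedByte();  // e.g. 2 = application record
            int dataset = in.readUnsignedByte(); // e.g. 120 = Caption/Abstract
            int length = in.readUnsignedShort(); // big-endian byte count

            byte[] payload = new byte[length];
            in.readFully(payload);

            // Same packing as the readShort() tag id above: (record << 8) | dataset
            datasets.add(new Dataset((record << 8) | dataset, payload));
        }

        return datasets;
    }
}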
// private Date getDateForTime(final Directory directory, final int tagIdentifier) {
@@ -267,22 +271,23 @@ final class PSDIPTCData extends PSDImageResource {
// }

// TODO: Pass encoding as parameter? Use if specified
private String parseString(final ImageInputStream pInput, int length) throws IOException {
// NOTE: The IPTC "spec" says ISO 646 or ISO 2022 encoding. UTF-8 contains all 646 characters, but not 2022.
private String parseString(final ImageInputStream pInput, final int pLength) throws IOException {
byte[] data = new byte[pLength];
pInput.readFully(data);

// NOTE: The IPTC specification says character data should use ISO 646 or ISO 2022 encoding.
// UTF-8 contains all 646 characters, but not 2022.
// This is however close to what libiptcdata does, see: http://libiptcdata.sourceforge.net/docs/iptc-i18n.html
// First try to decode using UTF-8 (which seems to be the de-facto standard)
String str;
Charset charset = Charset.forName("UTF-8");
CharsetDecoder decoder = charset.newDecoder();
CharBuffer chars;
byte[] data = new byte[length];
pInput.readFully(data);

try {
// First try to decode using UTF-8 (which seems to be the de-facto standard)
// Will fail fast on illegal UTF-8-sequences
chars = decoder.onMalformedInput(CodingErrorAction.REPORT)
CharBuffer chars = decoder.onMalformedInput(CodingErrorAction.REPORT)
.onUnmappableCharacter(CodingErrorAction.REPORT)
.decode(ByteBuffer.wrap(data));
str = chars.toString();
return chars.toString();
}
catch (CharacterCodingException notUTF8) {
if (mEncoding == ENCODING_UTF_8) {
@@ -291,10 +296,8 @@ final class PSDIPTCData extends PSDImageResource {

// Fall back to use ISO-8859-1
// This will not fail, but may create wrong fallback-characters
str = StringUtil.decode(data, 0, data.length, "ISO8859_1");
return StringUtil.decode(data, 0, data.length, "ISO8859_1");
}

return str;
}
}
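For reference, the strict-UTF-8-with-Latin-1-fallback decoding done by parseString() above can be sketched with nothing but the java.nio.charset API. This snippet is illustrative only and not part of the commit.

import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CodingErrorAction;

final class FallbackDecoderSketch {
    static String decode(byte[] data) {
        try {
            // REPORT makes the decoder throw on malformed input instead of silently replacing it,
            // so non-UTF-8 data is detected reliably
            return Charset.forName("UTF-8").newDecoder()
                    .onMalformedInput(CodingErrorAction.REPORT)
                    .onUnmappableCharacter(CodingErrorAction.REPORT)
                    .decode(ByteBuffer.wrap(data))
                    .toString();
        }
        catch (CharacterCodingException notUTF8) {
            // ISO-8859-1 maps every byte to some character, so this never fails,
            // but may produce wrong glyphs for data in other 8-bit encodings
            return new String(data, Charset.forName("ISO-8859-1"));
        }
    }
}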
@@ -62,7 +62,9 @@ import java.util.List;
* @version $Id: PSDImageReader.java,v 1.0 Apr 29, 2008 4:45:52 PM haraldk Exp$
*/
// TODO: Implement ImageIO meta data interface
// TODO: API for reading separate layers
// TODO: Allow reading the extra alpha channels (index after composite data)
// TODO: Support for PSDVersionInfo hasRealMergedData=false (no real composite data, layers will be in index 0)
// TODO: Support for API for reading separate layers (index after composite data, and optional alpha channels)
// TODO: Consider Romain Guy's Java 2D implementation of PS filters for the blending modes in layers
// http://www.curious-creature.org/2006/09/20/new-blendings-modes-for-java2d/
// See http://www.codeproject.com/KB/graphics/PSDParser.aspx
@@ -1144,11 +1146,12 @@ public class PSDImageReader extends ImageReaderBase {

node = metadata.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName);
serializer = new XMLSerializer(System.out, System.getProperty("file.encoding"));
serializer.setIndentation(" ");
serializer.serialize(node, true);
System.out.println();

node = metadata.getAsTree(PSDMetadata.NATIVE_METADATA_FORMAT_NAME);
serializer = new XMLSerializer(System.out, System.getProperty("file.encoding"));
// serializer = new XMLSerializer(System.out, System.getProperty("file.encoding"));
serializer.serialize(node, true);

if (imageReader.hasThumbnails(0)) {
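The debug code above relies on the non-public XMLSerializer class to print the metadata trees. An equivalent sketch using only the standard javax.xml.transform API (not code from this commit) could look like the following; the native format name is the one declared by PSDMetadata.

import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.metadata.IIOMetadataFormatImpl;
import javax.imageio.stream.ImageInputStream;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.File;
import java.util.Iterator;

public class DumpMetadataSketch {
    public static void main(String[] args) throws Exception {
        ImageInputStream input = ImageIO.createImageInputStream(new File(args[0]));
        Iterator<ImageReader> readers = ImageIO.getImageReaders(input);
        ImageReader reader = readers.next();
        reader.setInput(input);

        IIOMetadata metadata = reader.getImageMetadata(0);

        Transformer transformer = TransformerFactory.newInstance().newTransformer();
        transformer.setOutputProperty(OutputKeys.INDENT, "yes");

        // Standard (format-neutral) tree
        transformer.transform(
                new DOMSource(metadata.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName)),
                new StreamResult(System.out));

        // Native tree, if the plugin exposes one (for PSD: "com_twelvemonkeys_imageio_psd_image_1.0")
        if (metadata.getNativeMetadataFormatName() != null) {
            transformer.transform(
                    new DOMSource(metadata.getAsTree(metadata.getNativeMetadataFormatName())),
                    new StreamResult(System.out));
        }
    }
}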
@@ -28,6 +28,7 @@

package com.twelvemonkeys.imageio.plugins.psd;

import com.twelvemonkeys.imageio.stream.SubImageInputStream;
import com.twelvemonkeys.lang.StringUtil;

import javax.imageio.stream.ImageInputStream;
@@ -62,11 +63,16 @@ class PSDImageResource {
}

mSize = pInput.readUnsignedInt();
readData(pInput);
long startPos = pInput.getStreamPosition();

// TODO: Sanity check reading here?
readData(new SubImageInputStream(pInput, mSize));

// Data is even-padded
// NOTE: This should never happen, however it's safer to keep it here to
if (pInput.getStreamPosition() != startPos + mSize) {
pInput.seek(startPos + mSize);
}

// Data is even-padded (word aligned)
if (mSize % 2 != 0) {
pInput.read();
}
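A minimal sketch (not from this commit) of the resource framing that PSDImageResource enforces above: read the declared length, let a sub-parser consume at most that many bytes, then seek to the declared end and skip the pad byte that keeps resources word aligned.

import javax.imageio.stream.ImageInputStream;
import java.io.IOException;

final class ResourceFramingSketch {
    static void skipResource(ImageInputStream input) throws IOException {
        long size = input.readUnsignedInt();  // declared length of the resource data
        long start = input.getStreamPosition();

        // ... a real reader would parse up to 'size' bytes of resource data here ...

        // Reposition to the declared end, in case the parser consumed too little (or too much)
        input.seek(start + size);

        // The data section is padded to an even (word-aligned) length
        if (size % 2 != 0) {
            input.read();
        }
    }
}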
@@ -1,14 +1,13 @@

package com.twelvemonkeys.imageio.plugins.psd;

import com.twelvemonkeys.imageio.metadata.Directory;
import com.twelvemonkeys.imageio.metadata.Entry;
import com.twelvemonkeys.lang.StringUtil;
import com.twelvemonkeys.util.FilterIterator;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;

import javax.imageio.metadata.IIOInvalidTreeException;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.metadata.IIOMetadataFormatImpl;
import javax.imageio.metadata.IIOMetadataNode;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
@@ -24,7 +23,7 @@ import java.util.List;
* @author last modified by $Author: haraldk$
* @version $Id: PSDMetadata.java,v 1.0 Nov 4, 2009 5:28:12 PM haraldk Exp$
*/
public final class PSDMetadata extends IIOMetadata implements Cloneable {
public final class PSDMetadata extends AbstractMetadata {

// TODO: Decide on image/stream metadata...
static final String NATIVE_METADATA_FORMAT_NAME = "com_twelvemonkeys_imageio_psd_image_1.0";
@@ -60,98 +59,15 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {

static final String[] PRINT_SCALE_STYLES = {"centered", "scaleToFit", "userDefined"};

protected PSDMetadata() {
// TODO: Allow XMP, EXIF and IPTC as extra formats?
super(true, NATIVE_METADATA_FORMAT_NAME, NATIVE_METADATA_FORMAT_CLASS_NAME, null, null);
}

@Override
public boolean isReadOnly() {
// TODO: Extract to abstract metadata impl class?
return true;
}

@Override
public Node getAsTree(final String pFormatName) {
validateFormatName(pFormatName);

if (pFormatName.equals(nativeMetadataFormatName)) {
return getNativeTree();
}
else if (pFormatName.equals(IIOMetadataFormatImpl.standardMetadataFormatName)) {
return getStandardTree();
}

throw new AssertionError("Unreachable");
}

@Override
public void mergeTree(final String pFormatName, final Node pRoot) throws IIOInvalidTreeException {
// TODO: Extract to abstract metadata impl class?
assertMutable();

validateFormatName(pFormatName);

if (!pRoot.getNodeName().equals(nativeMetadataFormatName)) {
throw new IIOInvalidTreeException("Root must be " + nativeMetadataFormatName, pRoot);
}

Node node = pRoot.getFirstChild();
while (node != null) {
// TODO: Merge values from node into this

// Move to the next sibling
node = node.getNextSibling();
}
}

@Override
public void reset() {
// TODO: Extract to abstract metadata impl class?
assertMutable();

throw new UnsupportedOperationException("Method reset not implemented"); // TODO: Implement
}

// TODO: Extract to abstract metadata impl class?
private void assertMutable() {
if (isReadOnly()) {
throw new IllegalStateException("Metadata is read-only");
}
}

// TODO: Extract to abstract metadata impl class?
private void validateFormatName(final String pFormatName) {
String[] metadataFormatNames = getMetadataFormatNames();

if (metadataFormatNames != null) {
for (String metadataFormatName : metadataFormatNames) {
if (metadataFormatName.equals(pFormatName)) {
return; // Found, we're ok!
}
}
}

throw new IllegalArgumentException(
String.format("Bad format name: \"%s\". Expected one of %s", pFormatName, Arrays.toString(metadataFormatNames))
);
}

@Override
public Object clone() {
// TODO: Make it a deep clone
try {
return super.clone();
}
catch (CloneNotSupportedException e) {
throw new RuntimeException(e);
}
}

/// Native format support

private Node getNativeTree() {
@Override
protected Node getNativeTree() {
IIOMetadataNode root = new IIOMetadataNode(NATIVE_METADATA_FORMAT_NAME);

root.appendChild(createHeaderNode());
@@ -195,6 +111,18 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {
// TODO: Format spec
node = new IIOMetadataNode("ICCProfile");
node.setAttribute("colorSpaceType", JAVA_CS[profile.getProfile().getColorSpaceType()]);
//
// FastByteArrayOutputStream data = new FastByteArrayOutputStream(0);
// EncoderStream base64 = new EncoderStream(data, new Base64Encoder(), true);
//
// try {
// base64.write(profile.getProfile().getData());
// }
// catch (IOException ignore) {
// }
//
// byte[] bytes = data.toByteArray();
// node.setAttribute("data", StringUtil.decode(bytes, 0, bytes.length, "ASCII"));
node.setUserObject(profile.getProfile());
}
else if (imageResource instanceof PSDAlphaChannelInfo) {
@@ -215,10 +143,12 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {
node.setAttribute("colorSpace", DISPLAY_INFO_CS[displayInfo.mColorSpace]);

StringBuilder builder = new StringBuilder();

for (short color : displayInfo.mColors) {
if (builder.length() > 0) {
builder.append(" ");
}

builder.append(Integer.toString(color));
}

@@ -324,30 +254,65 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {
// Transcode to XMP? ;-)
PSDIPTCData iptc = (PSDIPTCData) imageResource;

node = new IIOMetadataNode("IPTC");
node = new IIOMetadataNode("Directory");
node.setAttribute("type", "IPTC");
node.setUserObject(iptc.mDirectory);

for (Entry entry : iptc.mDirectory) {
IIOMetadataNode tag = new IIOMetadataNode("Entry");
tag.setAttribute("tag", String.format("%d:%02d", (Integer) entry.getIdentifier() >> 8, (Integer) entry.getIdentifier() & 0xff));

String field = entry.getFieldName();
if (field != null) {
tag.setAttribute("field", String.format("%s", field));
}
tag.setAttribute("value", entry.getValueAsString());

String type = entry.getTypeName();
if (type != null) {
tag.setAttribute("type", type);
}
node.appendChild(tag);
}
}
else if (imageResource instanceof PSDEXIF1Data) {
// TODO: Revise/rethink this...
// Transcode to XMP? ;-)
PSDEXIF1Data exif = (PSDEXIF1Data) imageResource;

node = new IIOMetadataNode("EXIF");
node = new IIOMetadataNode("Directory");
node.setAttribute("type", "EXIF");
// TODO: Set byte[] data instead
node.setUserObject(exif.mDirectory);

appendEntries(node, exif.mDirectory);
}
else if (imageResource instanceof PSDXMPData) {
// TODO: Revise/rethink this... Would it be possible to parse XMP as IIOMetadataNodes? Or is that just stupid...
// Or maybe use the Directory approach used by IPTC and EXIF..
PSDXMPData xmp = (PSDXMPData) imageResource;

node = new IIOMetadataNode("XMP");

try {
// BufferedReader reader = new BufferedReader(xmp.getData());
// String line;
// while ((line = reader.readLine()) != null) {
// System.out.println(line);
// }
//
DocumentBuilder builder;
Document document;

DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = factory.newDocumentBuilder();
Document document = builder.parse(new InputSource(xmp.getData()));
factory.setNamespaceAware(true);
builder = factory.newDocumentBuilder();
document = builder.parse(new InputSource(xmp.getData()));

// Set the entire XMP document as user data
node.setUserObject(document);
// node.appendChild(document.getFirstChild());
}
catch (Exception e) {
e.printStackTrace();
@@ -355,7 +320,13 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {
}
else {
// Generic resource..
node = new IIOMetadataNode(PSDImageResource.resourceTypeForId(imageResource.mId));
node = new IIOMetadataNode("ImageResource");
String value = PSDImageResource.resourceTypeForId(imageResource.mId);
if (!"UnknownResource".equals(value)) {
node.setAttribute("name", value);
}
node.setAttribute("length", String.valueOf(imageResource.mSize));
// TODO: Set user object: byte array
}

// TODO: More resources
@@ -364,9 +335,36 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {
resource.appendChild(node);
}

// TODO: Layers and layer info

// TODO: Global mask etc..

return resource;
}

private void appendEntries(IIOMetadataNode pNode, final Directory pDirectory) {
for (Entry entry : pDirectory) {
IIOMetadataNode tag = new IIOMetadataNode("Entry");
tag.setAttribute("tag", String.format("%s", entry.getIdentifier()));

String field = entry.getFieldName();
if (field != null) {
tag.setAttribute("field", String.format("%s", field));
}

if (entry.getValue() instanceof Directory) {
appendEntries(tag, (Directory) entry.getValue());
tag.setAttribute("type", "Directory");
}
else {
tag.setAttribute("value", entry.getValueAsString());
tag.setAttribute("type", entry.getTypeName());
}

pNode.appendChild(tag);
}
}

/// Standard format support

@Override
@@ -461,7 +459,7 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {

private String getMultiChannelCS(short pChannels) {
if (pChannels < 16) {
return Integer.toHexString(pChannels) + "CLR";
return String.format("%xCLR", pChannels);
}

throw new UnsupportedOperationException("Standard meta data format does not support more than 15 channels");
@@ -469,88 +467,101 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {

@Override
protected IIOMetadataNode getStandardCompressionNode() {
IIOMetadataNode compression_node = new IIOMetadataNode("Compression");
IIOMetadataNode compressionNode = new IIOMetadataNode("Compression");
IIOMetadataNode node; // scratch node

node = new IIOMetadataNode("CompressionTypeName");
String compression;

switch (mCompression) {
case PSD.COMPRESSION_NONE:
compression = "none";
break;
case PSD.COMPRESSION_RLE:
compression = "packbits";
compression = "PackBits";
break;
case PSD.COMPRESSION_ZIP:
case PSD.COMPRESSION_ZIP_PREDICTION:
compression = "zip";
compression = "Deflate"; // TODO: ZLib? (TIFF native metadata format specifies both.. :-P)
break;
default:
throw new AssertionError("Unreachable");
}
node.setAttribute("value", compression);
compression_node.appendChild(node);

node.setAttribute("value", compression);
compressionNode.appendChild(node);

// TODO: Does it make sense to specify lossless for compression "none"?
node = new IIOMetadataNode("Lossless");
node.setAttribute("value", "true");
compression_node.appendChild(node);
compressionNode.appendChild(node);

return compression_node;
return compressionNode;
}

@Override
protected IIOMetadataNode getStandardDataNode() {
IIOMetadataNode data_node = new IIOMetadataNode("Data");
IIOMetadataNode dataNode = new IIOMetadataNode("Data");
IIOMetadataNode node; // scratch node

node = new IIOMetadataNode("PlanarConfiguration");
node.setAttribute("value", "PlaneInterleaved"); // TODO: Check with spec
data_node.appendChild(node);
dataNode.appendChild(node);

node = new IIOMetadataNode("SampleFormat");
node.setAttribute("value", mHeader.mMode == PSD.COLOR_MODE_INDEXED ? "Index" : "UnsignedIntegral");
data_node.appendChild(node);
dataNode.appendChild(node);

String bitDepth = Integer.toString(mHeader.mBits); // bits per plane

// TODO: Channels might be 5 for RGB + A + Mask...
String[] bps = new String[mHeader.mChannels];
Arrays.fill(bps, bitDepth);

node = new IIOMetadataNode("BitsPerSample");
node.setAttribute("value", StringUtil.toCSVString(bps, " "));
data_node.appendChild(node);
dataNode.appendChild(node);

// TODO: SampleMSB? Or is network (aka Motorola/big endian) byte order assumed?

return data_node;
return dataNode;
}

@Override
protected IIOMetadataNode getStandardDimensionNode() {
IIOMetadataNode dimension_node = new IIOMetadataNode("Dimension");
IIOMetadataNode dimensionNode = new IIOMetadataNode("Dimension");
IIOMetadataNode node; // scratch node

node = new IIOMetadataNode("PixelAspectRatio");
// TODO: This is not incorrect wrt resolution info
float ratio = 1f;
node.setAttribute("value", Float.toString(ratio));
dimension_node.appendChild(node);

// TODO: This is not correct wrt resolution info
float aspect = 1f;

Iterator<PSDPixelAspectRatio> ratios = getResources(PSDPixelAspectRatio.class);
if (ratios.hasNext()) {
PSDPixelAspectRatio ratio = ratios.next();
aspect = (float) ratio.mAspect;
}

node.setAttribute("value", Float.toString(aspect));
dimensionNode.appendChild(node);

node = new IIOMetadataNode("ImageOrientation");
node.setAttribute("value", "Normal");
dimension_node.appendChild(node);
dimensionNode.appendChild(node);

// TODO: If no PSDResolutionInfo, this might still be available in the EXIF data...
Iterator<PSDResolutionInfo> resolutionInfos = getResources(PSDResolutionInfo.class);
if (resolutionInfos.hasNext()) {
PSDResolutionInfo resolutionInfo = resolutionInfos.next();

node = new IIOMetadataNode("HorizontalPixelSize");
node.setAttribute("value", Float.toString(asMM(resolutionInfo.mHResUnit, resolutionInfo.mHRes)));
dimension_node.appendChild(node);
dimensionNode.appendChild(node);

node = new IIOMetadataNode("VerticalPixelSize");
node.setAttribute("value", Float.toString(asMM(resolutionInfo.mVResUnit, resolutionInfo.mVRes)));
dimension_node.appendChild(node);
dimensionNode.appendChild(node);
}

// TODO:
@@ -580,7 +591,7 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {
<!-- Data type: Integer -->

*/
return dimension_node;
return dimensionNode;
}
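The standard javax_imageio_1.0 format expresses HorizontalPixelSize and VerticalPixelSize in millimetres per pixel, so asMM(...) above presumably converts from the PSD resolution units. Its body is outside this hunk; the unit constants in the sketch below are assumptions, not the actual PSD values.

final class ResolutionSketch {
    // Hypothetical unit constants, for illustration only
    static final int UNIT_PIXELS_PER_INCH = 1;
    static final int UNIT_PIXELS_PER_CM = 2;

    static float pixelSizeMM(int unit, float resolution) {
        switch (unit) {
            case UNIT_PIXELS_PER_INCH:
                return 25.4f / resolution; // 1 inch = 25.4 mm
            case UNIT_PIXELS_PER_CM:
                return 10f / resolution;   // 1 cm = 10 mm
            default:
                throw new IllegalArgumentException("Unknown resolution unit: " + unit);
        }
    }
}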
private static float asMM(final short pUnit, final float pResolution) {
@@ -603,18 +614,18 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {
PSDEXIF1Data data = exif.next();

// Get the EXIF DateTime (aka ModifyDate) tag if present
PSDEXIF1Data.Entry dateTime = data.mDirectory.get(0x0132); // TODO: Constant
Entry dateTime = data.mDirectory.getEntryById(0x0132); // TODO: Constant
if (dateTime != null) {
node = new IIOMetadataNode("ImageModificationTime");
// Format: "YYYY:MM:DD hh:mm:ss" (with quotes! :-P)
node = new IIOMetadataNode("ImageCreationTime"); // As TIFF, but could just as well be ImageModificationTime
// Format: "YYYY:MM:DD hh:mm:ss"
String value = dateTime.getValueAsString();

node.setAttribute("year", value.substring(1, 5));
node.setAttribute("month", value.substring(6, 8));
node.setAttribute("day", value.substring(9, 11));
node.setAttribute("hour", value.substring(12, 14));
node.setAttribute("minute", value.substring(15, 17));
node.setAttribute("second", value.substring(18, 20));
node.setAttribute("year", value.substring(0, 4));
node.setAttribute("month", value.substring(5, 7));
node.setAttribute("day", value.substring(8, 10));
node.setAttribute("hour", value.substring(11, 13));
node.setAttribute("minute", value.substring(14, 16));
node.setAttribute("second", value.substring(17, 19));

document_node.appendChild(node);
}
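The hunk above slices the EXIF DateTime string ("YYYY:MM:DD hh:mm:ss") by hand. If an actual Date object is wanted instead, the same fixed format can be parsed with SimpleDateFormat; this snippet is a sketch, not code from this commit.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

final class ExifDateSketch {
    static Date parseDateTime(String value) throws ParseException {
        // EXIF DateTime values use the fixed pattern "YYYY:MM:DD hh:mm:ss"
        return new SimpleDateFormat("yyyy:MM:dd HH:mm:ss").parse(value);
    }
}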
@@ -625,61 +636,68 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {

@Override
protected IIOMetadataNode getStandardTextNode() {
// TODO: CaptionDigest?, EXIF, XMP
// TODO: TIFF uses
// DocumentName, ImageDescription, Make, Model, PageName, Software, Artist, HostComputer, InkNames, Copyright:
// /Text/TextEntry@keyword = field name, /Text/TextEntry@value = field value.
// Example: TIFF Software field => /Text/TextEntry@keyword = "Software",
// /Text/TextEntry@value = Name and version number of the software package(s) used to create the image.

Iterator<PSDImageResource> textResources = getResources(PSDEXIF1Data.class, PSDXMPData.class);
Iterator<PSDImageResource> textResources = getResources(PSDEXIF1Data.class, PSDIPTCData.class, PSDXMPData.class);

if (!textResources.hasNext()) {
return null;
}

IIOMetadataNode text = new IIOMetadataNode("Text");
IIOMetadataNode node;

// TODO: Alpha channel names? (PSDAlphaChannelInfo/PSDUnicodeAlphaNames)
// TODO: Reader/writer (PSDVersionInfo)

while (textResources.hasNext()) {
PSDImageResource textResource = textResources.next();

if (textResource instanceof PSDIPTCData) {
PSDIPTCData iptc = (PSDIPTCData) textResource;

for (Entry entry : iptc.mDirectory) {
node = new IIOMetadataNode("TextEntry");

if (entry.getValue() instanceof String) {
node.setAttribute("keyword", String.format("%s", entry.getFieldName()));
node.setAttribute("value", entry.getValueAsString());
text.appendChild(node);
}
}
}
else if (textResource instanceof PSDEXIF1Data) {
PSDEXIF1Data exif = (PSDEXIF1Data) textResource;

// TODO: Use name?
appendTextEntriesFlat(text, exif.mDirectory);
}
else if (textResource instanceof PSDXMPData) {
// TODO: Parse XMP (heavy) ONLY if we don't have required fields from IPTC/EXIF?
PSDXMPData xmp = (PSDXMPData) textResource;
}
}

// int numEntries = tEXt_keyword.size() +
// iTXt_keyword.size() + zTXt_keyword.size();
// if (numEntries == 0) {
// return null;
// }
//
// IIOMetadataNode text_node = new IIOMetadataNode("Text");
// IIOMetadataNode node = null; // scratch node
//
// for (int i = 0; i < tEXt_keyword.size(); i++) {
// node = new IIOMetadataNode("TextEntry");
// node.setAttribute("keyword", (String)tEXt_keyword.get(i));
// node.setAttribute("value", (String)tEXt_text.get(i));
// node.setAttribute("encoding", "ISO-8859-1");
// node.setAttribute("compression", "none");
//
// text_node.appendChild(node);
// }
//
// for (int i = 0; i < iTXt_keyword.size(); i++) {
// node = new IIOMetadataNode("TextEntry");
// node.setAttribute("keyword", iTXt_keyword.get(i));
// node.setAttribute("value", iTXt_text.get(i));
// node.setAttribute("language",
// iTXt_languageTag.get(i));
// if (iTXt_compressionFlag.get(i)) {
// node.setAttribute("compression", "deflate");
// } else {
// node.setAttribute("compression", "none");
// }
//
// text_node.appendChild(node);
// }
//
// for (int i = 0; i < zTXt_keyword.size(); i++) {
// node = new IIOMetadataNode("TextEntry");
// node.setAttribute("keyword", (String)zTXt_keyword.get(i));
// node.setAttribute("value", (String)zTXt_text.get(i));
// node.setAttribute("compression", "deflate");
//
// text_node.appendChild(node);
// }
//
// return text_node;
return null;
return text;
}

private void appendTextEntriesFlat(IIOMetadataNode pNode, Directory pDirectory) {
for (Entry entry : pDirectory) {
if (entry.getValue() instanceof Directory) {
appendTextEntriesFlat(pNode, (Directory) entry.getValue());
}
else if (entry.getValue() instanceof String) {
IIOMetadataNode tag = new IIOMetadataNode("TextEntry");
// TODO: Use name!
tag.setAttribute("keyword", String.format("%s", entry.getFieldName()));
tag.setAttribute("value", entry.getValueAsString());
pNode.appendChild(tag);
}
}
}
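On the consumer side, the /Text/TextEntry nodes produced by getStandardTextNode() above can be read back with plain DOM calls; the helper below is illustrative only and not part of the commit.

import javax.imageio.metadata.IIOMetadata;
import javax.imageio.metadata.IIOMetadataFormatImpl;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

final class TextEntrySketch {
    static void printTextEntries(IIOMetadata metadata) {
        Element root = (Element) metadata.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName);
        NodeList entries = root.getElementsByTagName("TextEntry");

        for (int i = 0; i < entries.getLength(); i++) {
            Element entry = (Element) entries.item(i);
            System.out.println(entry.getAttribute("keyword") + ": " + entry.getAttribute("value"));
        }
    }
}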
@Override
@@ -693,7 +711,7 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {
IIOMetadataNode node; // scratch node

node = new IIOMetadataNode("Alpha");
node.setAttribute("value", hasAlpha() ? "nonpremultipled" : "none"); // TODO: Check spec
node.setAttribute("value", hasAlpha() ? "nonpremultiplied" : "none"); // TODO: Check spec
transparency_node.appendChild(node);

return transparency_node;
@@ -731,4 +749,15 @@ public final class PSDMetadata extends IIOMetadata implements Cloneable {
}
});
}

@Override
public Object clone() {
// TODO: Make it a deep clone
try {
return super.clone();
}
catch (CloneNotSupportedException e) {
throw new RuntimeException(e);
}
}
}
@@ -44,7 +44,6 @@ public final class PSDMetadataFormat extends IIOMetadataFormatImpl {
// columns?
addAttribute("Header", "width", DATATYPE_INTEGER, true, null, "1", "30000", true, true);
addAttribute("Header", "bits", DATATYPE_INTEGER, true, null, Arrays.asList("1", "8", "16"));
// TODO: Consider using more readable names?!
addAttribute("Header", "mode", DATATYPE_STRING, true, null, Arrays.asList(PSDMetadata.COLOR_MODES));

/*
@@ -25,7 +25,7 @@ final class PSDUnicodeAlphaNames extends PSDImageResource {

long left = mSize;
while (left > 0) {
String name = PSDUtil.readUTF16String(pInput);
String name = PSDUtil.readUnicodeString(pInput);
mNames.add(name);
left -= name.length() * 2 + 4;
}
@@ -60,17 +60,27 @@ final class PSDUtil {
}

// TODO: Probably also useful for PICT reader, move to some common util?
// TODO: Is this REALLY different from the previous method? Maybe the pad should not be read..
static String readPascalString(final DataInput pInput) throws IOException {
int length = pInput.readUnsignedByte();

if (length == 0) {
return "";
}

byte[] bytes = new byte[length];
pInput.readFully(bytes);

return StringUtil.decode(bytes, 0, bytes.length, "ASCII");
}

static String readUTF16String(final DataInput pInput) throws IOException {
// TODO: Probably also useful for PICT reader, move to some common util?
static String readUnicodeString(final DataInput pInput) throws IOException {
int length = pInput.readInt();

if (length == 0) {
return "";
}

byte[] bytes = new byte[length * 2];
pInput.readFully(bytes);
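The tail of readUnicodeString() falls outside this hunk. A PSD Unicode string is a 4-byte character count followed by UTF-16BE code units, so a decode along the following lines is the likely remainder; the snippet is an assumption-based sketch, not the project's code.

import java.io.DataInput;
import java.io.IOException;

final class UnicodeStringSketch {
    static String readUnicodeString(DataInput input) throws IOException {
        int length = input.readInt();        // number of UTF-16 code units, not bytes

        if (length == 0) {
            return "";
        }

        byte[] bytes = new byte[length * 2]; // two bytes per code unit, big-endian
        input.readFully(bytes);

        return new String(bytes, "UTF-16BE");
    }
}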
@@ -35,8 +35,8 @@ final class PSDVersionInfo extends PSDImageResource {
mVersion = pInput.readInt();
mHasRealMergedData = pInput.readBoolean();

mWriter = PSDUtil.readUTF16String(pInput);
mReader = PSDUtil.readUTF16String(pInput);
mWriter = PSDUtil.readUnicodeString(pInput);
mReader = PSDUtil.readUnicodeString(pInput);

mFileVersion = pInput.readInt();
}