Diffstat (limited to 'isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn')
-rw-r--r--  isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/all-wcprops                            23
-rw-r--r--  isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/entries                               130
-rw-r--r--  isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/DummyMap.java.svn-base       84
-rw-r--r--  isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/MediaDataBox.java.svn-base  189
-rw-r--r--  isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/SampleList.java.svn-base    227
5 files changed, 0 insertions, 653 deletions
diff --git a/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/all-wcprops b/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/all-wcprops
deleted file mode 100644
index 238325a..0000000
--- a/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/all-wcprops
+++ /dev/null
@@ -1,23 +0,0 @@
-K 25
-svn:wc:ra_dav:version-url
-V 76
-/svn/!svn/ver/772/trunk/isoparser/src/main/java/com/coremedia/iso/boxes/mdat
-END
-MediaDataBox.java
-K 25
-svn:wc:ra_dav:version-url
-V 94
-/svn/!svn/ver/772/trunk/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/MediaDataBox.java
-END
-DummyMap.java
-K 25
-svn:wc:ra_dav:version-url
-V 90
-/svn/!svn/ver/377/trunk/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/DummyMap.java
-END
-SampleList.java
-K 25
-svn:wc:ra_dav:version-url
-V 92
-/svn/!svn/ver/671/trunk/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/SampleList.java
-END
diff --git a/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/entries b/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/entries
deleted file mode 100644
index 7dc6a8e..0000000
--- a/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/entries
+++ /dev/null
@@ -1,130 +0,0 @@
-10
-
-dir
-778
-http://mp4parser.googlecode.com/svn/trunk/isoparser/src/main/java/com/coremedia/iso/boxes/mdat
-http://mp4parser.googlecode.com/svn
-
-
-
-2012-09-01T02:22:41.253285Z
-772
-michael.stattmann@gmail.com
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-7decde4b-c250-0410-a0da-51896bc88be6
-
-MediaDataBox.java
-file
-
-
-
-
-2012-09-14T17:27:52.567248Z
-9805155611e85fdee88d368ada02bbdb
-2012-09-01T02:22:41.253285Z
-772
-michael.stattmann@gmail.com
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-7442
-
-DummyMap.java
-file
-
-
-
-
-2012-09-14T17:27:52.567248Z
-911189888371eb0f2ae03f4e019d33c5
-2012-03-05T23:28:24.666173Z
-377
-Sebastian.Annies@gmail.com
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-1722
-
-SampleList.java
-file
-
-
-
-
-2012-09-14T17:27:52.567248Z
-1986183baf9f90328a4a7131cf21897d
-2012-06-10T18:50:38.971172Z
-671
-michael.stattmann@gmail.com
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-9770
-
diff --git a/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/DummyMap.java.svn-base b/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/DummyMap.java.svn-base
deleted file mode 100644
index ab96023..0000000
--- a/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/DummyMap.java.svn-base
+++ /dev/null
@@ -1,84 +0,0 @@
-package com.coremedia.iso.boxes.mdat;
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * A SortedSet that contains just one value.
- */
-public class DummyMap<K, V> implements Map<K, V> {
- HashSet<K> keys = new HashSet<K>();
- V value;
-
- public DummyMap(V value) {
- this.value = value;
- }
-
- public Comparator<? super K> comparator() {
- return null; // I don't have any
- }
-
- public void addKeys(K[] keys) {
- Collections.addAll(this.keys, keys);
-
- }
-
- public int size() {
- return keys.size();
- }
-
- public boolean isEmpty() {
- return keys.isEmpty();
- }
-
- public boolean containsKey(Object key) {
- return keys.contains(key);
- }
-
- public boolean containsValue(Object value) {
- return this.value == value;
- }
-
- public V get(Object key) {
- return keys.contains(key) ? value : null;
- }
-
- public V put(K key, V value) {
- assert this.value == value;
- keys.add(key);
- return this.value;
- }
-
- public V remove(Object key) {
- V v = get(key);
- keys.remove(key);
- return v;
- }
-
- public void putAll(Map<? extends K, ? extends V> m) {
- for (K k : m.keySet()) {
- assert m.get(k) == value;
- this.keys.add(k);
- }
- }
-
- public void clear() {
- keys.clear();
- }
-
- public Set<K> keySet() {
- return keys;
- }
-
- public Collection<V> values() {
- throw new UnsupportedOperationException();
- }
-
- public Set<Entry<K, V>> entrySet() {
- throw new UnsupportedOperationException();
- }
-}
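For reference, the DummyMap removed above maps every key registered via addKeys() to the single value passed to its constructor; unregistered keys resolve to null. A minimal usage sketch, assuming the class as it stood before this deletion (the surrounding class and main method are purely illustrative):

import java.nio.ByteBuffer;

import com.coremedia.iso.boxes.mdat.DummyMap;

// Illustrative only: DummyMap resolves every registered key to the one
// shared value handed to its constructor; unregistered keys return null.
public class DummyMapSketch {
    public static void main(String[] args) {
        ByteBuffer shared = ByteBuffer.allocate(16);
        DummyMap<String, ByteBuffer> map = new DummyMap<String, ByteBuffer>(shared);
        map.addKeys(new String[]{"track-1", "track-2"});

        System.out.println(map.size());                    // 2
        System.out.println(map.get("track-1") == shared);  // true
        System.out.println(map.containsKey("track-3"));    // false
    }
}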
diff --git a/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/MediaDataBox.java.svn-base b/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/MediaDataBox.java.svn-base
deleted file mode 100644
index 5075a15..0000000
--- a/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/MediaDataBox.java.svn-base
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Copyright 2008 CoreMedia AG, Hamburg
- *
- * Licensed under the Apache License, Version 2.0 (the License);
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.coremedia.iso.boxes.mdat;
-
-import com.coremedia.iso.BoxParser;
-import com.coremedia.iso.ChannelHelper;
-import com.coremedia.iso.boxes.Box;
-import com.coremedia.iso.boxes.ContainerBox;
-import com.googlecode.mp4parser.AbstractBox;
-
-import java.io.IOException;
-import java.lang.ref.Reference;
-import java.lang.ref.SoftReference;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.channels.ReadableByteChannel;
-import java.nio.channels.WritableByteChannel;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.logging.Logger;
-
-import static com.googlecode.mp4parser.util.CastUtils.l2i;
-
-/**
- * This box contains the media data. In video tracks, this box would contain video frames. A presentation may
- * contain zero or more Media Data Boxes. The actual media data follows the type field; its structure is described
- * by the metadata (see {@link com.coremedia.iso.boxes.SampleTableBox}).<br>
- * In large presentations, it may be desirable to have more data in this box than a 32-bit size would permit. In this
- * case, the large variant of the size field is used.<br>
- * There may be any number of these boxes in the file (including zero, if all the media data is in other files). The
- * metadata refers to media data by its absolute offset within the file (see {@link com.coremedia.iso.boxes.StaticChunkOffsetBox});
- * so Media Data Box headers and free space may easily be skipped, and files without any box structure may
- * also be referenced and used.
- */
-public final class MediaDataBox implements Box {
- private static Logger LOG = Logger.getLogger(MediaDataBox.class.getName());
-
- public static final String TYPE = "mdat";
- public static final int BUFFER_SIZE = 10 * 1024 * 1024;
- ContainerBox parent;
-
- ByteBuffer header;
-
- // These fields are for the special case of a FileChannel as input.
- private FileChannel fileChannel;
- private long startPosition;
- private long contentSize;
-
-
- private Map<Long, Reference<ByteBuffer>> cache = new HashMap<Long, Reference<ByteBuffer>>();
-
-
- /**
- * If the whole content is just in one mapped buffer keep a strong reference to it so it is
- * not evicted from the cache.
- */
- private ByteBuffer content;
-
- public ContainerBox getParent() {
- return parent;
- }
-
- public void setParent(ContainerBox parent) {
- this.parent = parent;
- }
-
- public String getType() {
- return TYPE;
- }
-
- private static void transfer(FileChannel from, long position, long count, WritableByteChannel to) throws IOException {
- long maxCount = (64 * 1024 * 1024) - (32 * 1024);
- // Transfer data in chunks a bit less than 64MB
- // People state that this is a kind of magic number on Windows.
- // I don't care. The size seems reasonable.
- long offset = 0;
- while (offset < count) {
- offset += from.transferTo(position + offset, Math.min(maxCount, count - offset), to);
- }
- }
-
- public void getBox(WritableByteChannel writableByteChannel) throws IOException {
- if (fileChannel != null) {
- assert checkStillOk();
- transfer(fileChannel, startPosition - header.limit(), contentSize + header.limit(), writableByteChannel);
- } else {
- header.rewind();
- writableByteChannel.write(header);
- writableByteChannel.write(content);
- }
- }
-
- /**
- * If someone use the same file as source and sink it could the case that
- * inserting a few bytes before the mdat results in overwrting data we still
- * need to write this mdat here. This method just makes sure that we haven't already
- * overwritten the mdat contents.
- *
- * @return true if ok
- */
- private boolean checkStillOk() {
- try {
- fileChannel.position(startPosition - header.limit());
- ByteBuffer h2 = ByteBuffer.allocate(header.limit());
- fileChannel.read(h2);
- header.rewind();
- h2.rewind();
- assert h2.equals(header) : "It seems that the content I want to read has already been overwritten.";
- return true;
- } catch (IOException e) {
- e.printStackTrace();
- return false;
- }
-
- }
-
-
- public long getSize() {
- long size = header.limit();
- size += contentSize;
- return size;
- }
-
- public void parse(ReadableByteChannel readableByteChannel, ByteBuffer header, long contentSize, BoxParser boxParser) throws IOException {
- this.header = header;
- this.contentSize = contentSize;
-
- if (readableByteChannel instanceof FileChannel && (contentSize > AbstractBox.MEM_MAP_THRESHOLD)) {
- this.fileChannel = ((FileChannel) readableByteChannel);
- this.startPosition = ((FileChannel) readableByteChannel).position();
- ((FileChannel) readableByteChannel).position(((FileChannel) readableByteChannel).position() + contentSize);
- } else {
- content = ChannelHelper.readFully(readableByteChannel, l2i(contentSize));
- cache.put(0l, new SoftReference<ByteBuffer>(content));
- }
- }
-
- public synchronized ByteBuffer getContent(long offset, int length) {
-
- for (Long chacheEntryOffset : cache.keySet()) {
- if (chacheEntryOffset <= offset && offset <= chacheEntryOffset + BUFFER_SIZE) {
- ByteBuffer cacheEntry = cache.get(chacheEntryOffset).get();
- if ((cacheEntry != null) && ((chacheEntryOffset + cacheEntry.limit()) >= (offset + length))) {
- // CACHE HIT
- cacheEntry.position((int) (offset - chacheEntryOffset));
- ByteBuffer cachedSample = cacheEntry.slice();
- cachedSample.limit(length);
- return cachedSample;
- }
- }
- }
- // CACHE MISS
- ByteBuffer cacheEntry;
- try {
- // Just mapping 10MB at a time. Seems reasonable.
- cacheEntry = fileChannel.map(FileChannel.MapMode.READ_ONLY, startPosition + offset, Math.min(BUFFER_SIZE, contentSize - offset));
- } catch (IOException e1) {
- LOG.fine("Even mapping just 10MB of the source file into the memory failed. " + e1);
- throw new RuntimeException(
- "Delayed reading of mdat content failed. Make sure not to close " +
- "the FileChannel that has been used to create the IsoFile!", e1);
- }
- cache.put(offset, new SoftReference<ByteBuffer>(cacheEntry));
- cacheEntry.position(0);
- ByteBuffer cachedSample = cacheEntry.slice();
- cachedSample.limit(length);
- return cachedSample;
- }
-
-
- public ByteBuffer getHeader() {
- return header;
- }
-
-}
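The MediaDataBox removed above avoids loading a large mdat payload eagerly: getContent() serves samples from roughly 10MB memory-mapped windows that are cached behind SoftReferences, so the garbage collector can reclaim them under memory pressure. A standalone sketch of that windowed-mapping idea (the class name, window size constant, and simplified cache loop are assumptions for illustration, not the project's API):

import java.io.IOException;
import java.lang.ref.Reference;
import java.lang.ref.SoftReference;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.HashMap;
import java.util.Map;

// Illustrative sketch of the caching strategy in the removed MediaDataBox:
// map fixed-size read-only windows of the file on demand and hold them only
// via SoftReferences so they may be dropped when memory runs low.
public class WindowedMdatReader {
    private static final int WINDOW_SIZE = 10 * 1024 * 1024; // ~10MB per mapping

    private final FileChannel channel;
    private final long contentSize;
    private final Map<Long, Reference<ByteBuffer>> cache =
            new HashMap<Long, Reference<ByteBuffer>>();

    public WindowedMdatReader(FileChannel channel) throws IOException {
        this.channel = channel;
        this.contentSize = channel.size();
    }

    public synchronized ByteBuffer read(long offset, int length) throws IOException {
        // Cache hit: an already mapped window fully covers the requested range.
        for (Map.Entry<Long, Reference<ByteBuffer>> entry : cache.entrySet()) {
            long windowStart = entry.getKey();
            ByteBuffer window = entry.getValue().get();
            if (window != null && windowStart <= offset
                    && offset + length <= windowStart + window.limit()) {
                return slice(window, (int) (offset - windowStart), length);
            }
        }
        // Cache miss: map a new window starting at the requested offset.
        ByteBuffer window = channel.map(FileChannel.MapMode.READ_ONLY, offset,
                Math.min(WINDOW_SIZE, contentSize - offset));
        cache.put(offset, new SoftReference<ByteBuffer>(window));
        return slice(window, 0, length);
    }

    private static ByteBuffer slice(ByteBuffer window, int position, int length) {
        window.position(position);
        ByteBuffer sample = window.slice();
        sample.limit(length);
        return sample;
    }
}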
diff --git a/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/SampleList.java.svn-base b/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/SampleList.java.svn-base
deleted file mode 100644
index a7f7b59..0000000
--- a/isoparser/src/main/java/com/coremedia/iso/boxes/mdat/.svn/text-base/SampleList.java.svn-base
+++ /dev/null
@@ -1,227 +0,0 @@
-package com.coremedia.iso.boxes.mdat;
-
-import com.coremedia.iso.IsoFile;
-import com.coremedia.iso.boxes.*;
-import com.coremedia.iso.boxes.fragment.*;
-
-import java.nio.ByteBuffer;
-import java.util.*;
-
-import static com.googlecode.mp4parser.util.CastUtils.l2i;
-
-/**
- * Creates a list of <code>ByteBuffer</code>s that represent the samples of a given track.
- */
-public class SampleList extends AbstractList<ByteBuffer> {
-
-
- long[] offsets;
- long[] sizes;
-
- IsoFile isoFile;
- HashMap<MediaDataBox, Long> mdatStartCache = new HashMap<MediaDataBox, Long>();
- HashMap<MediaDataBox, Long> mdatEndCache = new HashMap<MediaDataBox, Long>();
- MediaDataBox[] mdats;
-
- /**
- * Gets a sorted random access optimized list of all sample offsets.
- * Basically it is a map from sample number to sample offset.
- *
- * @return the sorted list of sample offsets
- */
- public long[] getOffsetKeys() {
- return offsets;
- }
-
-
- public SampleList(TrackBox trackBox) {
- initIsoFile(trackBox.getIsoFile()); // where are we?
-
- // first we get all sample from the 'normal' MP4 part.
- // if there are none - no problem.
- SampleSizeBox sampleSizeBox = trackBox.getSampleTableBox().getSampleSizeBox();
- ChunkOffsetBox chunkOffsetBox = trackBox.getSampleTableBox().getChunkOffsetBox();
- SampleToChunkBox sampleToChunkBox = trackBox.getSampleTableBox().getSampleToChunkBox();
-
-
- final long[] chunkOffsets = chunkOffsetBox != null ? chunkOffsetBox.getChunkOffsets() : new long[0];
- if (sampleToChunkBox != null && sampleToChunkBox.getEntries().size() > 0 &&
- chunkOffsets.length > 0 && sampleSizeBox != null && sampleSizeBox.getSampleCount() > 0) {
- long[] numberOfSamplesInChunk = sampleToChunkBox.blowup(chunkOffsets.length);
-
- int sampleIndex = 0;
-
- if (sampleSizeBox.getSampleSize() > 0) {
- sizes = new long[l2i(sampleSizeBox.getSampleCount())];
- Arrays.fill(sizes, sampleSizeBox.getSampleSize());
- } else {
- sizes = sampleSizeBox.getSampleSizes();
- }
- offsets = new long[sizes.length];
-
- for (int i = 0; i < numberOfSamplesInChunk.length; i++) {
- long thisChunksNumberOfSamples = numberOfSamplesInChunk[i];
- long sampleOffset = chunkOffsets[i];
- for (int j = 0; j < thisChunksNumberOfSamples; j++) {
- long sampleSize = sizes[sampleIndex];
- offsets[sampleIndex] = sampleOffset;
- sampleOffset += sampleSize;
- sampleIndex++;
- }
- }
-
- }
-
- // Next we add all samples from the fragments
- // in most cases - I've never seen it different it's either normal or fragmented.
- List<MovieExtendsBox> movieExtendsBoxes = trackBox.getParent().getBoxes(MovieExtendsBox.class);
-
- if (movieExtendsBoxes.size() > 0) {
- Map<Long, Long> offsets2Sizes = new HashMap<Long, Long>();
- List<TrackExtendsBox> trackExtendsBoxes = movieExtendsBoxes.get(0).getBoxes(TrackExtendsBox.class);
- for (TrackExtendsBox trackExtendsBox : trackExtendsBoxes) {
- if (trackExtendsBox.getTrackId() == trackBox.getTrackHeaderBox().getTrackId()) {
- for (MovieFragmentBox movieFragmentBox : trackBox.getIsoFile().getBoxes(MovieFragmentBox.class)) {
- offsets2Sizes.putAll(getOffsets(movieFragmentBox, trackBox.getTrackHeaderBox().getTrackId(), trackExtendsBox));
- }
- }
- }
-
- if (sizes == null || offsets == null) {
- sizes = new long[0];
- offsets = new long[0];
- }
-
- splitToArrays(offsets2Sizes);
- }
-
- // We have now a map from all sample offsets to their sizes
- }
-
- private void splitToArrays(Map<Long, Long> offsets2Sizes) {
- List<Long> keys = new ArrayList<Long>(offsets2Sizes.keySet());
- Collections.sort(keys);
-
- long[] nuSizes = new long[sizes.length + keys.size()];
- System.arraycopy(sizes, 0, nuSizes, 0, sizes.length);
- long[] nuOffsets = new long[offsets.length + keys.size()];
- System.arraycopy(offsets, 0, nuOffsets, 0, offsets.length);
- for (int i = 0; i < keys.size(); i++) {
- nuOffsets[i + offsets.length] = keys.get(i);
- nuSizes[i + sizes.length] = offsets2Sizes.get(keys.get(i));
- }
- sizes = nuSizes;
- offsets = nuOffsets;
- }
-
- public SampleList(TrackFragmentBox traf) {
- sizes = new long[0];
- offsets = new long[0];
- Map<Long, Long> offsets2Sizes = new HashMap<Long, Long>();
- initIsoFile(traf.getIsoFile());
-
- final List<MovieFragmentBox> movieFragmentBoxList = isoFile.getBoxes(MovieFragmentBox.class);
-
- final long trackId = traf.getTrackFragmentHeaderBox().getTrackId();
- for (MovieFragmentBox moof : movieFragmentBoxList) {
- final List<TrackFragmentHeaderBox> trackFragmentHeaderBoxes = moof.getTrackFragmentHeaderBoxes();
- for (TrackFragmentHeaderBox tfhd : trackFragmentHeaderBoxes) {
- if (tfhd.getTrackId() == trackId) {
- offsets2Sizes.putAll(getOffsets(moof, trackId, null));
- }
- }
- }
- splitToArrays(offsets2Sizes);
- }
-
- private void initIsoFile(IsoFile isoFile) {
- this.isoFile = isoFile;
- // find all mdats first to be able to use them later with explicitly looking them up
- long currentOffset = 0;
- LinkedList<MediaDataBox> mdats = new LinkedList<MediaDataBox>();
- for (Box b : this.isoFile.getBoxes()) {
- long currentSize = b.getSize();
- if ("mdat".equals(b.getType())) {
- if (b instanceof MediaDataBox) {
- long contentOffset = currentOffset + ((MediaDataBox) b).getHeader().limit();
- mdatStartCache.put((MediaDataBox) b, contentOffset);
- mdatEndCache.put((MediaDataBox) b, contentOffset + currentSize);
- mdats.add((MediaDataBox) b);
- } else {
- throw new RuntimeException("Sample need to be in mdats and mdats need to be instanceof MediaDataBox");
- }
- }
- currentOffset += currentSize;
- }
- this.mdats = mdats.toArray(new MediaDataBox[mdats.size()]);
- }
-
-
- @Override
- public int size() {
- return sizes.length;
- }
-
-
- @Override
- public ByteBuffer get(int index) {
- // it is a two stage lookup: from index to offset to size
- long offset = offsets[index];
- int sampleSize = l2i(sizes[index]);
-
- for (MediaDataBox mediaDataBox : mdats) {
- long start = mdatStartCache.get(mediaDataBox);
- long end = mdatEndCache.get(mediaDataBox);
- if ((start <= offset) && (offset + sampleSize <= end)) {
- return mediaDataBox.getContent(offset - start, sampleSize);
- }
- }
-
- throw new RuntimeException("The sample with offset " + offset + " and size " + sampleSize + " is NOT located within an mdat");
- }
-
- Map<Long, Long> getOffsets(MovieFragmentBox moof, long trackId, TrackExtendsBox trex) {
- Map<Long, Long> offsets2Sizes = new HashMap<Long, Long>();
- List<TrackFragmentBox> traf = moof.getBoxes(TrackFragmentBox.class);
- for (TrackFragmentBox trackFragmentBox : traf) {
- if (trackFragmentBox.getTrackFragmentHeaderBox().getTrackId() == trackId) {
- long baseDataOffset;
- if (trackFragmentBox.getTrackFragmentHeaderBox().hasBaseDataOffset()) {
- baseDataOffset = trackFragmentBox.getTrackFragmentHeaderBox().getBaseDataOffset();
- } else {
- baseDataOffset = moof.getOffset();
- }
-
- for (TrackRunBox trun : trackFragmentBox.getBoxes(TrackRunBox.class)) {
- long sampleBaseOffset = baseDataOffset + trun.getDataOffset();
- final TrackFragmentHeaderBox tfhd = ((TrackFragmentBox) trun.getParent()).getTrackFragmentHeaderBox();
-
- long offset = 0;
- for (TrackRunBox.Entry entry : trun.getEntries()) {
- final long sampleSize;
- if (trun.isSampleSizePresent()) {
- sampleSize = entry.getSampleSize();
- offsets2Sizes.put(offset + sampleBaseOffset, sampleSize);
- offset += sampleSize;
- } else {
- if (tfhd.hasDefaultSampleSize()) {
- sampleSize = tfhd.getDefaultSampleSize();
- offsets2Sizes.put(offset + sampleBaseOffset, sampleSize);
- offset += sampleSize;
- } else {
- if (trex == null) {
- throw new RuntimeException("File doesn't contain trex box but track fragments aren't fully self contained. Cannot determine sample size.");
- }
- sampleSize = trex.getDefaultSampleSize();
- offsets2Sizes.put(offset + sampleBaseOffset, sampleSize);
- offset += sampleSize;
- }
- }
- }
- }
- }
- }
- return offsets2Sizes;
- }
-
-}
\ No newline at end of file
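Finally, the SampleList removed above derives one absolute file offset per sample from the sample table: it expands the sample-to-chunk entries into a per-chunk sample count, then walks each chunk while accumulating sample sizes. A minimal sketch of just that offset computation on plain arrays (the class, method, and array names are illustrative, not taken from the project):

import java.util.Arrays;

// Illustrative sketch of the per-sample offset computation in the removed
// SampleList constructor: within a chunk, samples are laid out back to back,
// so each sample's offset is the chunk offset plus the sizes of the samples
// that precede it in that chunk.
public class SampleOffsetsSketch {

    static long[] computeOffsets(long[] chunkOffsets, long[] samplesPerChunk, long[] sampleSizes) {
        long[] offsets = new long[sampleSizes.length];
        int sampleIndex = 0;
        for (int chunk = 0; chunk < samplesPerChunk.length; chunk++) {
            long offset = chunkOffsets[chunk];
            for (long j = 0; j < samplesPerChunk[chunk]; j++) {
                offsets[sampleIndex] = offset;
                offset += sampleSizes[sampleIndex];
                sampleIndex++;
            }
        }
        return offsets;
    }

    public static void main(String[] args) {
        // Two chunks at offsets 1000 and 5000, holding 2 and 1 samples respectively.
        long[] chunkOffsets = {1000, 5000};
        long[] samplesPerChunk = {2, 1};
        long[] sampleSizes = {100, 150, 200};
        System.out.println(Arrays.toString(
                computeOffsets(chunkOffsets, samplesPerChunk, sampleSizes)));
        // -> [1000, 1100, 5000]
    }
}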