Ver Fonte

webview 引入

Ryan8057 há 2 anos
pai
commit
a378a75a90
100 ficheiros alterados com 12265 adições e 6 exclusões
  1. 3 1
      BaseLibrary/build.gradle
  2. 7 2
      BaseLibrary/src/main/AndroidManifest.xml
  3. 1 1
      BaseLibrary/src/main/java/com/cooleshow/base/constanst/Constants.java
  4. 103 0
      BaseLibrary/src/main/java/com/cooleshow/base/recorder/AudioChunk.java
  5. 104 0
      BaseLibrary/src/main/java/com/cooleshow/base/recorder/AudioRecordConfig.java
  6. 113 0
      BaseLibrary/src/main/java/com/cooleshow/base/recorder/BaseDataRecorder.java
  7. 17 0
      BaseLibrary/src/main/java/com/cooleshow/base/recorder/MsRecorder.java
  8. 17 0
      BaseLibrary/src/main/java/com/cooleshow/base/recorder/PcmRecorder.java
  9. 134 0
      BaseLibrary/src/main/java/com/cooleshow/base/recorder/PcmToWavUtil.java
  10. 204 0
      BaseLibrary/src/main/java/com/cooleshow/base/recorder/PullTransport.java
  11. 36 0
      BaseLibrary/src/main/java/com/cooleshow/base/recorder/Recorder.java
  12. 98 0
      BaseLibrary/src/main/java/com/cooleshow/base/recorder/WavHeader.java
  13. 56 0
      BaseLibrary/src/main/java/com/cooleshow/base/recorder/WavRecorder.java
  14. 3 0
      BaseLibrary/src/main/java/com/cooleshow/base/router/RouterPath.kt
  15. 17 0
      BaseLibrary/src/main/java/com/cooleshow/base/service/MusicPlayContract.java
  16. 181 0
      BaseLibrary/src/main/java/com/cooleshow/base/service/MusicService.java
  17. 26 0
      BaseLibrary/src/main/java/com/cooleshow/base/service/PlayMusicReceiver.java
  18. 36 0
      BaseLibrary/src/main/java/com/cooleshow/base/service/PlayMusicService.java
  19. 5 0
      BaseLibrary/src/main/java/com/cooleshow/base/utils/HeadsetPlugListener.java
  20. 48 0
      BaseLibrary/src/main/java/com/cooleshow/base/utils/HeadsetPlugReceiver.java
  21. 6 0
      BaseLibrary/src/main/java/com/cooleshow/base/utils/helper/upload/UploadHelper.java
  22. 61 0
      BaseLibrary/src/main/java/com/cooleshow/base/websocket/JWebSocketClient.java
  23. 147 0
      BaseLibrary/src/main/java/com/cooleshow/base/widgets/ColorLinearLayout.java
  24. BIN
      BaseLibrary/src/main/res/drawable-xxhdpi/ic_accompany_permissions_centre.png
  25. BIN
      BaseLibrary/src/main/res/drawable-xxhdpi/ic_accompany_permissions_title.png
  26. BIN
      BaseLibrary/src/main/res/drawable-xxhdpi/ic_gray_close.png
  27. 9 0
      BaseLibrary/src/main/res/drawable/bg_grayf2_45dp_shape.xml
  28. 7 0
      BaseLibrary/src/main/res/drawable/bg_play_metronome_gray_dots_shape.xml
  29. 12 0
      BaseLibrary/src/main/res/drawable/bg_play_metronome_green_dots_shape.xml
  30. 7 0
      BaseLibrary/src/main/res/drawable/bg_play_metronome_white_dots_shape.xml
  31. 6 0
      BaseLibrary/src/main/res/drawable/btn_primary_default_shape.xml
  32. 113 0
      BaseLibrary/src/main/res/layout/accompany_permissions_popu.xml
  33. 68 0
      BaseLibrary/src/main/res/layout/dialog_student_precount.xml
  34. BIN
      BaseLibrary/src/main/res/raw/feeble.wav
  35. BIN
      BaseLibrary/src/main/res/raw/midstrong.wav
  36. 7 0
      BaseLibrary/src/main/res/values/attrs.xml
  37. 2 1
      BaseLibrary/src/main/res/values/colors.xml
  38. 1 1
      BaseLibrary/src/main/res/values/dimens.xml
  39. 2 0
      camerakit/.gitignore
  40. 33 0
      camerakit/build.gradle
  41. 26 0
      camerakit/src/main/AndroidManifest.xml
  42. 17 0
      camerakit/src/main/CMakeLists.txt
  43. 20 0
      camerakit/src/main/cpp/CMakeLists.txt
  44. 206 0
      camerakit/src/main/cpp/JniJpegTransformer.cpp
  45. 170 0
      camerakit/src/main/cpp/JniYuvOperator.cpp
  46. 214 0
      camerakit/src/main/cpp/camerakit/CameraSurfaceTexture.cpp
  47. 52 0
      camerakit/src/main/cpp/camerakit/CameraSurfaceTexture.hpp
  48. 204 0
      camerakit/src/main/cpp/camerakit/CameraSurfaceView.cpp
  49. 52 0
      camerakit/src/main/cpp/camerakit/CameraSurfaceView.hpp
  50. 110 0
      camerakit/src/main/cpp/jni_camera_surface_texture.cpp
  51. 119 0
      camerakit/src/main/cpp/jni_camera_surface_view.cpp
  52. 46 0
      camerakit/src/main/cpp/libjpeg/include/bmp.h
  53. 134 0
      camerakit/src/main/cpp/libjpeg/include/cderror.h
  54. 187 0
      camerakit/src/main/cpp/libjpeg/include/cdjpeg.h
  55. 131 0
      camerakit/src/main/cpp/libjpeg/include/config.h
  56. 198 0
      camerakit/src/main/cpp/libjpeg/include/cpu-features.h
  57. 47 0
      camerakit/src/main/cpp/libjpeg/include/jchuff.h
  58. 62 0
      camerakit/src/main/cpp/libjpeg/include/jconfig.h
  59. 184 0
      camerakit/src/main/cpp/libjpeg/include/jdct.h
  60. 235 0
      camerakit/src/main/cpp/libjpeg/include/jdhuff.h
  61. 314 0
      camerakit/src/main/cpp/libjpeg/include/jerror.h
  62. 91 0
      camerakit/src/main/cpp/libjpeg/include/jinclude.h
  63. 198 0
      camerakit/src/main/cpp/libjpeg/include/jmemsys.h
  64. 446 0
      camerakit/src/main/cpp/libjpeg/include/jmorecfg.h
  65. 26 0
      camerakit/src/main/cpp/libjpeg/include/jpegcomp.h
  66. 460 0
      camerakit/src/main/cpp/libjpeg/include/jpegint.h
  67. 1611 0
      camerakit/src/main/cpp/libjpeg/include/jpeglib.h
  68. 666 0
      camerakit/src/main/cpp/libjpeg/include/jsimd.h
  69. 199 0
      camerakit/src/main/cpp/libjpeg/include/jsimdcfg.inc.h
  70. 102 0
      camerakit/src/main/cpp/libjpeg/include/jsimddct.h
  71. 36 0
      camerakit/src/main/cpp/libjpeg/include/jversion.h
  72. 47 0
      camerakit/src/main/cpp/libjpeg/include/tjutil.h
  73. 217 0
      camerakit/src/main/cpp/libjpeg/include/transupp.h
  74. 897 0
      camerakit/src/main/cpp/libjpeg/include/turbojpeg.h
  75. 25 0
      camerakit/src/main/cpp/main.cpp
  76. 4 0
      camerakit/src/main/java/com/wonderkiln/camerakit/CameraConfig.java
  77. 65 0
      camerakit/src/main/java/com/wonderkiln/camerakit/CameraKit.java
  78. 4 0
      camerakit/src/main/java/com/wonderkiln/camerakit/CameraKitController.java
  79. 5 0
      camerakit/src/main/java/com/wonderkiln/camerakit/CameraKitHandler.java
  80. 11 0
      camerakit/src/main/java/com/wonderkiln/camerakit/CameraProperties.java
  81. 577 0
      camerakit/src/main/java/com/wonderkiln/camerakit/CameraView.java
  82. 76 0
      camerakit/src/main/java/com/wonderkiln/camerakit/FocusMarkerLayout.java
  83. 1147 0
      camerakit/src/main/java/com/wonderkiln/camerakit/api16/Camera1.java
  84. 104 0
      camerakit/src/main/java/com/wonderkiln/camerakit/api16/ConstantMapper.java
  85. 56 0
      camerakit/src/main/java/com/wonderkiln/camerakit/api16/ProcessStillTask.java
  86. 9 0
      camerakit/src/main/java/com/wonderkiln/camerakit/api21/Camera2.java
  87. 90 0
      camerakit/src/main/java/com/wonderkiln/camerakit/base/CameraImpl.java
  88. 89 0
      camerakit/src/main/java/com/wonderkiln/camerakit/base/CameraViewLayout.java
  89. 83 0
      camerakit/src/main/java/com/wonderkiln/camerakit/base/PreviewImpl.java
  90. 92 0
      camerakit/src/main/java/com/wonderkiln/camerakit/base/SurfaceViewContainer.java
  91. 112 0
      camerakit/src/main/java/com/wonderkiln/camerakit/base/SurfaceViewPreview.java
  92. 26 0
      camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitError.java
  93. 69 0
      camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitEvent.java
  94. 5 0
      camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitEventCallback.java
  95. 8 0
      camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitEventListener.java
  96. 21 0
      camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitEventListenerAdapter.java
  97. 23 0
      camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitImage.java
  98. 18 0
      camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitVideo.java
  99. 120 0
      camerakit/src/main/java/com/wonderkiln/camerakit/events/EventDispatcher.java
  100. 12 0
      camerakit/src/main/java/com/wonderkiln/camerakit/events/OnCameraKitEvent.java

+ 3 - 1
BaseLibrary/build.gradle

@@ -177,5 +177,7 @@ dependencies {
 
     api  'com.umeng.umsdk:share-sina:7.1.7'//新浪微博完整版
     api 'io.github.sinaweibosdk:core:11.11.1@aar'//新浪微博官方SDK依赖库,必选*/
-
+    api project(path: ':camerakit')
+    api 'org.java-websocket:Java-WebSocket:1.5.1'
+    api project(path: ':midiplaylib')
 }

+ 7 - 2
BaseLibrary/src/main/AndroidManifest.xml

@@ -2,11 +2,16 @@
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
     package="com.cooleshow.base">
 
+    <uses-permission android:name="android.permission.BLUETOOTH_CONNECT" />
     <uses-permission android:name="android.permission.INTERNET" />
     <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
     <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
+    <uses-permission android:name="android.permission.RECORD_AUDIO" />
     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
     <uses-permission android:name="android.permission.MANAGE_EXTERNAL_STORAGE"/>
-    <application />
-
+    <application >
+        <service
+            android:name=".service.PlayMusicService">
+        </service>
+    </application>
 </manifest>

+ 1 - 1
BaseLibrary/src/main/java/com/cooleshow/base/constanst/Constants.java

@@ -12,5 +12,5 @@ public class Constants {
     public static final String COLEXIUAPPA = "COLEXIUAPPA";//H5 js接口注册interfaceName
     public static final int DEFAULT_DATA_SIZE = 10;//加载更多默认一页请求数据
     public static final String WHITE_BOARD_ORIENTATION = "WHITE_BOARD_ORIENTATION";
-
+    public  static  String HEADSET_PLUE_TAG = "";
 }

+ 103 - 0
BaseLibrary/src/main/java/com/cooleshow/base/recorder/AudioChunk.java

@@ -0,0 +1,103 @@
+package com.cooleshow.base.recorder;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
/**
 * An AudioChunk is an audio data wrapper.
 *
 * <p>A chunk exposes the raw samples both as little-endian PCM bytes and as
 * 16-bit shorts, plus a peak-amplitude measurement for level metering.
 *
 * @author maple
 * @time 2018/4/10.
 */
public interface AudioChunk {
    /**
     * Peak amplitude of this chunk in dB relative to the internal reference
     * level. Returns 0 for a fully silent chunk.
     */
    double maxAmplitude();

    /** Raw samples as little-endian PCM bytes. */
    byte[] toBytes();

    /** Raw samples as 16-bit shorts. */
    short[] toShorts();


    abstract class AbstractAudioChunk implements AudioChunk {
        private static final double REFERENCE = 0.6;

        @Override
        public double maxAmplitude() {
            int nMaxAmp = 0;
            for (short sh : toShorts()) {
                // Compare magnitudes: a large negative excursion is just as loud
                // as a positive one. (The previous code only looked at positive
                // samples, so an all-negative chunk reported silence.)
                int amp = Math.abs((int) sh); // int math avoids abs(Short.MIN_VALUE) overflow
                if (amp > nMaxAmp) {
                    nMaxAmp = amp;
                }
            }
            if (nMaxAmp > 0) {
                return Math.abs(20 * Math.log10(nMaxAmp / REFERENCE));
            } else {
                return 0;
            }
        }
    }

    /**
     * Wrapper for byte[] data.
     */
    class Bytes extends AbstractAudioChunk {
        private byte[] bytes;

        Bytes(byte[] bytes) {
            this.bytes = bytes;
        }

        @Override
        public byte[] toBytes() {
            return bytes;
        }

        @Override
        public short[] toShorts() {
            // PCM data is little-endian: pair bytes up accordingly.
            short[] shorts = new short[bytes.length / 2];
            ByteBuffer.wrap(bytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shorts);
            return shorts;
        }
    }

    /**
     * Wrapper for short[] data.
     */
    class Shorts extends AbstractAudioChunk {
        private static final short SILENCE_THRESHOLD = 2700;// samples below this magnitude count as silence
        private short[] shorts;

        Shorts(short[] bytes) {
            this.shorts = bytes;
        }

        // True if any sample exceeds the silence threshold in either direction.
        boolean isOverSilence() {
            for (short sh : shorts) {
                if (sh > SILENCE_THRESHOLD || sh < -SILENCE_THRESHOLD) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public byte[] toBytes() {
            // Emit little-endian: low byte first, then high byte.
            byte[] buffer = new byte[shorts.length * 2];
            for (int i = 0; i < shorts.length; i++) {
                buffer[2 * i] = (byte) (shorts[i] & 0x00FF);
                buffer[2 * i + 1] = (byte) ((shorts[i] & 0xFF00) >> 8);
            }
            return buffer;
        }

        @Override
        public short[] toShorts() {
            return shorts;
        }

    }
}

+ 104 - 0
BaseLibrary/src/main/java/com/cooleshow/base/recorder/AudioRecordConfig.java

@@ -0,0 +1,104 @@
+package com.cooleshow.base.recorder;
+
+import android.media.AudioFormat;
+import android.media.MediaRecorder;
+
+/**
+ * 录音参数配置
+ *
+ * @author maple
+ * @time 2018/4/10.
+ */
+public class AudioRecordConfig {
+    /**
+     * 音频源,详见 {@link MediaRecorder.AudioSource}
+     */
+    private int audioSource = MediaRecorder.AudioSource.MIC;
+
+    /**
+     * 采样率 赫兹
+     * 44100Hz 所有设备均可用
+     * 22050Hz  16000Hz  11025Hz
+     */
+    private int sampleRateInHz = 44100;
+
+    /**
+     * 音频通道(声道数)
+     * {@link AudioFormat#CHANNEL_IN_MONO} 单声道
+     * {@link AudioFormat#CHANNEL_IN_STEREO} 立体声,所有设备可用
+     */
+    private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
+
+    /**
+     * 音频数据格式
+     * {@link AudioFormat#ENCODING_PCM_8BIT},每个样本8位
+     * {@link AudioFormat#ENCODING_PCM_16BIT},每个样本16位,保证所有设备支持
+     * {@link AudioFormat#ENCODING_PCM_FLOAT},每个样本 单精度Float
+     */
+    private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
+
+
+    public AudioRecordConfig() {
+    }
+
+    public AudioRecordConfig(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat) {
+        this.audioSource = audioSource;
+        this.sampleRateInHz = sampleRateInHz;
+        this.channelConfig = channelConfig;
+        this.audioFormat = audioFormat;
+    }
+
+    public byte bitsPerSample() {
+        if (audioFormat == AudioFormat.ENCODING_PCM_16BIT) {
+            return 16;
+        } else if (audioFormat == AudioFormat.ENCODING_PCM_8BIT) {
+            return 8;
+        } else {
+            return 16;
+        }
+    }
+
+    // -------------------------- get/set ----------------------------------
+
+    public int getChannelConfig() {
+        return channelConfig;
+    }
+
+    public void setChannelConfig(int channelConfig) {
+        this.channelConfig = channelConfig;
+    }
+
+    public int getAudioSource() {
+        return audioSource;
+    }
+
+    public void setAudioSource(int audioSource) {
+        this.audioSource = audioSource;
+    }
+
+    public int getSampleRateInHz() {
+        return sampleRateInHz;
+    }
+
+    public void setSampleRateInHz(int sampleRateInHz) {
+        this.sampleRateInHz = sampleRateInHz;
+    }
+
+    public int getAudioFormat() {
+        return audioFormat;
+    }
+
+    public void setAudioFormat(int audioFormat) {
+        this.audioFormat = audioFormat;
+    }
+
+    @Override
+    public String toString() {
+        return "录音参数配置: \n{" +
+                "audioSource=" + audioSource +
+                ", sampleRateInHz=" + sampleRateInHz +
+                ", channelConfig=" + channelConfig +
+                ", audioFormat=" + audioFormat +
+                '}';
+    }
+}

+ 113 - 0
BaseLibrary/src/main/java/com/cooleshow/base/recorder/BaseDataRecorder.java

@@ -0,0 +1,113 @@
+package com.cooleshow.base.recorder;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.media.AudioDeviceInfo;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.os.Build;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
/**
 * Base Recorder (only records the original/raw audio data).
 *
 * <p>Owns the {@link AudioRecord} lifecycle and delegates the actual data
 * pulling to the configured {@link PullTransport}. The pull loop runs on a
 * single-thread executor so it never blocks the caller.
 *
 * @author maple
 * @time 2018/4/10.
 */
public class BaseDataRecorder implements Recorder {
    protected PullTransport pullTransport;
    protected AudioRecordConfig config;
    protected int bufferSizeInBytes;// minimum AudioRecord buffer size, in bytes
    protected File file;// target output file (NOTE(review): never opened here — see outputStream below)

    private AudioRecord audioRecord;
    private OutputStream outputStream;
    private ExecutorService executorService = Executors.newSingleThreadExecutor();


    protected BaseDataRecorder(File file, AudioRecordConfig config, PullTransport pullTransport) {
        this.file = file;
        this.config = config;
        this.pullTransport = pullTransport;
        // Compute the minimum buffer size for the configured parameters.
        this.bufferSizeInBytes = AudioRecord.getMinBufferSize(
                config.getSampleRateInHz(),
                config.getChannelConfig(),
                config.getAudioFormat()
        );
    }

    @Override
    public void startRecording(Context context) {
        // Run the blocking pull loop off the caller's thread.
        executorService.submit(new Runnable() {
            @Override
            public void run() {
                startRecord(context);
            }
        });
    }

    @SuppressLint("MissingPermission")
    private void startRecord(Context context) {
        try {
            if (audioRecord == null) {
                audioRecord = new AudioRecord(config.getAudioSource(), config.getSampleRateInHz(),
                        config.getChannelConfig(), config.getAudioFormat(), bufferSizeInBytes);
            }
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                // Prefer the built-in microphone over any other input device.
                AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
                for (AudioDeviceInfo device : audioManager.getDevices(AudioManager.GET_DEVICES_INPUTS)) {
                    if (AudioDeviceInfo.TYPE_BUILTIN_MIC == device.getType()) {
                        audioRecord.setPreferredDevice(device);
                        break;
                    }

                }
            }
            // NOTE(review): the file stream is intentionally left unopened, so
            // outputStream is null when handed to the transport. Confirm the
            // transport in use never writes to it (PullTransport.Default has
            // its write commented out; PullTransport.Noise would NPE here).
//            if (outputStream == null) {
//                outputStream = new FileOutputStream(file);
//            }
            audioRecord.startRecording();
            pullTransport.isEnableToBePulled(true);
            pullTransport.startPoolingAndWriting(audioRecord, bufferSizeInBytes, outputStream);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public void pauseRecording() {
        // Stops the pull loop; the AudioRecord itself is not stopped here.
        pullTransport.isEnableToBePulled(false);
    }

    @Override
    public void resumeRecording(Context context) {
        startRecording(context);
    }

    @Override
    public void stopRecording() {
        pauseRecording();

        if (audioRecord != null) {
            // NOTE(review): AudioRecord.stop() throws IllegalStateException if
            // recording never started — confirm callers only stop after start.
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
        }
        if (outputStream != null) {
            try {
                outputStream.flush();
                outputStream.close();
                outputStream = null;
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

}

+ 17 - 0
BaseLibrary/src/main/java/com/cooleshow/base/recorder/MsRecorder.java

@@ -0,0 +1,17 @@
+package com.cooleshow.base.recorder;
+
+import java.io.File;
+
+public class MsRecorder {
+
+    private MsRecorder() {
+    }
+
+    public static Recorder pcm(File file, AudioRecordConfig config, PullTransport pullTransport) {
+        return new PcmRecorder(file, config, pullTransport);
+    }
+
+    public static Recorder wav(File file, AudioRecordConfig config, PullTransport pullTransport) {
+        return new WavRecorder(file, config, pullTransport);
+    }
+}

+ 17 - 0
BaseLibrary/src/main/java/com/cooleshow/base/recorder/PcmRecorder.java

@@ -0,0 +1,17 @@
+package com.cooleshow.base.recorder;
+
+import java.io.File;
+
/**
 * PCM format audio recorder.
 *
 * <p>Pure pass-through: all recording behaviour lives in
 * {@link BaseDataRecorder}; this subclass only names the raw-PCM variant.
 *
 * @author maple
 * @time 2018/4/10.
 */
public class PcmRecorder extends BaseDataRecorder {

    public PcmRecorder(File file, AudioRecordConfig config, PullTransport pullTransport) {
        super(file, config, pullTransport);
    }

}

+ 134 - 0
BaseLibrary/src/main/java/com/cooleshow/base/recorder/PcmToWavUtil.java

@@ -0,0 +1,134 @@
+package com.cooleshow.base.recorder;
+
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+public class PcmToWavUtil {
+
+
+    /**
+     * 缓存的音频大小
+     */
+    private int mBufferSize;
+    /**
+     * 采样率
+     */
+    private int mSampleRate;
+    /**
+     * 声道数
+     */
+    private int mChannel;
+
+
+    /**
+     * @param sampleRate sample rate、采样率
+     * @param channel channel、声道
+     * @param encoding Audio data format、音频格式
+     */
+    PcmToWavUtil(int sampleRate, int channel, int encoding) {
+        this.mSampleRate = sampleRate;
+        this.mChannel = channel;
+        this.mBufferSize = AudioRecord.getMinBufferSize(mSampleRate, mChannel, encoding);
+    }
+
+
+    /**
+     * pcm文件转wav文件
+     *
+     * @param inFilename 源文件路径
+     * @param outFilename 目标文件路径
+     */
+    public void pcmToWav(String inFilename, String outFilename) {
+        FileInputStream in;
+        FileOutputStream out;
+        long totalAudioLen;
+        long totalDataLen;
+        long longSampleRate = mSampleRate;
+        int channels = mChannel == AudioFormat.CHANNEL_IN_MONO ? 1 : 2;
+        long byteRate = 16 * mSampleRate * channels / 8;
+        byte[] data = new byte[mBufferSize];
+        try {
+            in = new FileInputStream(inFilename);
+            out = new FileOutputStream(outFilename);
+            totalAudioLen = in.getChannel().size();
+            totalDataLen = totalAudioLen + 36;
+
+            writeWaveFileHeader(out, totalAudioLen, totalDataLen,
+                    longSampleRate, channels, byteRate);
+            while (in.read(data) != -1) {
+                out.write(data);
+            }
+            in.close();
+            out.close();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+
+    /**
+     * 加入wav文件头
+     */
+    private void writeWaveFileHeader(FileOutputStream out, long totalAudioLen,
+                                     long totalDataLen, long longSampleRate, int channels, long byteRate)
+            throws IOException {
+        byte[] header = new byte[44];
+        // RIFF/WAVE header
+        header[0] = 'R';
+        header[1] = 'I';
+        header[2] = 'F';
+        header[3] = 'F';
+        header[4] = (byte) (totalDataLen & 0xff);
+        header[5] = (byte) ((totalDataLen >> 8) & 0xff);
+        header[6] = (byte) ((totalDataLen >> 16) & 0xff);
+        header[7] = (byte) ((totalDataLen >> 24) & 0xff);
+        //WAVE
+        header[8] = 'W';
+        header[9] = 'A';
+        header[10] = 'V';
+        header[11] = 'E';
+        // 'fmt ' chunk
+        header[12] = 'f';
+        header[13] = 'm';
+        header[14] = 't';
+        header[15] = ' ';
+        // 4 bytes: size of 'fmt ' chunk
+        header[16] = 16;
+        header[17] = 0;
+        header[18] = 0;
+        header[19] = 0;
+        // format = 1
+        header[20] = 1;
+        header[21] = 0;
+        header[22] = (byte) channels;
+        header[23] = 0;
+        header[24] = (byte) (longSampleRate & 0xff);
+        header[25] = (byte) ((longSampleRate >> 8) & 0xff);
+        header[26] = (byte) ((longSampleRate >> 16) & 0xff);
+        header[27] = (byte) ((longSampleRate >> 24) & 0xff);
+        header[28] = (byte) (byteRate & 0xff);
+        header[29] = (byte) ((byteRate >> 8) & 0xff);
+        header[30] = (byte) ((byteRate >> 16) & 0xff);
+        header[31] = (byte) ((byteRate >> 24) & 0xff);
+        // block align
+        header[32] = (byte) (2 * 16 / 8);
+        header[33] = 0;
+        // bits per sample
+        header[34] = 16;
+        header[35] = 0;
+        //data
+        header[36] = 'd';
+        header[37] = 'a';
+        header[38] = 't';
+        header[39] = 'a';
+        header[40] = (byte) (totalAudioLen & 0xff);
+        header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
+        header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
+        header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
+        out.write(header, 0, 44);
+    }
+}

+ 204 - 0
BaseLibrary/src/main/java/com/cooleshow/base/recorder/PullTransport.java

@@ -0,0 +1,204 @@
+package com.cooleshow.base.recorder;
+
+import android.media.AudioRecord;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.Log;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
/**
 * Pull transport for recorded audio data.
 * <p>
 * This type is the bus between the recorder and the output file:
 * it pulls data from an {@link AudioRecord} and transfers it to an
 * {@link OutputStream} to be written out. Each pull can be observed via
 * {@link OnAudioChunkPulledListener} and silence handled via
 * {@link OnSilenceListener}.
 *
 * @author maple
 * @time 2018/4/10.
 */
public interface PullTransport {
    // Enable/disable pulling; the pull loop exits when disabled.
    void isEnableToBePulled(boolean enabledToBePulled);

    // Start pulling audio data and writing it to the output stream.
    void startPoolingAndWriting(AudioRecord audioRecord, int pullSizeInBytes, OutputStream outputStream) throws IOException;

    /**
     * Silence listener.
     */
    interface OnSilenceListener {
        /**
         * @param silenceTime how long the input has been silent, in ms
         * @param discardTime how much of that silence was discarded, in ms
         */
        void onSilence(long silenceTime, long discardTime);
    }

    /**
     * Audio chunk pull listener.
     */
    interface OnAudioChunkPulledListener {
        /**
         * Called with each raw audio chunk pulled from the recorder.
         *
         * @param audioChunk the pulled audio data
         */
        void onAudioChunkPulled(AudioChunk audioChunk);
    }

    abstract class AbstractPullTransport implements PullTransport {
        volatile boolean pull;// loop flag; toggled from another thread, hence volatile
        OnAudioChunkPulledListener onAudioChunkPulledListener;
        Handler handler = new Handler(Looper.getMainLooper());// listeners are notified on the main thread

        AbstractPullTransport() {
        }

        @Override
        public void isEnableToBePulled(boolean enabledToBePulled) {
            this.pull = enabledToBePulled;
        }

        // Post the pulled raw chunk to the listener on the main thread.
        void postPullEvent(final AudioChunk audioChunk) {
            if (onAudioChunkPulledListener != null) {
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        onAudioChunkPulledListener.onAudioChunkPulled(audioChunk);
                    }
                });
            }
        }
    }

    class Default extends AbstractPullTransport {

        public Default() {
            super();
        }

        /**
         * Chunk listener; called back continuously with the recorded data.
         */
        public Default setOnAudioChunkPulledListener(OnAudioChunkPulledListener onAudioChunkPulledListener) {
            this.onAudioChunkPulledListener = onAudioChunkPulledListener;
            return this;
        }

        @Override
        public void startPoolingAndWriting(AudioRecord audioRecord, int pullSizeInBytes, OutputStream outputStream) throws IOException {
            Log.d("接收音频数据", "startPoolingAndWriting() called with: audioRecord = [" + audioRecord + "], pullSizeInBytes = [" + pullSizeInBytes + "], outputStream = [" + outputStream + "]");
            // NOTE(review): this logs only the array's identity, not its contents.
            Log.i("接收音频数据", "startPoolingAndWriting byte: " +  new byte[pullSizeInBytes]);
            AudioChunk audioChunk = new AudioChunk.Bytes(new byte[pullSizeInBytes]);
            while (pull) {
                // Read directly into the chunk's backing array.
                int count = audioRecord.read(audioChunk.toBytes(), 0, pullSizeInBytes);
                if (AudioRecord.ERROR_INVALID_OPERATION != count && AudioRecord.ERROR_BAD_VALUE != count) {
                    postPullEvent(audioChunk);// push the raw chunk to the listener
                    Log.i("接收音频数据", "startPoolingAndWriting: " +  audioChunk.toBytes().toString());
//                    outputStream.write(audioChunk.toBytes());// write the data to the file
                }
            }
        }
    }

    /**
     * Noise-gate mode (only passages that contain sound are recorded).
     */

    class Noise extends AbstractPullTransport {
        private OnSilenceListener onSilenceListener;
        private long pushTimeThreshold = 500;// silences shorter than this are not reported to the listener
        private long silenceTimeThreshold = 200;// tolerated silence; audio within this window is still written
        private long startSilenceMoment = 0;// timestamp when the current silence started
        private long silenceTime = 0;// how long the input has been silent, in ms
        private int writeCountAfterSilence = 0;// normal writes since the last silence

        public Noise() {
            super();
        }

        /**
         * Chunk listener; called back continuously with the recorded data.
         */
        public Noise setOnAudioChunkPulledListener(OnAudioChunkPulledListener onAudioChunkPulledListener) {
            this.onAudioChunkPulledListener = onAudioChunkPulledListener;
            return this;
        }

        /**
         * Silence listener; reports the accumulated silence duration once it
         * exceeds the push threshold.
         */
        public Noise setOnSilenceListener(OnSilenceListener onSilenceListener) {
            this.onSilenceListener = onSilenceListener;
            return this;
        }

        /**
         * Tolerated silence duration: within this window data is still written.
         */
        public Noise setSilenceTimeThreshold(long silenceTimeThreshold) {
            this.silenceTimeThreshold = silenceTimeThreshold;
            return this;
        }

        /**
         * Minimum silence duration worth reporting; shorter silences are not
         * pushed to the UI.
         */
        public Noise setPushTimeThreshold(long pushTimeThreshold) {
            this.pushTimeThreshold = pushTimeThreshold;
            return this;
        }

        @Override
        public void startPoolingAndWriting(AudioRecord audioRecord, int pullSizeInBytes, OutputStream outputStream) throws IOException {
            // NOTE(review): pullSizeInBytes is used here as a count of SHORTS,
            // so each pull reads twice as many bytes as the Default transport —
            // confirm this is intentional.
            AudioChunk.Shorts audioChunk = new AudioChunk.Shorts(new short[pullSizeInBytes]);
            while (pull) {
                int count = audioRecord.read(audioChunk.toShorts(), 0, pullSizeInBytes);
                if (AudioRecord.ERROR_INVALID_OPERATION != count && AudioRecord.ERROR_BAD_VALUE != count) {
                    postPullEvent(audioChunk);// push the raw chunk to the listener
                    Log.i("接收音频数据", "startPoolingAndWriting2: " +  audioChunk);
                    if (audioChunk.isOverSilence()) {// does the chunk exceed the silence threshold?
                        outputStream.write(audioChunk.toBytes());
                        writeCountAfterSilence++;
                        if (silenceTime > pushTimeThreshold) {
                            if (writeCountAfterSilence >= 3) {
                                // Silence exceeded the push threshold and at least
                                // 3 normal chunks were written before it: report
                                // the silence duration to the UI.
                                writeCountAfterSilence = 0;
                                postSilenceEvent(silenceTime, (silenceTime - silenceTimeThreshold));
                            }
                        }
                        startSilenceMoment = 0;
                        silenceTime = 0;
                    } else {
                        if (startSilenceMoment == 0) {// silence just started
                            startSilenceMoment = System.currentTimeMillis();
                        }
                        silenceTime = System.currentTimeMillis() - startSilenceMoment;// elapsed silence
                        if (silenceTime < silenceTimeThreshold) {
                            // Still within the tolerated window: keep writing.
                            outputStream.write(audioChunk.toBytes());
                        }
                    }

                }
            }
        }

        // Report the silence duration to the listener on the main thread.
        private void postSilenceEvent(final long silenceTime, final long discardTime) {
            if (onSilenceListener != null) {
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        onSilenceListener.onSilence(silenceTime, discardTime);
                    }
                });
            }
        }

    }

}

+ 36 - 0
BaseLibrary/src/main/java/com/cooleshow/base/recorder/Recorder.java

@@ -0,0 +1,36 @@
+package com.cooleshow.base.recorder;
+
+import android.content.Context;
+
/**
 * Recorder interface.
 * <p>
 * Implementations provide start ("startRecording"), pause ("pauseRecording"),
 * resume ("resumeRecording") and stop ("stopRecording").
 *
 * @author maple
 * @time 2018/4/10.
 */
public interface Recorder {

    /**
     * Start recording.
     *
     * @param context passed through to the implementation (e.g. to query
     *                audio input devices)
     */
    void startRecording(Context context);

    /**
     * Pause recording.
     */
    void pauseRecording();

    /**
     * Resume recording after a pause.
     */
    void resumeRecording(Context context);

    /**
     * Stop recording and release resources.
     */
    void stopRecording();


}

+ 98 - 0
BaseLibrary/src/main/java/com/cooleshow/base/recorder/WavHeader.java

@@ -0,0 +1,98 @@
+package com.cooleshow.base.recorder;
+
+import android.media.AudioFormat;
+
+/**
+ * WAV文件头工具类
+ *
+ * @author maple
+ * @time 2018/4/10.
+ */
+public class WavHeader {
+    private AudioRecordConfig config;// wav录音配置参数
+    private long totalAudioLength;// 音频数据总长度
+
+    WavHeader(AudioRecordConfig config, long totalAudioLength) {
+        this.config = config;
+        this.totalAudioLength = totalAudioLength;
+    }
+
+    /**
+     * 返回WAV文件头的byte数组
+     */
+    public byte[] toBytes() {
+        long sampleRateInHz = config.getSampleRateInHz();
+        int channels = (config.getChannelConfig() == AudioFormat.CHANNEL_IN_MONO ? 1 : 2);
+        byte bitsPerSample = config.bitsPerSample();
+        return wavFileHeader(
+                totalAudioLength - 44,
+                totalAudioLength - 44 + 36,
+                sampleRateInHz,
+                channels,
+                bitsPerSample * sampleRateInHz * channels / 8,
+                bitsPerSample
+        );
+    }
+
+    /**
+     * 获取wav文件头
+     *
+     * @param totalAudioLen  -
+     * @param totalDataLen   -
+     * @param longSampleRate - 采样率
+     * @param channels       - 通道数
+     * @param byteRate       -
+     * @param bitsPerSample  - 16/8 bit
+     * @return
+     */
+    private byte[] wavFileHeader(long totalAudioLen, long totalDataLen, long longSampleRate,
+                                 int channels, long byteRate, byte bitsPerSample) {
+        byte[] header = new byte[44];
+        header[0] = 'R'; // RIFF/WAVE header
+        header[1] = 'I';
+        header[2] = 'F';
+        header[3] = 'F';
+        header[4] = (byte) (totalDataLen & 0xff);
+        header[5] = (byte) ((totalDataLen >> 8) & 0xff);
+        header[6] = (byte) ((totalDataLen >> 16) & 0xff);
+        header[7] = (byte) ((totalDataLen >> 24) & 0xff);
+        header[8] = 'W';
+        header[9] = 'A';
+        header[10] = 'V';
+        header[11] = 'E';
+        header[12] = 'f'; // 'fmt ' chunk
+        header[13] = 'm';
+        header[14] = 't';
+        header[15] = ' ';
+        header[16] = 16; // 4 bytes: size of 'fmt ' chunk
+        header[17] = 0;
+        header[18] = 0;
+        header[19] = 0;
+        header[20] = 1; // format = 1
+        header[21] = 0;
+        header[22] = (byte) channels;
+        header[23] = 0;
+        header[24] = (byte) (longSampleRate & 0xff);
+        header[25] = (byte) ((longSampleRate >> 8) & 0xff);
+        header[26] = (byte) ((longSampleRate >> 16) & 0xff);
+        header[27] = (byte) ((longSampleRate >> 24) & 0xff);
+        header[28] = (byte) (byteRate & 0xff);
+        header[29] = (byte) ((byteRate >> 8) & 0xff);
+        header[30] = (byte) ((byteRate >> 16) & 0xff);
+        header[31] = (byte) ((byteRate >> 24) & 0xff);
+        header[32] = (byte) (channels * (bitsPerSample / 8)); //
+        // block align
+        header[33] = 0;
+        header[34] = bitsPerSample; // bits per sample
+        header[35] = 0;
+        header[36] = 'd';
+        header[37] = 'a';
+        header[38] = 't';
+        header[39] = 'a';
+        header[40] = (byte) (totalAudioLen & 0xff);
+        header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
+        header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
+        header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
+        return header;
+    }
+}

+ 56 - 0
BaseLibrary/src/main/java/com/cooleshow/base/recorder/WavRecorder.java

@@ -0,0 +1,56 @@
package com.cooleshow.base.recorder;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;

/**
 * WAV-format audio recorder.
 *
 * @author maple
 * @time 2018/4/10.
 */
public class WavRecorder extends BaseDataRecorder {

    /**
     * Constructor.
     *
     * @param file          destination file for the recording
     * @param config        recording parameter configuration
     * @param pullTransport transport that pulls audio data from the mic
     */
    public WavRecorder(File file, AudioRecordConfig config, PullTransport pullTransport) {
        super(file, config, pullTransport);
    }

    @Override
    public void stopRecording() {
        // NOTE(review): the wav-header rewrite below is deliberately commented out,
        // so stopping currently behaves exactly like BaseDataRecorder and no RIFF
        // header is patched into the file here. Presumably the header is produced
        // elsewhere (e.g. PcmToWavUtil) — confirm before re-enabling.
//        try {
            super.stopRecording();
//            writeWavHeader();
//        } catch (IOException e) {
//            throw new RuntimeException("Error in applying wav header", e);
//        }
    }

    /**
     * Writes the wav file header over the first 44 bytes of the output file.
     * Currently unused (see note in {@link #stopRecording()}).
     */
    private void writeWavHeader() throws IOException {
        RandomAccessFile wavFile = randomAccessFile(file);
        wavFile.seek(0); // to the beginning
        wavFile.write(new WavHeader(config, file.length()).toBytes());
        wavFile.close();
    }

    // Opens the file for read/write, converting the checked exception into a
    // RuntimeException since a missing output file is unrecoverable here.
    private RandomAccessFile randomAccessFile(File file) {
        RandomAccessFile randomAccessFile;
        try {
            randomAccessFile = new RandomAccessFile(file, "rw");
        } catch (FileNotFoundException e) {
            throw new RuntimeException(e);
        }
        return randomAccessFile;
    }
}

+ 3 - 0
BaseLibrary/src/main/java/com/cooleshow/base/router/RouterPath.kt

@@ -45,6 +45,9 @@ object RouterPath {
     class WebCenter {
         companion object {
             const val ACTIVITY_HTML = "/ui/activity/web/HtmlActivity"
+            const val ACTIVITY_ACCOMPANY_HTML = "/ui/activity/web/AccompanyActivity"
+            const val ACTIVITY_HORIZONTAL_SCREEN_HTML = "/ui/activity/web/HtmlHorizontalScreenActivity"
+
         }
     }
 

+ 17 - 0
BaseLibrary/src/main/java/com/cooleshow/base/service/MusicPlayContract.java

@@ -0,0 +1,17 @@
package com.cooleshow.base.service;

/**
 * Callback contract between {@link MusicService} and its bound client,
 * mirroring the underlying MediaPlayer lifecycle events.
 */
public interface MusicPlayContract {

    /** Invoked when the player has been prepared (or resumed) and playback starts. */
    void setOnPreparedListener();

    /** Invoked when the player reports a playback error. */
    void setOnErrorListener();

    /** Invoked when playback completes or is stopped. */
    void setOnCompletionListener();

    /** Invoked when playback is paused. */
    void setOnPauseListener();

    /**
     * Play-state change notification.
     *
     * @param isPlay true when playback is (now) running, false otherwise
     */
    void onPlay(boolean isPlay);



}

+ 181 - 0
BaseLibrary/src/main/java/com/cooleshow/base/service/MusicService.java

@@ -0,0 +1,181 @@
package com.cooleshow.base.service;

import android.app.Service;
import android.content.Intent;
import android.media.MediaPlayer;
import android.media.PlaybackParams;
import android.os.Binder;
import android.os.Build;
import android.os.IBinder;
import android.util.Log;

import java.io.IOException;

/**
 * Bound service wrapping a single {@link MediaPlayer} instance.
 * <p>
 * All playback operations are exposed through {@link MyBinder}; state changes
 * are reported to the optional {@link MusicPlayContract} callback.
 */
public class MusicService extends Service {
    /** UI callback; null until {@link MyBinder#setCallback(MusicPlayContract)} is invoked. */
    MusicPlayContract contract;

    /** Path of the currently loaded track; empty string when nothing is loaded. */
    public String path = "";

    private MediaPlayer mMediaPlayer;
    /** Optional speed/pitch parameters, applied on API 23+ before preparing. */
    private PlaybackParams playParam;

    @Override
    public IBinder onBind(Intent intent) {
        // Hand back the binder that carries all track operations.
        return new MyBinder();
    }

    @Override
    public void onCreate() {
        super.onCreate();
        // Runs once per service lifetime.
        Log.e("服务", "准备播放音乐");
    }

    @Override
    public void onDestroy() {
        // Fix: release the native MediaPlayer when the service dies; otherwise the
        // player instance (and the audio resources it holds) leaks.
        if (mMediaPlayer != null) {
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
        super.onDestroy();
    }

    /** Binder exposing all playback operations to bound clients. */
    public class MyBinder extends Binder {

        /** @return true while a player exists and is actively playing. */
        public boolean isPlaying() {
            return null == mMediaPlayer ? false : mMediaPlayer.isPlaying();
        }

        /** Stores playback parameters to apply on the next {@link #startPlaying(String)}. */
        public void setPlayParam(PlaybackParams param) {
            playParam = param;
        }

        /**
         * Toggles playback for {@code musicPath}.
         * <p>
         * Same track: pause if playing, otherwise start/resume. Different
         * track: restart playback from the beginning of the new track.
         *
         * @param isPlaying current play state as seen by the caller
         * @param musicPath track to operate on
         */
        public void onPlay(boolean isPlaying, String musicPath) {
            if (path.equals(musicPath)) {
                if (!isPlaying) {
                    if (mMediaPlayer == null) {
                        startPlaying(musicPath);
                    } else {
                        resumePlaying();
                    }
                } else {
                    pausePlaying();
                }
            } else {
                startPlaying(musicPath); // start from beginning
            }
        }

        /** Loads {@code musicPath} and starts playback once the player is prepared. */
        public void startPlaying(String musicPath) {
            path = musicPath;
            try {
                if (mMediaPlayer == null) {
                    // Lazily create the player and wire the lifecycle listeners once.
                    mMediaPlayer = new MediaPlayer();
                    mMediaPlayer.setOnPreparedListener(mp -> {
                        mp.start();
                        if (contract != null) {
                            contract.setOnPreparedListener();
                            contract.onPlay(true);
                        }
                    });
                    mMediaPlayer.setOnErrorListener((mediaPlayer, i, i1) -> {
                        if (contract != null) {
                            contract.setOnErrorListener();
                            contract.onPlay(false);
                        }
                        return false; // let the completion listener also fire
                    });
                    mMediaPlayer.setOnCompletionListener(mediaPlayer -> {
                        if (contract != null) {
                            contract.setOnCompletionListener();
                            contract.onPlay(false);
                        }
                    });
                }
                mMediaPlayer.reset();
                mMediaPlayer.setDataSource(musicPath);
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && playParam != null) {
                    try {
                        mMediaPlayer.setPlaybackParams(playParam);
                    } catch (Exception e) {
                        // Some devices reject params in this state; speed setting is best-effort.
                        e.printStackTrace();
                    }
                }
                // NOTE(review): prepare() is synchronous and may block on slow/remote
                // sources; kept as-is because callers may rely on the player being
                // ready when this method returns.
                mMediaPlayer.prepare();
            } catch (IOException | IllegalArgumentException | IllegalStateException e) {
                // Fix: setDataSource/prepare also throw unchecked argument/state errors
                // for bad paths — don't let them crash the service process.
                e.printStackTrace();
            }
        }

        /** Resumes a paused player and notifies the callback. No-op when no player exists. */
        public void resumePlaying() {
            if (mMediaPlayer != null) {
                mMediaPlayer.start();
                if (contract != null) {
                    contract.setOnPreparedListener();
                    contract.onPlay(true);
                }
            }
        }

        /** Stops playback, releases the player, and notifies the callback. */
        public void stopPlaying() {
            if (mMediaPlayer == null) {
                return;
            }
            mMediaPlayer.stop();
            mMediaPlayer.reset();
            mMediaPlayer.release();
            mMediaPlayer = null;
            if (contract != null) {
                contract.setOnCompletionListener();
                contract.onPlay(false);
            }
        }

        /** Pauses playback and notifies the callback. No-op when no player exists. */
        public void pausePlaying() {
            if (null != mMediaPlayer) {
                mMediaPlayer.pause();
                if (null != contract) {
                    contract.setOnPauseListener();
                    contract.onPlay(false);
                }
            }
        }

        /** @return track duration in milliseconds, or 0 when no player exists. */
        public int getDuration() {
            return null == mMediaPlayer ? 0 : mMediaPlayer.getDuration();
        }

        /**
         * @return current playback position in milliseconds, or 0 when no player exists.
         * (Method name keeps the original typo for compatibility with existing callers.)
         */
        public int getCurrenPostion() {
            return null == mMediaPlayer ? 0 : mMediaPlayer.getCurrentPosition();
        }

        /** Seeks to {@code mesc} milliseconds. No-op when no player exists. */
        public void seekTo(int mesc) {
            if (mMediaPlayer != null) {
                mMediaPlayer.seekTo(mesc);
            }
        }

        /** Registers the client callback that receives play-state notifications. */
        public void setCallback(MusicPlayContract callback) {
            contract = callback;
        }
    }


}
+

+ 26 - 0
BaseLibrary/src/main/java/com/cooleshow/base/service/PlayMusicReceiver.java

@@ -0,0 +1,26 @@
+package com.cooleshow.base.service;
+
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+
+import com.cooleshow.base.constanst.Constants;
+
+
+public class PlayMusicReceiver extends BroadcastReceiver {
+    @Override
+    public void onReceive(Context context, Intent intent) {
+        String action = intent.getAction();
+        if (action.equals(Intent.ACTION_HEADSET_PLUG)) {
+            if (intent.hasExtra("state")) {
+                if (intent.getIntExtra("state", 0) == 0) {
+                    //无耳机插入
+                    Constants.HEADSET_PLUE_TAG = "";
+                } else if (intent.getIntExtra("state", 0) == 1) {
+                    //有耳机插入
+                    Constants.HEADSET_PLUE_TAG = "有线耳机";
+                }
+            }
+        }
+    }
+}

+ 36 - 0
BaseLibrary/src/main/java/com/cooleshow/base/service/PlayMusicService.java

@@ -0,0 +1,36 @@
+package com.cooleshow.base.service;
+
+import android.app.Service;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.os.IBinder;
+
+public class PlayMusicService extends Service {
+    private final PlayMusicReceiver receiver = new PlayMusicReceiver();
+    private static final String TAG = "PlayMusicService";
+    @Override
+    public IBinder onBind(Intent intent)
+    {
+        return null;
+    }
+
+    @Override
+    public void onCreate()
+    {
+//        registerReceiver(mReceiver, intentFilter);//注册广播
+        registerReceiver(receiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
+        super.onCreate();
+
+    }
+
+    @Override
+    public void onDestroy()
+    {
+
+        unregisterReceiver(receiver);
+        super.onDestroy();
+//        Log.i(TAG, "Destroy Service");
+    }
+
+
+}

+ 5 - 0
BaseLibrary/src/main/java/com/cooleshow/base/utils/HeadsetPlugListener.java

@@ -0,0 +1,5 @@
package com.cooleshow.base.utils;

/**
 * Callback for headset connection changes.
 */
public interface HeadsetPlugListener {
    // isPlug: true = a headset is connected, false = no headset.
    // type: human-readable device kind as emitted by HeadsetPlugReceiver,
    //       e.g. "蓝牙耳机" (Bluetooth headset) or "有线耳机" (wired headset).
    void onHeadsetPlug(boolean isPlug, String type);
}

+ 48 - 0
BaseLibrary/src/main/java/com/cooleshow/base/utils/HeadsetPlugReceiver.java

@@ -0,0 +1,48 @@
+package com.cooleshow.base.utils;
+
+import android.annotation.SuppressLint;
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothHeadset;
+import android.bluetooth.BluetoothProfile;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.os.Build;
+
+public class HeadsetPlugReceiver extends BroadcastReceiver {
+
+    private HeadsetPlugListener mHeadsetPlugListener;
+
+    public HeadsetPlugReceiver(HeadsetPlugListener headsetPlugListener) {
+        this.mHeadsetPlugListener = headsetPlugListener;
+    }
+
+    @Override
+    public void onReceive(Context context, Intent intent) {
+        String action = intent.getAction();
+        //判断是否有蓝牙设备连接
+        if (BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED.equals(action)) {
+            BluetoothAdapter adapter = BluetoothAdapter.getDefaultAdapter();
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
+                @SuppressLint("MissingPermission") int state = adapter.getProfileConnectionState(BluetoothProfile.HEADSET);
+                if (BluetoothProfile.STATE_CONNECTED == state) {
+                    mHeadsetPlugListener.onHeadsetPlug(true,"蓝牙耳机");
+                }
+                if (BluetoothProfile.STATE_DISCONNECTED == state) {
+                    mHeadsetPlugListener.onHeadsetPlug(false, "蓝牙耳机");
+                }
+            }
+            //判断是否有有线耳机连接
+        } else if (Intent.ACTION_HEADSET_PLUG.equals(action)) {
+            if (intent.hasExtra("state")) {
+                if (intent.getIntExtra("state", 0) == 0) {
+                    // 外放
+                    mHeadsetPlugListener.onHeadsetPlug(false, "有线耳机");
+                } else if (intent.getIntExtra("state", 0) == 1) {
+                    // 耳机
+                    mHeadsetPlugListener.onHeadsetPlug(true, "有线耳机");
+                }
+            }
+        }
+    }
+}

+ 6 - 0
BaseLibrary/src/main/java/com/cooleshow/base/utils/helper/upload/UploadHelper.java

@@ -100,6 +100,12 @@ public class UploadHelper {
         }
         String name = FileUtils.getFileName(file);
         long timeStr = TimeUtils.getNowMills();
+        if (name.contains(".")){
+            String[] split = name.split("\\.");
+            name=timeStr+"."+split[split.length-1];
+        }else {
+            name=""+timeStr;
+        }
         String lastName = timeStr + "_" + name;
         String month = TimeUtils.date2String(TimeUtils.getNowDate(), TimeUtils.getSafeDateFormat("yyyy-MM"));
         String key = "androidtest/" + month + "/" + lastName;

+ 61 - 0
BaseLibrary/src/main/java/com/cooleshow/base/websocket/JWebSocketClient.java

@@ -0,0 +1,61 @@
package com.cooleshow.base.websocket;


import org.java_websocket.client.WebSocketClient;
import org.java_websocket.drafts.Draft_6455;
import org.java_websocket.handshake.ServerHandshake;

import java.net.URI;
import java.nio.ByteBuffer;
import java.util.Map;

/**
 * Minimal WebSocket client using the RFC 6455 draft implementation.
 * <p>
 * All lifecycle callbacks are currently no-ops — the commented-out bodies are
 * leftover debug logging. Subclass this class (or fill in the callbacks) to
 * actually react to connection events and incoming messages.
 */
public class JWebSocketClient extends WebSocketClient {
    // Scratch buffer referenced only by the commented-out debug logging below;
    // currently unused at runtime.
    StringBuilder sb;

    /**
     * @param serverUri   WebSocket endpoint to connect to
     * @param httpHeaders extra HTTP headers sent with the opening handshake
     */
    public JWebSocketClient(URI serverUri, Map<String, String> httpHeaders) {
        super(serverUri, new Draft_6455(), httpHeaders);
    }

    @Override
    public void onOpen(ServerHandshake handshakedata) {
        // Intentionally empty; previous debug logging kept for reference.
//        sb = new StringBuilder();
//        sb.append("onOpen at time:");
//        sb.append(new Date());
//        sb.append("服务器状态:");
//        sb.append(handshakedata.getHttpStatusMessage());
//        LOG.e("JWebSocketClient" + "onOpen:" + sb.toString());
    }

    @Override
    public void onMessage(String message) {
        // Intentionally empty: text frames are ignored by this base implementation.
    }

    @Override
    public void onMessage(ByteBuffer bytes) {
        // Intentionally empty: binary frames are ignored by this base implementation.
//        LOG.e("JWebSocketClient" + "onMessage" + bytes.toString());
    }

    @Override
    public void onClose(int code, String reason, boolean remote) {
        // Intentionally empty; previous debug logging kept for reference.
//        sb = new StringBuilder();
//        sb.append("onClose at time:");
//        sb.append(new Date());
//        sb.append("\n");
//        sb.append("onClose info:");
//        sb.append(code);
//        sb.append(reason);
//        sb.append(remote);
//        LOG.e("JWebSocketClient" + "onClose:" + sb.toString());


    }

    @Override
    public void onError(Exception ex) {
        // Intentionally empty; previous debug logging kept for reference.
//        sb = new StringBuilder();
//        sb.append("onError at time:");
//        sb.append(new Date());
//        sb.append("\n");
//        sb.append(ex);
//        LOG.e("JWebSocketClient" + "onError:" + sb.toString());
    }
}

+ 147 - 0
BaseLibrary/src/main/java/com/cooleshow/base/widgets/ColorLinearLayout.java

@@ -0,0 +1,147 @@
+package com.cooleshow.base.widgets;
+
+import android.content.Context;
+import android.content.res.TypedArray;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.graphics.RectF;
+import android.util.AttributeSet;
+import android.widget.LinearLayout;
+
+import com.cooleshow.base.R;
+
+
+public class ColorLinearLayout extends LinearLayout {
+    private int ctvBackgroundColor;
+    private int cllBorderColor;
+    private int cllBorderWidth;
+
+    public void setCllBorderColor(int cllBorderColor) {
+        this.cllBorderColor = cllBorderColor;
+        init();
+        postInvalidate();
+    }
+
+    public void setCllBorderWidth(int cllBorderWidth) {
+        this.cllBorderWidth = cllBorderWidth;
+        init();
+        postInvalidate();
+    }
+
+    public void setmCornerSize(int mCornerSize) {
+        this.mCornerSize = mCornerSize;
+    }
+
+    /**
+     * 圆角大小
+     */
+    private int mCornerSize;
+
+    /**
+     * 绘制时控制文本绘制的范围
+     */
+    private Rect mtitleBound;
+    private Paint mtitlePaint;
+
+    public ColorLinearLayout(Context context, AttributeSet attrs) {
+        this(context, attrs, 0);
+    }
+
+    public ColorLinearLayout(Context context) {
+        this(context, null);
+    }
+
+    public void setCtvBackgroundColor(int ctvBackgroundColor) {
+        this.ctvBackgroundColor = ctvBackgroundColor;
+        init();
+        postInvalidate();
+    }
+
+    /**
+     * 获得我自定义的样式属性
+     *
+     * @param context
+     * @param attrs
+     * @param defStyle
+     */
+    public ColorLinearLayout(Context context, AttributeSet attrs, int defStyle) {
+        super(context, attrs, defStyle);
+        setWillNotDraw(false);
+        /**
+         * 获得我们所定义的自定义样式属性
+         */
+        TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.ColorLinearLayout, defStyle, 0);
+        int n = a.getIndexCount();
+        for (int i = 0; i < n; i++) {
+            int attr = a.getIndex(i);
+            if (attr == R.styleable.ColorLinearLayout_cllBackground) {//默认为白色
+                ctvBackgroundColor = a.getColor(attr, Color.WHITE);
+            } else if (attr == R.styleable.ColorLinearLayout_cllCornerSize) {//默认圆角为0
+                mCornerSize = a.getDimensionPixelSize(attr, 0);
+            }
+            if (attr == R.styleable.ColorLinearLayout_cllBorderColor) {//默认圆角为0
+                cllBorderColor = a.getColor(attr, Color.WHITE);
+            }
+            if (attr == R.styleable.ColorLinearLayout_cllBorderWidth) {//默认圆角为0
+                cllBorderWidth = a.getDimensionPixelSize(attr, 0);
+            }
+
+        }
+        a.recycle();
+        mtitleBound = new Rect();
+        paint = new Paint(Paint.FILTER_BITMAP_FLAG);
+        paint.setAntiAlias(true);
+        paint.setColor(ctvBackgroundColor);
+        paint2 = new Paint(Paint.FILTER_BITMAP_FLAG);
+        paint2.setColor(cllBorderColor);
+        paint2.setAntiAlias(true);
+        paint2.setStrokeWidth(cllBorderWidth);
+        paint2.setStyle(Paint.Style.STROKE);
+    }
+
+    public void setBackgroundColor(int ctvBackgroundColor){
+        this.ctvBackgroundColor = ctvBackgroundColor;
+        paint.setColor(this.ctvBackgroundColor);
+        this.invalidate();
+
+    }
+    private void  init(){
+        paint = new Paint(Paint.FILTER_BITMAP_FLAG);
+        paint.setAntiAlias(true);
+        paint.setColor(ctvBackgroundColor);
+        paint2 = new Paint(Paint.FILTER_BITMAP_FLAG);
+        paint2.setColor(cllBorderColor);
+        paint2.setAntiAlias(true);
+        paint2.setStrokeWidth(cllBorderWidth);
+        paint2.setStyle(Paint.Style.STROKE);
+    }
+
+    Paint paint;
+    Paint paint2;
+    RectF rec;
+    RectF rec2;
+
+    @Override
+    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
+        super.onSizeChanged(w, h, oldw, oldh);
+        rec = new RectF(cllBorderWidth*1.0f/2, cllBorderWidth*1.0f/2,
+                getMeasuredWidth()-cllBorderWidth*1.0f/2,
+                getMeasuredHeight()-cllBorderWidth*1.0f/2);
+        rec2 = new RectF(cllBorderWidth*1.0f/2, cllBorderWidth*1.0f/2,
+                getMeasuredWidth()-cllBorderWidth*1.0f/2,
+                getMeasuredHeight()-cllBorderWidth*1.0f/2);
+    }
+
+    @Override
+    protected void onDraw(Canvas canvas) {
+        if (cllBorderWidth != 0) {
+            canvas.drawRoundRect(rec,mCornerSize,mCornerSize, paint);
+            canvas.drawRoundRect(rec2,mCornerSize,mCornerSize, paint2);
+        } else {
+            canvas.drawRoundRect(rec, mCornerSize, mCornerSize, paint);
+        }
+        super.onDraw(canvas);
+    }
+}

BIN
BaseLibrary/src/main/res/drawable-xxhdpi/ic_accompany_permissions_centre.png


BIN
BaseLibrary/src/main/res/drawable-xxhdpi/ic_accompany_permissions_title.png


BIN
BaseLibrary/src/main/res/drawable-xxhdpi/ic_gray_close.png


+ 9 - 0
BaseLibrary/src/main/res/drawable/bg_grayf2_45dp_shape.xml

@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="utf-8"?>
+<shape xmlns:android="http://schemas.android.com/apk/res/android"
+
+       android:shape="rectangle">
+    <solid android:color="@color/gray_F2F4F8"/>
+    <corners android:radius="@dimen/dp_45"/>
+
+
+</shape>

+ 7 - 0
BaseLibrary/src/main/res/drawable/bg_play_metronome_gray_dots_shape.xml

@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<shape xmlns:android="http://schemas.android.com/apk/res/android"
+    android:shape="oval">
+<size android:width="@dimen/dp_23"
+    android:height="@dimen/dp_23"/>
+    <solid android:color="#959595" />
+</shape>

+ 12 - 0
BaseLibrary/src/main/res/drawable/bg_play_metronome_green_dots_shape.xml

@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<shape xmlns:android="http://schemas.android.com/apk/res/android"
+    android:shape="oval">
+    <size
+        android:width="@dimen/dp_23"
+        android:height="@dimen/dp_23" />
+    <gradient
+        android:angle="0"
+        android:endColor="#00FFEF"
+        android:startColor="#2DC4BE" />
+
+</shape>

+ 7 - 0
BaseLibrary/src/main/res/drawable/bg_play_metronome_white_dots_shape.xml

@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<shape xmlns:android="http://schemas.android.com/apk/res/android"
+    android:shape="oval">
+<size android:width="@dimen/dp_23"
+    android:height="@dimen/dp_23"/>
+    <solid android:color="@color/white" />
+</shape>

+ 6 - 0
BaseLibrary/src/main/res/drawable/btn_primary_default_shape.xml

@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<shape xmlns:android="http://schemas.android.com/apk/res/android"
+       android:shape="rectangle">
+    <solid android:color="@color/colorPrimary"/>
+    <corners android:radius="45dp"/>
+</shape>

+ 113 - 0
BaseLibrary/src/main/res/layout/accompany_permissions_popu.xml

@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    xmlns:tools="http://schemas.android.com/tools"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:gravity="center"
+    android:orientation="vertical"
+    tools:ignore="MissingDefaultResource">
+
+    <androidx.constraintlayout.widget.ConstraintLayout
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content">
+
+
+        <androidx.constraintlayout.widget.ConstraintLayout
+            android:layout_width="@dimen/dp_264"
+            android:layout_height="wrap_content"
+            android:layout_marginTop="@dimen/dp_5"
+            android:background="@drawable/bg_white_18dp"
+            app:layout_constraintEnd_toEndOf="parent"
+            app:layout_constraintStart_toStartOf="parent"
+            app:layout_constraintTop_toTopOf="@id/tv_title">
+
+
+            <ImageView
+                android:id="@+id/iv_centre"
+                android:layout_width="@dimen/dp_156"
+                android:layout_height="@dimen/dp_94"
+                android:layout_marginTop="@dimen/dp_41"
+                android:background="@drawable/ic_accompany_permissions_centre"
+                app:layout_constraintEnd_toEndOf="parent"
+                app:layout_constraintStart_toStartOf="parent"
+                app:layout_constraintTop_toTopOf="parent" />
+
+            <TextView
+                android:id="@+id/tv_content"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginStart="@dimen/dp_15"
+                android:layout_marginTop="@dimen/dp_21"
+                android:layout_marginEnd="@dimen/dp_15"
+                android:gravity="center"
+                android:lineSpacingExtra="@dimen/dp_5"
+                android:textColor="@color/gray_ff80"
+                android:textSize="@dimen/dp_14"
+                app:layout_constraintEnd_toEndOf="parent"
+                app:layout_constraintStart_toStartOf="parent"
+                app:layout_constraintTop_toBottomOf="@id/iv_centre" />
+
+            <TextView
+                android:id="@+id/btn_cancel"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginStart="@dimen/dp_20"
+                android:layout_marginTop="@dimen/dp_24"
+                android:layout_marginEnd="@dimen/dp_30"
+                android:layout_marginBottom="@dimen/dp_15"
+                android:background="@drawable/bg_grayf2_45dp_shape"
+                android:paddingStart="@dimen/dp_30"
+                android:paddingTop="@dimen/dp_7"
+                android:paddingEnd="@dimen/dp_30"
+                android:paddingBottom="@dimen/dp_7"
+                android:text="不允许"
+                android:textColor="@color/colorPrimary"
+                android:textSize="@dimen/dp_14"
+                app:layout_constraintBottom_toBottomOf="parent"
+                app:layout_constraintEnd_toStartOf="@id/btn_commit"
+                app:layout_constraintStart_toStartOf="parent"
+                app:layout_constraintTop_toBottomOf="@id/tv_content" />
+
+            <TextView
+                android:id="@+id/btn_commit"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginStart="@dimen/dp_10"
+                android:layout_marginTop="@dimen/dp_24"
+                android:layout_marginEnd="@dimen/dp_20"
+                android:layout_marginBottom="@dimen/dp_15"
+                android:background="@drawable/btn_primary_default_shape"
+                android:paddingStart="@dimen/dp_30"
+                android:paddingTop="@dimen/dp_7"
+                android:paddingEnd="@dimen/dp_30"
+                android:paddingBottom="@dimen/dp_7"
+                android:text="去开启"
+                android:textColor="@color/white"
+                android:textSize="@dimen/dp_14"
+                app:layout_constraintBottom_toBottomOf="parent"
+                app:layout_constraintEnd_toEndOf="parent"
+                app:layout_constraintStart_toEndOf="@id/btn_cancel"
+                app:layout_constraintTop_toBottomOf="@id/tv_content" />
+
+        </androidx.constraintlayout.widget.ConstraintLayout>
+
+        <TextView
+            android:id="@+id/tv_title"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_marginBottom="@dimen/dp_5"
+            android:background="@drawable/ic_accompany_permissions_title"
+            android:gravity="center"
+            android:paddingBottom="@dimen/dp_5"
+            android:text="提示"
+            android:textColor="@color/white"
+            android:textSize="@dimen/dp_16"
+            app:layout_constraintEnd_toEndOf="parent"
+            app:layout_constraintStart_toStartOf="parent"
+            app:layout_constraintTop_toTopOf="parent" />
+
+
+    </androidx.constraintlayout.widget.ConstraintLayout>
+
+</LinearLayout>

+ 68 - 0
BaseLibrary/src/main/res/layout/dialog_student_precount.xml

@@ -0,0 +1,68 @@
+<?xml version="1.0" encoding="utf-8"?>
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:orientation="horizontal">
+
+    <com.cooleshow.base.widgets.ColorLinearLayout
+        android:layout_centerInParent="true"
+        android:id="@+id/cll_dialog_student_precount"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:gravity="center"
+        app:cllBackground="@color/transparent"
+        app:cllCornerSize="@dimen/dp_5">
+
+    <ImageView
+        android:id="@+id/iv_1"
+        android:layout_width="@dimen/dp_23"
+        android:layout_height="@dimen/dp_23"
+        android:src="@drawable/bg_play_metronome_gray_dots_shape" />
+
+    <ImageView
+        android:id="@+id/iv_2"
+        android:layout_width="@dimen/dp_23"
+        android:layout_height="@dimen/dp_23"
+        android:layout_marginLeft="@dimen/dp_10"
+        android:src="@drawable/bg_play_metronome_gray_dots_shape" />
+
+    <ImageView
+        android:id="@+id/iv_3"
+        android:layout_width="@dimen/dp_23"
+        android:layout_height="@dimen/dp_23"
+        android:layout_marginLeft="@dimen/dp_10"
+        android:src="@drawable/bg_play_metronome_gray_dots_shape" />
+
+    <ImageView
+        android:id="@+id/iv_4"
+        android:layout_width="@dimen/dp_23"
+        android:layout_height="@dimen/dp_23"
+        android:layout_marginLeft="@dimen/dp_10"
+        android:src="@drawable/bg_play_metronome_gray_dots_shape" />
+
+    <ImageView
+        android:id="@+id/iv_5"
+        android:layout_width="@dimen/dp_23"
+        android:layout_height="@dimen/dp_23"
+        android:layout_marginLeft="@dimen/dp_10"
+        android:src="@drawable/bg_play_metronome_gray_dots_shape" />
+
+    <ImageView
+        android:id="@+id/iv_6"
+        android:layout_width="@dimen/dp_23"
+        android:layout_height="@dimen/dp_23"
+        android:layout_marginLeft="@dimen/dp_10"
+        android:src="@drawable/bg_play_metronome_gray_dots_shape" />
+    </com.cooleshow.base.widgets.ColorLinearLayout>
+
+    <ImageView
+        android:padding="@dimen/dp_5"
+        android:layout_marginTop="@dimen/dp_20"
+        android:layout_marginRight="@dimen/dp_20"
+        android:id="@+id/iv_dialog_student_precount"
+        android:layout_alignParentRight="true"
+        android:layout_width="@dimen/dp_30"
+        android:layout_height="@dimen/dp_30"
+        android:src="@drawable/ic_gray_close"/>
+</RelativeLayout>

BIN
BaseLibrary/src/main/res/raw/feeble.wav


BIN
BaseLibrary/src/main/res/raw/midstrong.wav


+ 7 - 0
BaseLibrary/src/main/res/values/attrs.xml

@@ -32,4 +32,11 @@
         <attr name="qmui_is_touch_select_mode_enabled" format="boolean" />
     </declare-styleable>
     <attr name="QMUIRadiusImageViewStyle" format="reference" />
+
+    <declare-styleable name="ColorLinearLayout">
+        <attr name="cllBackground" format="color"/>
+        <attr name="cllCornerSize" format="dimension"/>
+        <attr name="cllBorderColor" format="color"/>
+        <attr name="cllBorderWidth" format="dimension"/>
+    </declare-styleable>
 </resources>

+ 2 - 1
BaseLibrary/src/main/res/values/colors.xml

@@ -108,5 +108,6 @@
     <color name="color_b2b2b2">#B2B2B2</color>
     <color name="color_90ffffff">#90ffffff</color>
     <color name="color_e2e2e2">#e2e2e2</color>
-
+    <color name="gray_ff80">#ff808080</color>
+    <color name="gray_F2F4F8">#F2F4F8</color>
 </resources>

+ 1 - 1
BaseLibrary/src/main/res/values/dimens.xml

@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="utf-8"?>
 <resources>
 
-    <dimen name="statusbar_view_height">0dp</dimen>
+    <dimen name="statusbar_view_height">35dp</dimen>
     <dimen name="common_divider_height">1px</dimen>
     <dimen name="common_divider_width">1px</dimen>
     <dimen name="common_header_bar_height">50dp</dimen>

+ 2 - 0
camerakit/.gitignore

@@ -0,0 +1,2 @@
+/build
+.externalNativeBuild

+ 33 - 0
camerakit/build.gradle

@@ -0,0 +1,33 @@
+apply plugin: 'com.android.library'
+android {
+    compileSdkVersion rootProject.ext.android['compileSdkVersion']
+    defaultConfig {
+        minSdkVersion rootProject.ext.android.minSdkVersion
+        targetSdkVersion rootProject.ext.android.targetSdkVersion
+        externalNativeBuild {
+            cmake {
+                cppFlags '-std=c++11'
+            }
+        }
+    }
+    externalNativeBuild {
+        cmake {
+            path 'src/main/cpp/CMakeLists.txt'
+        }
+    }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
+    }
+}
+
+dependencies {
+    implementation 'androidx.annotation:annotation:1.0.1'
+    implementation 'com.camerakit:jpegkit:0.2.2'
+    testImplementation 'junit:junit:4.12'
+    implementation 'com.google.android.gms:play-services-vision:10.2.1'
+}
+
+
+

+ 26 - 0
camerakit/src/main/AndroidManifest.xml

@@ -0,0 +1,26 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools"
+    package="com.camerakit">
+
+
+
+    <uses-feature
+        android:name="android.hardware.camera"
+        android:required="false" />
+    <uses-feature
+        android:name="android.hardware.camera.front"
+        android:required="false" />
+
+    <uses-feature
+        android:name="android.hardware.camera.autofocus"
+        android:required="false" />
+
+    <uses-feature
+        android:name="android.hardware.microphone"
+        android:required="false" />
+
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
+    <uses-permission android:name="android.permission.CAMERA" />
+    <uses-permission android:name="android.permission.RECORD_AUDIO" />
+
+</manifest>

+ 17 - 0
camerakit/src/main/CMakeLists.txt

@@ -0,0 +1,17 @@
+cmake_minimum_required(VERSION 3.4.1)
+
+set(JPEG_INC_DIR ${CMAKE_SOURCE_DIR}/cpp/libjpeg/include)
+set(JPEG_BUILD_DIR ${CMAKE_SOURCE_DIR}/jniLibs)
+
+include_directories(${JPEG_INC_DIR})
+
+add_library(libjpeg STATIC IMPORTED)
+set_target_properties(libjpeg PROPERTIES IMPORTED_LOCATION ${JPEG_BUILD_DIR}/${ANDROID_ABI}/libjpeg-turbo.a)
+
+add_library(jpegTransformer SHARED ${CMAKE_SOURCE_DIR}/cpp/JniJpegTransformer.cpp)
+add_library(yuvOperator SHARED ${CMAKE_SOURCE_DIR}/cpp/JniYuvOperator.cpp)
+
+find_library(log-lib log)
+find_library(jnigraphics-lib jnigraphics)
+
+target_link_libraries(jpegTransformer yuvOperator libjpeg ${log-lib} ${jnigraphics-lib})

+ 20 - 0
camerakit/src/main/cpp/CMakeLists.txt

@@ -0,0 +1,20 @@
+cmake_minimum_required(VERSION 3.4.1)
+set(CMAKE_CXX_STANDARD 14)
+
+add_library(camerakit-core SHARED
+        camerakit/CameraSurfaceTexture.cpp
+        camerakit/CameraSurfaceTexture.hpp
+        camerakit/CameraSurfaceView.cpp
+        camerakit/CameraSurfaceView.hpp)
+
+target_link_libraries(camerakit-core
+        PUBLIC -llog
+        PUBLIC -lGLESv2)
+
+add_library(camerakit SHARED
+        jni_camera_surface_texture.cpp
+        jni_camera_surface_view.cpp
+        main.cpp)
+
+target_link_libraries(camerakit
+        PUBLIC camerakit-core)

+ 206 - 0
camerakit/src/main/cpp/JniJpegTransformer.cpp

@@ -0,0 +1,206 @@
+#include <jni.h>
+#include <stdio.h>
+#include <cstring>
+#include <unistd.h>
+#include <turbojpeg.h>
+
+extern "C"
+{
+JNIEXPORT jobject JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniStoreJpeg
+        (JNIEnv *env, jobject obj, jbyteArray jpeg, jint jpegSize);
+
+JNIEXPORT jbyteArray JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniCommit
+        (JNIEnv *env, jobject obj, jobject handle);
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniReleaseJpeg
+        (JNIEnv *env, jobject obj, jobject handle);
+
+JNIEXPORT jint JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniGetWidth
+        (JNIEnv *env, jobject obj, jobject handle);
+
+JNIEXPORT jint JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniGetHeight
+        (JNIEnv *env, jobject obj, jobject handle);
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniRotate
+        (JNIEnv *env, jobject obj, jobject handle, jint degrees);
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniFlipHorizontal
+        (JNIEnv *env, jobject obj, jobject handle);
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniFlipVertical
+        (JNIEnv *env, jobject obj, jobject handle);
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniCrop
+        (JNIEnv *env, jobject obj, jobject handle, jint left, jint top, jint right, jint bottom);
+}
+
+class JpegTransformer {
+public:
+    unsigned char *jpeg;
+    unsigned long jpegSize;
+
+    int width;
+    int height;
+
+    bool flipHorizontal;
+    bool flipVertical;
+
+    int rotation;
+
+    int cropLeft = 0;
+    int cropTop = 0;
+    int cropWidth = 0;
+    int cropHeight = 0;
+
+    JpegTransformer() {
+        jpeg = NULL;
+        jpegSize = 0;
+
+        flipHorizontal = false;
+        flipVertical = false;
+
+        rotation = 0;
+
+        cropLeft = 0;
+        cropTop = 0;
+        cropWidth = 0;
+        cropHeight = 0;
+    }
+};
+
+JNIEXPORT jobject JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniStoreJpeg
+        (JNIEnv *env, jobject obj, jbyteArray jpeg, jint jpegSize) {
+    tjhandle tjHandle = tjInitDecompress();
+    jbyte *jpegBuffer = env->GetByteArrayElements(jpeg, 0);
+
+    if (!jpegBuffer) return NULL;
+
+    int width, height, jpegSubsamp;
+    int status = tjDecompressHeader2(
+            tjHandle,
+            (unsigned char *) jpegBuffer,
+            (unsigned long) jpegSize,
+            &width,
+            &height,
+            &jpegSubsamp
+    );
+
+    if (status != 0) {
+        return NULL;
+    }
+
+    JpegTransformer *jpegTransformer = new JpegTransformer();
+    jpegTransformer->width = width;
+    jpegTransformer->height = height;
+    jpegTransformer->jpeg = (unsigned char *) jpegBuffer;
+    jpegTransformer->jpegSize = (unsigned long) jpegSize;
+
+    tjDestroy(tjHandle);
+    return env->NewDirectByteBuffer(jpegTransformer, 0);
+}
+
+JNIEXPORT jbyteArray JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniCommit
+        (JNIEnv *env, jobject obj, jobject handle) {
+    JpegTransformer *jpegTransformer = (JpegTransformer *) env->GetDirectBufferAddress(handle);
+
+    unsigned char *jpeg = jpegTransformer->jpeg;
+    unsigned long jpegSize = jpegTransformer->jpegSize;
+
+    tjhandle tjHandle = tjInitTransform();
+
+    if (jpegTransformer->flipVertical) {
+        tjtransform *transform = new tjtransform();
+        transform->op = TJXOP_VFLIP;
+        tjTransform(tjHandle, jpeg, jpegSize, 1, &jpeg, &jpegSize, transform, 0);
+    }
+
+    if (jpegTransformer->flipHorizontal) {
+        tjtransform *transform = new tjtransform();
+        transform->op = TJXOP_HFLIP;
+        tjTransform(tjHandle, jpeg, jpegSize, 1, &jpeg, &jpegSize, transform, 0);
+    }
+
+    tjtransform *transform = new tjtransform();
+    if (jpegTransformer->rotation == 90) {
+        transform->op = TJXOP_ROT90;
+    } else if (jpegTransformer->rotation == 180) {
+        transform->op = TJXOP_ROT180;
+    } else if (jpegTransformer->rotation == 270) {
+        transform->op = TJXOP_ROT270;
+    }
+
+    if (jpegTransformer->cropWidth > 0) {
+        tjregion cropRegion;
+        cropRegion.x = jpegTransformer->cropLeft - (jpegTransformer->cropLeft % 16);
+        cropRegion.y = jpegTransformer->cropTop - (jpegTransformer->cropTop % 16);
+        cropRegion.w = jpegTransformer->cropWidth;
+        cropRegion.h = jpegTransformer->cropHeight;
+
+        transform->r = cropRegion;
+        transform->options = TJXOPT_CROP;
+    }
+
+    tjTransform(tjHandle, jpeg, jpegSize, 1, &jpeg, &jpegSize, transform, 0);
+    tjDestroy(tjHandle);
+
+    jbyteArray array = env->NewByteArray((jsize) jpegSize);
+    env->SetByteArrayRegion(array, 0, (jsize) jpegSize, reinterpret_cast<jbyte *>(jpeg));
+
+    return array;
+}
+
+JNIEXPORT jint JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniGetWidth
+        (JNIEnv *env, jobject obj, jobject handle) {
+    JpegTransformer *jpegTransformer = (JpegTransformer *) env->GetDirectBufferAddress(handle);
+    return jpegTransformer->width;
+}
+
+JNIEXPORT jint JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniGetHeight
+        (JNIEnv *env, jobject obj, jobject handle) {
+    JpegTransformer *jpegTransformer = (JpegTransformer *) env->GetDirectBufferAddress(handle);
+    return jpegTransformer->height;
+}
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniRotate
+        (JNIEnv *env, jobject obj, jobject handle, jint degrees) {
+    JpegTransformer *jpegTransformer = (JpegTransformer *) env->GetDirectBufferAddress(handle);
+    jpegTransformer->rotation = degrees;
+}
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniFlipHorizontal
+        (JNIEnv *env, jobject obj, jobject handle) {
+    JpegTransformer *jpegTransformer = (JpegTransformer *) env->GetDirectBufferAddress(handle);
+    jpegTransformer->flipHorizontal = true;
+}
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniFlipVertical
+        (JNIEnv *env, jobject obj, jobject handle) {
+    JpegTransformer *jpegTransformer = (JpegTransformer *) env->GetDirectBufferAddress(handle);
+    jpegTransformer->flipVertical = true;
+}
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_JpegTransformer_jniCrop
+        (JNIEnv *env, jobject obj, jobject handle, jint left, jint top, jint width, jint height) {
+    JpegTransformer *jpegTransformer = (JpegTransformer *) env->GetDirectBufferAddress(handle);
+    jpegTransformer->cropLeft = left;
+    jpegTransformer->cropTop = top;
+    jpegTransformer->cropWidth = width;
+    jpegTransformer->cropHeight = height;
+}

+ 170 - 0
camerakit/src/main/cpp/JniYuvOperator.cpp

@@ -0,0 +1,170 @@
+#include <jni.h>
+#include <stdio.h>
+#include <android/bitmap.h>
+#include <cstring>
+#include <unistd.h>
+#include <vector>
+
+#define  LOG_TAG    "DEBUG"
+#define  LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
+#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
+
+extern "C"
+{
+JNIEXPORT jobject JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniStoreYuvData(JNIEnv *env, jobject obj,
+                                                          jbyteArray yuv420sp, jint width,
+                                                          jint height);
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniRotateYuvCcw90(JNIEnv *env, jobject obj,
+                                                            jobject handle);
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniRotateYuvCw90(JNIEnv *env, jobject obj,
+                                                           jobject handle);
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniRotateYuv180(JNIEnv *env, jobject obj,
+                                                          jobject handle);
+JNIEXPORT jobject JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniGetYuvData(JNIEnv *env,
+                                                        jobject obj,
+                                                        jobject handle);
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniFreeYuvData(JNIEnv *env, jobject obj,
+                                                         jobject handle);
+}
+
+class JniYuvOperator {
+public:
+    unsigned char *_storedYuvData;
+    int _width;
+    int _height;
+    int _length;
+
+    JniYuvOperator() {
+        _storedYuvData = NULL;
+    }
+};
+
+JNIEXPORT jobject JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniStoreYuvData(JNIEnv *env, jobject obj,
+                                                          jbyteArray yuv420sp, jint width,
+                                                          jint height) {
+    int length = env->GetArrayLength(yuv420sp);
+    unsigned char *yuv = new unsigned char[length];
+    env->GetByteArrayRegion(yuv420sp, 0, length, reinterpret_cast<jbyte *>(yuv));
+
+    JniYuvOperator *yuvOperator = new JniYuvOperator();
+    yuvOperator->_storedYuvData = yuv;
+    yuvOperator->_width = width;
+    yuvOperator->_height = height;
+    yuvOperator->_length = length;
+    return env->NewDirectByteBuffer(yuvOperator, 0);
+}
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniRotateYuvCcw90(JNIEnv *env, jobject obj,
+                                                            jobject handle) {
+    JniYuvOperator *yuvOperator = (JniYuvOperator *) env->GetDirectBufferAddress(handle);
+    unsigned char *yuv = yuvOperator->_storedYuvData;
+    int width = yuvOperator->_width;
+    int height = yuvOperator->_height;
+    int length = yuvOperator->_length;
+
+    std::vector<unsigned char> yuvCopy(yuv, yuv + length);
+
+    int n = 0;
+    int uvHeight = height >> 1;
+    int wh = width * height;
+    for (int j = width - 1; j >= 0; j--) {
+        for (int i = 0; i < height; i++) {
+            yuv[n++] = yuvCopy[width * i + j];
+        }
+    }
+
+    for (int j = width - 1; j > 0; j -= 2) {
+        for (int i = 0; i < uvHeight; i++) {
+            yuv[n++] = yuvCopy[wh + width * i + j - 1];
+            yuv[n++] = yuvCopy[wh + width * i + j];
+        }
+    }
+}
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniRotateYuvCw90(JNIEnv *env, jobject obj,
+                                                           jobject handle) {
+    JniYuvOperator *yuvOperator = (JniYuvOperator *) env->GetDirectBufferAddress(handle);
+    unsigned char *yuv = yuvOperator->_storedYuvData;
+    int width = yuvOperator->_width;
+    int height = yuvOperator->_height;
+    int length = yuvOperator->_length;
+
+
+    std::vector<unsigned char> yuvCopy(yuv, yuv + length);
+
+    int wh = width * height;
+    int k = 0;
+    for (int i = 0; i < width; i++) {
+        for (int j = height - 1; j >= 0; j--) {
+            yuv[k] = yuvCopy[width * j + i];
+            k++;
+        }
+    }
+    for (int i = 0; i < width; i += 2) {
+        for (int j = height / 2 - 1; j >= 0; j--) {
+            yuv[k] = yuvCopy[wh + width * j + i];
+            yuv[k + 1] = yuvCopy[wh + width * j + i + 1];
+            k += 2;
+        }
+    }
+}
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniRotateYuv180(JNIEnv *env, jobject obj,
+                                                          jobject handle) {
+    JniYuvOperator *yuvOperator = (JniYuvOperator *) env->GetDirectBufferAddress(handle);
+    unsigned char *yuv = yuvOperator->_storedYuvData;
+    int width = yuvOperator->_width;
+    int height = yuvOperator->_height;
+    int length = yuvOperator->_length;
+
+    std::vector<unsigned char> yuvCopy(yuv, yuv + length);
+
+    int n = 0;
+    int uh = height >> 1;
+    int wh = width * height;
+    for (int j = height - 1; j >= 0; j--) {
+        for (int i = width - 1; i >= 0; i--) {
+            yuv[n++] = yuvCopy[width * j + i];
+        }
+    }
+
+    for (int j = uh - 1; j >= 0; j--) {
+        for (int i = width - 1; i > 0; i -= 2) {
+            yuv[n] = yuvCopy[wh + width * j + i - 1];
+            yuv[n + 1] = yuvCopy[wh + width * j + i];
+            n += 2;
+        }
+    }
+}
+
+JNIEXPORT jobject JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniGetYuvData(JNIEnv *env, jobject obj,
+                                                        jobject handle) {
+    JniYuvOperator *yuvOperator = (JniYuvOperator *) env->GetDirectBufferAddress(handle);
+    unsigned char *yuvData = yuvOperator->_storedYuvData;
+    int length = yuvOperator->_length;
+
+    jbyteArray array = env->NewByteArray(length);
+    env->SetByteArrayRegion(array, 0, length, reinterpret_cast<jbyte *>(yuvData));
+    return array;
+}
+
+JNIEXPORT void JNICALL
+Java_com_wonderkiln_camerakit_YuvOperator_jniFreeYuvData(JNIEnv *env, jobject obj,
+                                                         jobject handle) {
+    JniYuvOperator *yuvOperator = (JniYuvOperator *) env->GetDirectBufferAddress(handle);
+    if (yuvOperator->_storedYuvData == NULL) return;
+    delete[] yuvOperator->_storedYuvData;
+    yuvOperator->_storedYuvData = NULL;
+    delete yuvOperator;
+}

+ 214 - 0
camerakit/src/main/cpp/camerakit/CameraSurfaceTexture.cpp

@@ -0,0 +1,214 @@
+#include "CameraSurfaceTexture.hpp"
+
+namespace camerakit {
+
+  CameraSurfaceTexture::CameraSurfaceTexture(GLuint inputTexture, GLuint outputTexture)
+      : width(0),
+        height(0) {
+
+      this->inputTexture = inputTexture;
+      this->outputTexture = outputTexture;
+
+      glBindTexture(GL_TEXTURE_EXTERNAL_OES, inputTexture);
+      glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+      glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+      glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+      glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+      glGenBuffers(1, &vertexBuffer);
+      glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
+      glBufferData(GL_ARRAY_BUFFER, 24 * sizeof(GLfloat), VertexData(), GL_STATIC_DRAW);
+
+      GLuint program = CreateProgram(VertexShaderCode(), FragmentShaderCode());
+      if (!program) {
+          // TODO: throw here
+          return;
+      }
+
+      glUseProgram(program);
+
+      GLint aPosition = glGetAttribLocation(program, "aPosition");
+      GLint aTexCoord = glGetAttribLocation(program, "aTexCoord");
+      GLint uTransformMatrix = glGetUniformLocation(program, "uTransformMatrix");
+      GLint uRotationMatrix = glGetUniformLocation(program, "uRotationMatrix");
+
+      glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+
+      if (glGetError() != GL_NO_ERROR) {
+          glDeleteProgram(program);
+
+          // TODO: throw here
+          return;
+      }
+
+      this->program = program;
+      this->aPosition = aPosition;
+      this->aTexCoord = aTexCoord;
+      this->uTransformMatrix = uTransformMatrix;
+      this->uRotationMatrix = uRotationMatrix;
+  }
+
+  CameraSurfaceTexture::~CameraSurfaceTexture() {
+      if (vertexBuffer != 0) {
+          glDeleteBuffers(1, &vertexBuffer);
+          vertexBuffer = 0;
+      }
+  }
+
+  void CameraSurfaceTexture::setSize(int width, int height) {
+      this->width = width;
+      this->height = height;
+
+      if (glIsFramebuffer(framebuffer)) {
+          glDeleteFramebuffers(1, &framebuffer);
+      }
+
+      glGenFramebuffers(1, &framebuffer);
+      glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
+      glBindTexture(GL_TEXTURE_2D, outputTexture);
+
+      glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
+
+      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+      glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0);
+      glBindFramebuffer(GL_FRAMEBUFFER, 0);
+  }
+
+  void CameraSurfaceTexture::updateTexImage(float* transformMatrix, float* rotationMatrix) {
+      glViewport(0, 0, width, height);
+
+      glBindTexture(GL_TEXTURE_2D, 0);
+      glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
+
+      glDisable(GL_BLEND);
+      glBindTexture(GL_TEXTURE_EXTERNAL_OES, inputTexture);
+
+      glUseProgram(program);
+      glUniformMatrix4fv(uTransformMatrix, 1, GL_FALSE, transformMatrix);
+      glUniformMatrix4fv(uRotationMatrix, 1, GL_FALSE, rotationMatrix);
+      glVertexAttribPointer(aPosition, 4, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (const GLvoid*) (0 * sizeof(GLfloat)));
+      glEnableVertexAttribArray(aPosition);
+      glVertexAttribPointer(aTexCoord, 2, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (const GLvoid*) (4 * sizeof(GLfloat)));
+      glEnableVertexAttribArray(aTexCoord);
+      glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
+      glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, VertexIndices());
+  }
+
+  const GLfloat* CameraSurfaceTexture::VertexData() {
+      static const GLfloat vertexData[] = {
+          -1.0f, -1.0f, 0.0, 1.0, 0.0f, 0.0f,
+          +1.0f, -1.0f, 0.0, 1.0, 1.0f, 0.0f,
+          -1.0f, +1.0f, 0.0, 1.0, 0.0f, 1.0f,
+          +1.0f, +1.0f, 0.0, 1.0, 1.0f, 1.0f,
+      };
+
+      return vertexData;
+  }
+
+  const GLushort* CameraSurfaceTexture::VertexIndices() {
+      static const GLushort vertexIndices[] = {
+          0, 1, 2, 3
+      };
+
+      return vertexIndices;
+  }
+
+  const char* CameraSurfaceTexture::VertexShaderCode() {
+      static const char vertexShader[] =
+          "uniform mat4 uTransformMatrix;\n"
+          "uniform mat4 uRotationMatrix;\n"
+          "attribute vec4 aPosition;\n"
+          "attribute vec4 aTexCoord;\n"
+          "varying vec2 vTexCoord;\n"
+          "void main() {\n"
+          "    gl_Position = uRotationMatrix * aPosition;\n"
+          "    vTexCoord = (uTransformMatrix * aTexCoord).xy;\n"
+          "}\n";
+
+      return vertexShader;
+  }
+
+  const char* CameraSurfaceTexture::FragmentShaderCode() {
+      static const char fragmentShader[] =
+          "#extension GL_OES_EGL_image_external:require\n"
+          "precision mediump float;\n"
+          "uniform samplerExternalOES uTexture;\n"
+          "varying vec2 vTexCoord;\n"
+          "void main() {\n"
+          "    gl_FragColor = texture2D(uTexture, vTexCoord);\n"
+          "}\n";
+
+      return fragmentShader;
+  }
+
+  GLuint CameraSurfaceTexture::LoadShader(GLenum shaderType, const char* shaderCode) {
+      GLuint shader = glCreateShader(shaderType);
+      if (shader) {
+          glShaderSource(shader, 1, &shaderCode, NULL);
+          glCompileShader(shader);
+          GLint compileStatus = GL_FALSE;
+          glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
+          if (!compileStatus) {
+              GLint infoLength = 0;
+              glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLength);
+              if (infoLength) {
+                  char* infoBuffer = (char*) malloc((size_t) infoLength);
+                  if (infoBuffer) {
+                      glGetShaderInfoLog(shader, infoLength, NULL, infoBuffer);
+                      // todo: output log
+                      free(infoBuffer);
+                  }
+              }
+              glDeleteShader(shader);
+              shader = 0;
+          }
+      }
+      return shader;
+  }
+
+  GLuint CameraSurfaceTexture::CreateProgram(const char* vertexShaderCode,
+                                             const char* fragmentShaderCode) {
+      GLuint vertexShader = LoadShader(GL_VERTEX_SHADER, vertexShaderCode);
+      if (!vertexShader) {
+          return 0;
+      }
+
+      GLuint fragmentShader = LoadShader(GL_FRAGMENT_SHADER, fragmentShaderCode);
+      if (!fragmentShader) {
+          return 0;
+      }
+
+      GLuint program = glCreateProgram();
+      if (program) {
+          glAttachShader(program, vertexShader);
+          // TODO: check error and throw if needed
+
+          glAttachShader(program, fragmentShader);
+          // TODO: check error and throw if needed
+
+          glLinkProgram(program);
+          GLint linkStatus = GL_FALSE;
+          glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+          if (!linkStatus) {
+              GLint infoLength = 0;
+              glGetProgramiv(program, GL_INFO_LOG_LENGTH, &infoLength);
+              if (infoLength) {
+                  char* infoBuffer = (char*) malloc((size_t) infoLength);
+                  if (infoBuffer) {
+                      glGetProgramInfoLog(program, infoLength, NULL, infoBuffer);
+                      // todo: output log
+                      free(infoBuffer);
+                  }
+              }
+              glDeleteProgram(program);
+              program = 0;
+          }
+      }
+      return program;
+  }
+
+}

+ 52 - 0
camerakit/src/main/cpp/camerakit/CameraSurfaceTexture.hpp

@@ -0,0 +1,52 @@
+#pragma once
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <android/log.h>
+
+#include <stdlib.h>
+
+#define  LOG_TAG "CameraSurfaceTexture"
+#define  LOG_DEBUG(...)  __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
+#define  LOG_ERROR(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
+
+namespace camerakit {
+
+  class CameraSurfaceTexture {
+
+    public:
+      CameraSurfaceTexture(GLuint inputTexture, GLuint outputTexture);
+      virtual ~CameraSurfaceTexture();
+
+    public:
+      void setSize(int width, int height);
+      void updateTexImage(float* transformMatrix, float* rotationMatrix);
+
+    private:
+      int width;
+      int height;
+
+      GLuint inputTexture;
+      GLuint outputTexture;
+      GLuint framebuffer;
+      GLuint vertexBuffer;
+
+      GLuint program;
+      GLint aPosition;
+      GLint aTexCoord;
+      GLint uTransformMatrix;
+      GLint uRotationMatrix;
+
+    private:
+      static const GLfloat* VertexData();
+      static const GLushort* VertexIndices();
+
+      static const char* VertexShaderCode();
+      static const char* FragmentShaderCode();
+
+      static GLuint LoadShader(GLenum shaderType, const char* shaderCode);
+      static GLuint CreateProgram(const char* vertexShaderCode, const char* fragmentShaderCode);
+
+  };
+
+}

+ 204 - 0
camerakit/src/main/cpp/camerakit/CameraSurfaceView.cpp

@@ -0,0 +1,204 @@
+#include "CameraSurfaceView.hpp"
+
+namespace camerakit {
+
+  CameraSurfaceView::CameraSurfaceView()
+          : surfaceWidth(0),
+            surfaceHeight(0) {
+  }
+
+  CameraSurfaceView::~CameraSurfaceView() {
+      if (vertexBuffer != 0) {
+          glDeleteBuffers(1, &vertexBuffer);
+          vertexBuffer = 0;
+      }
+  }
+
+  void CameraSurfaceView::onSurfaceCreated() {
+      glGenBuffers(1, &vertexBuffer);
+      glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
+      glBufferData(GL_ARRAY_BUFFER, 24 * sizeof(GLfloat), VertexData(), GL_STATIC_DRAW);
+
+      GLuint program = CreateProgram(VertexShaderCode(), FragmentShaderCode());
+      if (!program) {
+          // TODO: throw here
+          return;
+      }
+
+      glUseProgram(program);
+
+      GLint aPosition = glGetAttribLocation(program, "aPosition");
+      GLint aTexCoord = glGetAttribLocation(program, "aTexCoord");
+
+      glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+
+      if (glGetError() != GL_NO_ERROR) {
+          glDeleteProgram(program);
+
+          // TODO: throw here
+          return;
+      }
+
+      this->program = program;
+      this->aPosition = aPosition;
+      this->aTexCoord = aTexCoord;
+  }
+
+  void CameraSurfaceView::onSurfaceChanged(int width, int height) {
+      this->surfaceWidth = width;
+      this->surfaceHeight = height;
+  }
+
+  void CameraSurfaceView::onDrawFrame() {
+      glBindFramebuffer(GL_FRAMEBUFFER, 0);
+
+      glClearColor(0.0, 0.0, 0.0, 1.0);
+      glClear(GL_COLOR_BUFFER_BIT);
+  }
+
+  void CameraSurfaceView::drawTexture(GLuint texture, int textureWidth, int textureHeight) {
+      glBindFramebuffer(GL_FRAMEBUFFER, 0);
+      glBindTexture(GL_TEXTURE_2D, texture);
+
+      int viewportX = 0;
+      int viewportY = 0;
+      int viewportWidth = surfaceWidth;
+      int viewportHeight = surfaceHeight;
+
+      int candidateWidth = (int) (((float) textureWidth / (float) textureHeight) * surfaceHeight);
+      int candidateHeight = (int) (((float) textureHeight / (float) textureWidth) * surfaceWidth);
+
+      if (candidateWidth > surfaceWidth) {
+          viewportX = -1 * (candidateWidth - surfaceWidth) / 2;
+          viewportWidth = candidateWidth;
+      } else if (candidateHeight > surfaceHeight) {
+          viewportY = -1 * (candidateHeight - surfaceHeight) / 2;
+          viewportHeight = candidateHeight;
+      }
+
+      glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
+
+      glUseProgram(program);
+      glVertexAttribPointer(aPosition, 4, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (const GLvoid*) (0 * sizeof(GLfloat)));
+      glEnableVertexAttribArray(aPosition);
+      glVertexAttribPointer(aTexCoord, 2, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (const GLvoid*) (4 * sizeof(GLfloat)));
+      glEnableVertexAttribArray(aTexCoord);
+      glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
+      glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, VertexIndices());
+      glFlush();
+  }
+
+  void CameraSurfaceView::abandon() {
+  }
+
+  const GLfloat* CameraSurfaceView::VertexData() {
+      static const GLfloat vertexData[] = {
+              -1.0f, -1.0f, 0.0, 1.0, 0.0f, 0.0f,
+              +1.0f, -1.0f, 0.0, 1.0, 1.0f, 0.0f,
+              -1.0f, +1.0f, 0.0, 1.0, 0.0f, 1.0f,
+              +1.0f, +1.0f, 0.0, 1.0, 1.0f, 1.0f,
+      };
+
+      return vertexData;
+  }
+
+  const GLushort* CameraSurfaceView::VertexIndices() {
+      static const GLushort vertexIndices[] = {
+              0, 1, 2, 3
+      };
+
+      return vertexIndices;
+  }
+
+  const char* CameraSurfaceView::VertexShaderCode() {
+      static const char vertexShader[] =
+              "attribute vec4 aPosition;\n"
+              "attribute vec4 aTexCoord;\n"
+              "varying vec2 vTexCoord;\n"
+              "void main() {\n"
+              "    gl_Position = aPosition;\n"
+              "    vTexCoord = aTexCoord.xy;\n"
+              "}\n";
+
+      return vertexShader;
+  }
+
+  const char* CameraSurfaceView::FragmentShaderCode() {
+      static const char fragmentShader[] =
+              "precision mediump float;\n"
+              "uniform sampler2D uTexture;\n"
+              "varying vec2 vTexCoord;\n"
+              "void main() {\n"
+              "    vec4 color = texture2D(uTexture, vTexCoord);\n"
+              "    gl_FragColor = color;\n"
+              "}\n";
+
+      return fragmentShader;
+  }
+
+  GLuint CameraSurfaceView::LoadShader(GLenum shaderType, const char* shaderCode) {
+      GLuint shader = glCreateShader(shaderType);
+      if (shader) {
+          glShaderSource(shader, 1, &shaderCode, NULL);
+          glCompileShader(shader);
+          GLint compileStatus = GL_FALSE;
+          glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
+          if (!compileStatus) {
+              GLint infoLength = 0;
+              glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLength);
+              if (infoLength) {
+                  char* infoBuffer = (char*) malloc((size_t) infoLength);
+                  if (infoBuffer) {
+                      glGetShaderInfoLog(shader, infoLength, NULL, infoBuffer);
+                      // todo: output log
+                      free(infoBuffer);
+                  }
+              }
+              glDeleteShader(shader);
+              shader = 0;
+          }
+      }
+      return shader;
+  }
+
+  GLuint CameraSurfaceView::CreateProgram(const char* vertexShaderCode, const char* fragmentShaderCode) {
+      GLuint vertexShader = LoadShader(GL_VERTEX_SHADER, vertexShaderCode);
+      if (!vertexShader) {
+          return 0;
+      }
+
+      GLuint fragmentShader = LoadShader(GL_FRAGMENT_SHADER, fragmentShaderCode);
+      if (!fragmentShader) {
+          return 0;
+      }
+
+      GLuint program = glCreateProgram();
+      if (program) {
+          glAttachShader(program, vertexShader);
+          // TODO: check error and throw if needed
+
+          glAttachShader(program, fragmentShader);
+          // TODO: check error and throw if needed
+
+          glLinkProgram(program);
+          GLint linkStatus = GL_FALSE;
+          glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+          if (!linkStatus) {
+              GLint infoLength = 0;
+              glGetProgramiv(program, GL_INFO_LOG_LENGTH, &infoLength);
+              if (infoLength) {
+                  char* infoBuffer = (char*) malloc((size_t) infoLength);
+                  if (infoBuffer) {
+                      glGetProgramInfoLog(program, infoLength, NULL, infoBuffer);
+                      // todo: output log
+                      free(infoBuffer);
+                  }
+              }
+              glDeleteProgram(program);
+              program = 0;
+          }
+      }
+      return program;
+  }
+
+}

+ 52 - 0
camerakit/src/main/cpp/camerakit/CameraSurfaceView.hpp

@@ -0,0 +1,52 @@
+#pragma once
+
+#include "CameraSurfaceTexture.hpp"
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <android/log.h>
+
+#include <stdlib.h>
+#include <memory.h>
+
+#define  LOG_TAG "CameraSurfaceView"
+#define  LOG_DEBUG(...)  __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
+#define  LOG_ERROR(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
+
+namespace camerakit {
+
+  class CameraSurfaceView {
+
+    public:
+      CameraSurfaceView();
+      virtual ~CameraSurfaceView();
+
+    public:
+      void onSurfaceCreated();
+      void onSurfaceChanged(int width, int height);
+      void onDrawFrame();
+      void drawTexture(GLuint texture, int textureWidth, int textureHeight);
+      void abandon();
+
+    private:
+      int surfaceWidth;
+      int surfaceHeight;
+
+      GLuint vertexBuffer;
+      GLuint program;
+      GLint aPosition;
+      GLint aTexCoord;
+
+    private:
+      static const GLfloat* VertexData();
+      static const GLushort* VertexIndices();
+
+      static const char* VertexShaderCode();
+      static const char* FragmentShaderCode();
+
+      static GLuint LoadShader(GLenum shaderType, const char* shaderCode);
+      static GLuint CreateProgram(const char* vertexShaderCode, const char* fragmentShaderCode);
+
+  };
+
+}

+ 110 - 0
camerakit/src/main/cpp/jni_camera_surface_texture.cpp

@@ -0,0 +1,110 @@
+/**
+ *
+ */
+
+#include <jni.h>
+
+#include "camerakit/CameraSurfaceTexture.hpp"
+
+namespace camerakit {
+
+  struct fields_t {
+      jfieldID nativeHandle;
+  };
+
+  static fields_t fields;
+
+  // ----------------------------------------------------------------------------------------
+
+  static void CameraSurfaceTexture_setCameraSurfaceTexture(JNIEnv* env, jobject thiz, const CameraSurfaceTexture* cameraSurfaceTexture) {
+      CameraSurfaceTexture* const currentObject
+          = (CameraSurfaceTexture*) env->GetLongField(thiz, fields.nativeHandle);
+
+      if (currentObject) {
+          // TODO: stop old
+      }
+
+      env->SetLongField(thiz, fields.nativeHandle, reinterpret_cast<jlong>(cameraSurfaceTexture));
+  }
+
+  static CameraSurfaceTexture* CameraSurfaceTexture_getCameraSurfaceTexture(JNIEnv* env, jobject thiz) {
+      long handle = (long) env->GetLongField(thiz, fields.nativeHandle);
+      return reinterpret_cast<CameraSurfaceTexture*>(handle);
+  }
+
+  // ----------------------------------------------------------------------------------------
+
+  static void CameraSurfaceTexture_init(JNIEnv* env, jobject thiz, jint inputTexture, jint outputTexture) {
+      CameraSurfaceTexture* cameraSurfaceTexture = new CameraSurfaceTexture(inputTexture, outputTexture);
+      CameraSurfaceTexture_setCameraSurfaceTexture(env, thiz, cameraSurfaceTexture);
+
+      jclass clazz = env->GetObjectClass(thiz);
+      if (clazz == NULL) {
+          // TODO: throw error
+          return;
+      }
+  }
+
+  static void CameraSurfaceTexture_setSize(JNIEnv* env, jobject thiz, jint width, jint height) {
+      CameraSurfaceTexture* cameraSurfaceTexture = CameraSurfaceTexture_getCameraSurfaceTexture(env, thiz);
+      cameraSurfaceTexture->setSize(width, height);
+  }
+
+  static void CameraSurfaceTexture_updateTexImage(JNIEnv* env, jobject thiz, jfloatArray transformMatrix, jfloatArray extraTransformMatrix) {
+      CameraSurfaceTexture* cameraSurfaceTexture = CameraSurfaceTexture_getCameraSurfaceTexture(env, thiz);
+      jfloat* matrix = env->GetFloatArrayElements(transformMatrix, 0);
+      jfloat* extraMatrix = env->GetFloatArrayElements(extraTransformMatrix, 0);
+      cameraSurfaceTexture->updateTexImage(matrix, extraMatrix);
+      env->ReleaseFloatArrayElements(transformMatrix, matrix, 0);
+      env->ReleaseFloatArrayElements(extraTransformMatrix, extraMatrix, 0);
+  }
+
+  static void CameraSurfaceTexture_finalize(JNIEnv* env, jobject thiz) {
+      CameraSurfaceTexture* cameraSurfaceTexture = CameraSurfaceTexture_getCameraSurfaceTexture(env, thiz);
+
+      // TODO: stop resources
+
+      CameraSurfaceTexture_setCameraSurfaceTexture(env, thiz, 0);
+  }
+
+
+  static void CameraSurfaceTexture_release(JNIEnv* env, jobject thiz) {
+      CameraSurfaceTexture* cameraSurfaceTexture = CameraSurfaceTexture_getCameraSurfaceTexture(env, thiz);
+  }
+
+  // ----------------------------------------------------------------------------------------
+
+  namespace jni {
+
+    const char* const classPathName = "com/camerakit/preview/CameraSurfaceTexture";
+
+    static JNINativeMethod methods[] = {
+        {"nativeInit",           "(II)V",   (void*) CameraSurfaceTexture_init},
+        {"nativeSetSize",        "(II)V",   (void*) CameraSurfaceTexture_setSize},
+        {"nativeUpdateTexImage", "([F[F)V", (void*) CameraSurfaceTexture_updateTexImage},
+        {"nativeFinalize",       "()V",     (void*) CameraSurfaceTexture_finalize},
+        {"nativeRelease",        "()V",     (void*) CameraSurfaceTexture_release}
+    };
+
+    int register_CameraSurfaceTexture(JNIEnv* env) {
+        jclass clazz = env->FindClass(classPathName);
+        if (clazz == NULL) {
+            // TODO: throw
+            return -1;
+        }
+
+        jfieldID nativeHandle = env->GetFieldID(clazz, "nativeHandle", "J");
+        if (nativeHandle == NULL) {
+            // TODO: throw
+            return -1;
+        }
+
+        fields.nativeHandle = nativeHandle;
+
+        int result = env->RegisterNatives(clazz, methods, sizeof(methods) / sizeof(methods[0]));
+        return result;
+    }
+
+  }
+
+}

+ 119 - 0
camerakit/src/main/cpp/jni_camera_surface_view.cpp

@@ -0,0 +1,119 @@
+/**
+ *
+ */
+
+#include <jni.h>
+
+#include "camerakit/CameraSurfaceView.hpp"
+
+namespace camerakit {
+
+  struct fields_t {
+      jfieldID nativeHandle;
+  };
+
+  static fields_t fields;
+
+  // ----------------------------------------------------------------------------------------
+
+  static void CameraSurfaceView_setCameraSurfaceView(JNIEnv* env, jobject thiz, const CameraSurfaceView* cameraSurfaceView) {
+      CameraSurfaceView* const currentObject
+              = (CameraSurfaceView*) env->GetLongField(thiz, fields.nativeHandle);
+
+      if (currentObject) {
+          // TODO: stop old
+      }
+
+      env->SetLongField(thiz, fields.nativeHandle, reinterpret_cast<jlong>(cameraSurfaceView));
+  }
+
+  static CameraSurfaceView* CameraSurfaceView_getCameraSurfaceView(JNIEnv* env, jobject thiz) {
+      long handle = (long) env->GetLongField(thiz, fields.nativeHandle);
+      return reinterpret_cast<CameraSurfaceView*>(handle);
+  }
+
+  // ----------------------------------------------------------------------------------------
+
+  static void CameraSurfaceView_init(JNIEnv* env, jobject thiz) {
+      CameraSurfaceView* cameraSurfaceView = new CameraSurfaceView();
+      CameraSurfaceView_setCameraSurfaceView(env, thiz, cameraSurfaceView);
+
+      jclass clazz = env->GetObjectClass(thiz);
+      if (clazz == NULL) {
+          // TODO: throw error
+          return;
+      }
+  }
+
+  static void CameraSurfaceView_onSurfaceCreated(JNIEnv* env, jobject thiz) {
+      CameraSurfaceView* cameraSurfaceView = CameraSurfaceView_getCameraSurfaceView(env, thiz);
+      cameraSurfaceView->onSurfaceCreated();
+  }
+
+  static void CameraSurfaceView_onSurfaceChanged(JNIEnv* env, jobject thiz, jint width, jint height) {
+      CameraSurfaceView* cameraSurfaceView = CameraSurfaceView_getCameraSurfaceView(env, thiz);
+      cameraSurfaceView->onSurfaceChanged(width, height);
+  }
+
+  static void CameraSurfaceView_onDrawFrame(JNIEnv* env, jobject thiz) {
+      CameraSurfaceView* cameraSurfaceView = CameraSurfaceView_getCameraSurfaceView(env, thiz);
+      cameraSurfaceView->onDrawFrame();
+  }
+
+  static void CameraSurfaceView_drawTexture(JNIEnv* env, jobject thiz, jint texture, jint textureWidth, jint textureHeight) {
+      CameraSurfaceView* cameraSurfaceView = CameraSurfaceView_getCameraSurfaceView(env, thiz);
+      cameraSurfaceView->drawTexture((GLuint) texture, textureWidth, textureHeight);
+  }
+
+  static void CameraSurfaceView_finalize(JNIEnv* env, jobject thiz) {
+      CameraSurfaceView* cameraSurfaceView = CameraSurfaceView_getCameraSurfaceView(env, thiz);
+
+      // TODO: stop resources
+
+      CameraSurfaceView_setCameraSurfaceView(env, thiz, 0);
+  }
+
+
+  static void CameraSurfaceView_release(JNIEnv* env, jobject thiz) {
+      CameraSurfaceView* cameraSurfaceView = CameraSurfaceView_getCameraSurfaceView(env, thiz);
+      cameraSurfaceView->abandon();
+  }
+
+  // ----------------------------------------------------------------------------------------
+
+  namespace jni {
+
+    const char* const classPathName = "com/camerakit/preview/CameraSurfaceView";
+
+    static JNINativeMethod methods[] = {
+            {"nativeInit",             "()V",    (void*) CameraSurfaceView_init},
+            {"nativeOnSurfaceCreated", "()V",    (void*) CameraSurfaceView_onSurfaceCreated},
+            {"nativeOnSurfaceChanged", "(II)V",  (void*) CameraSurfaceView_onSurfaceChanged},
+            {"nativeOnDrawFrame",      "()V",    (void*) CameraSurfaceView_onDrawFrame},
+            {"nativeDrawTexture",      "(III)V", (void*) CameraSurfaceView_drawTexture},
+            {"nativeFinalize",         "()V",    (void*) CameraSurfaceView_finalize},
+            {"nativeRelease",          "()V",    (void*) CameraSurfaceView_release}
+    };
+
+    int register_CameraSurfaceView(JNIEnv* env) {
+        jclass clazz = env->FindClass(classPathName);
+        if (clazz == NULL) {
+            // TODO: throw
+            return -1;
+        }
+
+        jfieldID nativeHandle = env->GetFieldID(clazz, "nativeHandle", "J");
+        if (nativeHandle == NULL) {
+            // TODO: throw
+            return -1;
+        }
+
+        fields.nativeHandle = nativeHandle;
+
+        int result = env->RegisterNatives(clazz, methods, sizeof(methods) / sizeof(methods[0]));
+        return result;
+    }
+
+  }
+
+}

+ 46 - 0
camerakit/src/main/cpp/libjpeg/include/bmp.h

@@ -0,0 +1,46 @@
+/*
+ * Copyright (C)2011 D. R. Commander.  All Rights Reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright notice,
+ *   this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright notice,
+ *   this list of conditions and the following disclaimer in the documentation
+ *   and/or other materials provided with the distribution.
+ * - Neither the name of the libjpeg-turbo Project nor the names of its
+ *   contributors may be used to endorse or promote products derived from this
+ *   software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS",
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __BMP_H__
+#define __BMP_H__
+
+#include "./turbojpeg.h"
+
+int loadbmp(char *filename, unsigned char **buf, int *w, int *h, int pf,
+	int bottomup);
+
+int savebmp(char *filename, unsigned char *buf, int w, int h, int pf,
+	int bottomup);
+
+const char *bmpgeterr(void);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif

+ 134 - 0
camerakit/src/main/cpp/libjpeg/include/cderror.h

@@ -0,0 +1,134 @@
+/*
+ * cderror.h
+ *
+ * Copyright (C) 1994-1997, Thomas G. Lane.
+ * Modified 2009 by Guido Vollbeding.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file defines the error and message codes for the cjpeg/djpeg
+ * applications.  These strings are not needed as part of the JPEG library
+ * proper.
+ * Edit this file to add new codes, or to translate the message strings to
+ * some other language.
+ */
+
+/*
+ * To define the enum list of message codes, include this file without
+ * defining macro JMESSAGE.  To create a message string table, include it
+ * again with a suitable JMESSAGE definition (see jerror.c for an example).
+ */
+#ifndef JMESSAGE
+#ifndef CDERROR_H
+#define CDERROR_H
+/* First time through, define the enum list */
+#define JMAKE_ENUM_LIST
+#else
+/* Repeated inclusions of this file are no-ops unless JMESSAGE is defined */
+#define JMESSAGE(code,string)
+#endif /* CDERROR_H */
+#endif /* JMESSAGE */
+
+#ifdef JMAKE_ENUM_LIST
+
+typedef enum {
+
+#define JMESSAGE(code,string)	code ,
+
+#endif /* JMAKE_ENUM_LIST */
+
+JMESSAGE(JMSG_FIRSTADDONCODE=1000, NULL) /* Must be first entry! */
+
+#ifdef BMP_SUPPORTED
+JMESSAGE(JERR_BMP_BADCMAP, "Unsupported BMP colormap format")
+JMESSAGE(JERR_BMP_BADDEPTH, "Only 8- and 24-bit BMP files are supported")
+JMESSAGE(JERR_BMP_BADHEADER, "Invalid BMP file: bad header length")
+JMESSAGE(JERR_BMP_BADPLANES, "Invalid BMP file: biPlanes not equal to 1")
+JMESSAGE(JERR_BMP_COLORSPACE, "BMP output must be grayscale or RGB")
+JMESSAGE(JERR_BMP_COMPRESSED, "Sorry, compressed BMPs not yet supported")
+JMESSAGE(JERR_BMP_EMPTY, "Empty BMP image")
+JMESSAGE(JERR_BMP_NOT, "Not a BMP file - does not start with BM")
+JMESSAGE(JTRC_BMP, "%ux%u 24-bit BMP image")
+JMESSAGE(JTRC_BMP_MAPPED, "%ux%u 8-bit colormapped BMP image")
+JMESSAGE(JTRC_BMP_OS2, "%ux%u 24-bit OS2 BMP image")
+JMESSAGE(JTRC_BMP_OS2_MAPPED, "%ux%u 8-bit colormapped OS2 BMP image")
+#endif /* BMP_SUPPORTED */
+
+#ifdef GIF_SUPPORTED
+JMESSAGE(JERR_GIF_BUG, "GIF output got confused")
+JMESSAGE(JERR_GIF_CODESIZE, "Bogus GIF codesize %d")
+JMESSAGE(JERR_GIF_COLORSPACE, "GIF output must be grayscale or RGB")
+JMESSAGE(JERR_GIF_IMAGENOTFOUND, "Too few images in GIF file")
+JMESSAGE(JERR_GIF_NOT, "Not a GIF file")
+JMESSAGE(JTRC_GIF, "%ux%ux%d GIF image")
+JMESSAGE(JTRC_GIF_BADVERSION,
+	 "Warning: unexpected GIF version number '%c%c%c'")
+JMESSAGE(JTRC_GIF_EXTENSION, "Ignoring GIF extension block of type 0x%02x")
+JMESSAGE(JTRC_GIF_NONSQUARE, "Caution: nonsquare pixels in input")
+JMESSAGE(JWRN_GIF_BADDATA, "Corrupt data in GIF file")
+JMESSAGE(JWRN_GIF_CHAR, "Bogus char 0x%02x in GIF file, ignoring")
+JMESSAGE(JWRN_GIF_ENDCODE, "Premature end of GIF image")
+JMESSAGE(JWRN_GIF_NOMOREDATA, "Ran out of GIF bits")
+#endif /* GIF_SUPPORTED */
+
+#ifdef PPM_SUPPORTED
+JMESSAGE(JERR_PPM_COLORSPACE, "PPM output must be grayscale or RGB")
+JMESSAGE(JERR_PPM_NONNUMERIC, "Nonnumeric data in PPM file")
+JMESSAGE(JERR_PPM_NOT, "Not a PPM/PGM file")
+JMESSAGE(JTRC_PGM, "%ux%u PGM image")
+JMESSAGE(JTRC_PGM_TEXT, "%ux%u text PGM image")
+JMESSAGE(JTRC_PPM, "%ux%u PPM image")
+JMESSAGE(JTRC_PPM_TEXT, "%ux%u text PPM image")
+#endif /* PPM_SUPPORTED */
+
+#ifdef RLE_SUPPORTED
+JMESSAGE(JERR_RLE_BADERROR, "Bogus error code from RLE library")
+JMESSAGE(JERR_RLE_COLORSPACE, "RLE output must be grayscale or RGB")
+JMESSAGE(JERR_RLE_DIMENSIONS, "Image dimensions (%ux%u) too large for RLE")
+JMESSAGE(JERR_RLE_EMPTY, "Empty RLE file")
+JMESSAGE(JERR_RLE_EOF, "Premature EOF in RLE header")
+JMESSAGE(JERR_RLE_MEM, "Insufficient memory for RLE header")
+JMESSAGE(JERR_RLE_NOT, "Not an RLE file")
+JMESSAGE(JERR_RLE_TOOMANYCHANNELS, "Cannot handle %d output channels for RLE")
+JMESSAGE(JERR_RLE_UNSUPPORTED, "Cannot handle this RLE setup")
+JMESSAGE(JTRC_RLE, "%ux%u full-color RLE file")
+JMESSAGE(JTRC_RLE_FULLMAP, "%ux%u full-color RLE file with map of length %d")
+JMESSAGE(JTRC_RLE_GRAY, "%ux%u grayscale RLE file")
+JMESSAGE(JTRC_RLE_MAPGRAY, "%ux%u grayscale RLE file with map of length %d")
+JMESSAGE(JTRC_RLE_MAPPED, "%ux%u colormapped RLE file with map of length %d")
+#endif /* RLE_SUPPORTED */
+
+#ifdef TARGA_SUPPORTED
+JMESSAGE(JERR_TGA_BADCMAP, "Unsupported Targa colormap format")
+JMESSAGE(JERR_TGA_BADPARMS, "Invalid or unsupported Targa file")
+JMESSAGE(JERR_TGA_COLORSPACE, "Targa output must be grayscale or RGB")
+JMESSAGE(JTRC_TGA, "%ux%u RGB Targa image")
+JMESSAGE(JTRC_TGA_GRAY, "%ux%u grayscale Targa image")
+JMESSAGE(JTRC_TGA_MAPPED, "%ux%u colormapped Targa image")
+#else
+JMESSAGE(JERR_TGA_NOTCOMP, "Targa support was not compiled")
+#endif /* TARGA_SUPPORTED */
+
+JMESSAGE(JERR_BAD_CMAP_FILE,
+	 "Color map file is invalid or of unsupported format")
+JMESSAGE(JERR_TOO_MANY_COLORS,
+	 "Output file format cannot handle %d colormap entries")
+JMESSAGE(JERR_UNGETC_FAILED, "ungetc failed")
+#ifdef TARGA_SUPPORTED
+JMESSAGE(JERR_UNKNOWN_FORMAT,
+	 "Unrecognized input file format --- perhaps you need -targa")
+#else
+JMESSAGE(JERR_UNKNOWN_FORMAT, "Unrecognized input file format")
+#endif
+JMESSAGE(JERR_UNSUPPORTED_FORMAT, "Unsupported output file format")
+
+#ifdef JMAKE_ENUM_LIST
+
+  JMSG_LASTADDONCODE
+} ADDON_MESSAGE_CODE;
+
+#undef JMAKE_ENUM_LIST
+#endif /* JMAKE_ENUM_LIST */
+
+/* Zap JMESSAGE macro so that future re-inclusions do nothing by default */
+#undef JMESSAGE

+ 187 - 0
camerakit/src/main/cpp/libjpeg/include/cdjpeg.h

@@ -0,0 +1,187 @@
+/*
+ * cdjpeg.h
+ *
+ * Copyright (C) 1994-1997, Thomas G. Lane.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file contains common declarations for the sample applications
+ * cjpeg and djpeg.  It is NOT used by the core JPEG library.
+ */
+
+#define JPEG_CJPEG_DJPEG	/* define proper options in jconfig.h */
+#define JPEG_INTERNAL_OPTIONS	/* cjpeg.c,djpeg.c need to see xxx_SUPPORTED */
+#include "jinclude.h"
+#include "jpeglib.h"
+#include "jerror.h"		/* get library error codes too */
+#include "cderror.h"		/* get application-specific error codes */
+
+
+/*
+ * Object interface for cjpeg's source file decoding modules
+ */
+
+typedef struct cjpeg_source_struct * cjpeg_source_ptr;
+
+struct cjpeg_source_struct {
+  JMETHOD(void, start_input, (j_compress_ptr cinfo,
+			      cjpeg_source_ptr sinfo));
+  JMETHOD(JDIMENSION, get_pixel_rows, (j_compress_ptr cinfo,
+				       cjpeg_source_ptr sinfo));
+  JMETHOD(void, finish_input, (j_compress_ptr cinfo,
+			       cjpeg_source_ptr sinfo));
+
+  FILE *input_file;
+
+  JSAMPARRAY buffer;
+  JDIMENSION buffer_height;
+};
+
+
+/*
+ * Object interface for djpeg's output file encoding modules
+ */
+
+typedef struct djpeg_dest_struct * djpeg_dest_ptr;
+
+struct djpeg_dest_struct {
+  /* start_output is called after jpeg_start_decompress finishes.
+   * The color map will be ready at this time, if one is needed.
+   */
+  JMETHOD(void, start_output, (j_decompress_ptr cinfo,
+			       djpeg_dest_ptr dinfo));
+  /* Emit the specified number of pixel rows from the buffer. */
+  JMETHOD(void, put_pixel_rows, (j_decompress_ptr cinfo,
+				 djpeg_dest_ptr dinfo,
+				 JDIMENSION rows_supplied));
+  /* Finish up at the end of the image. */
+  JMETHOD(void, finish_output, (j_decompress_ptr cinfo,
+				djpeg_dest_ptr dinfo));
+
+  /* Target file spec; filled in by djpeg.c after object is created. */
+  FILE * output_file;
+
+  /* Output pixel-row buffer.  Created by module init or start_output.
+   * Width is cinfo->output_width * cinfo->output_components;
+   * height is buffer_height.
+   */
+  JSAMPARRAY buffer;
+  JDIMENSION buffer_height;
+};
+
+
+/*
+ * cjpeg/djpeg may need to perform extra passes to convert to or from
+ * the source/destination file format.  The JPEG library does not know
+ * about these passes, but we'd like them to be counted by the progress
+ * monitor.  We use an expanded progress monitor object to hold the
+ * additional pass count.
+ */
+
+struct cdjpeg_progress_mgr {
+  struct jpeg_progress_mgr pub;	/* fields known to JPEG library */
+  int completed_extra_passes;	/* extra passes completed */
+  int total_extra_passes;	/* total extra */
+  /* last printed percentage stored here to avoid multiple printouts */
+  int percent_done;
+};
+
+typedef struct cdjpeg_progress_mgr * cd_progress_ptr;
+
+
+/* Short forms of external names for systems with brain-damaged linkers. */
+
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+#define jinit_read_bmp		jIRdBMP
+#define jinit_write_bmp		jIWrBMP
+#define jinit_read_gif		jIRdGIF
+#define jinit_write_gif		jIWrGIF
+#define jinit_read_ppm		jIRdPPM
+#define jinit_write_ppm		jIWrPPM
+#define jinit_read_rle		jIRdRLE
+#define jinit_write_rle		jIWrRLE
+#define jinit_read_targa	jIRdTarga
+#define jinit_write_targa	jIWrTarga
+#define read_quant_tables	RdQTables
+#define read_scan_script	RdScnScript
+#define set_quality_ratings     SetQRates
+#define set_quant_slots		SetQSlots
+#define set_sample_factors	SetSFacts
+#define read_color_map		RdCMap
+#define enable_signal_catcher	EnSigCatcher
+#define start_progress_monitor	StProgMon
+#define end_progress_monitor	EnProgMon
+#define read_stdin		RdStdin
+#define write_stdout		WrStdout
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+
+/* Module selection routines for I/O modules. */
+
+EXTERN(cjpeg_source_ptr) jinit_read_bmp JPP((j_compress_ptr cinfo));
+EXTERN(djpeg_dest_ptr) jinit_write_bmp JPP((j_decompress_ptr cinfo,
+					    boolean is_os2));
+EXTERN(cjpeg_source_ptr) jinit_read_gif JPP((j_compress_ptr cinfo));
+EXTERN(djpeg_dest_ptr) jinit_write_gif JPP((j_decompress_ptr cinfo));
+EXTERN(cjpeg_source_ptr) jinit_read_ppm JPP((j_compress_ptr cinfo));
+EXTERN(djpeg_dest_ptr) jinit_write_ppm JPP((j_decompress_ptr cinfo));
+EXTERN(cjpeg_source_ptr) jinit_read_rle JPP((j_compress_ptr cinfo));
+EXTERN(djpeg_dest_ptr) jinit_write_rle JPP((j_decompress_ptr cinfo));
+EXTERN(cjpeg_source_ptr) jinit_read_targa JPP((j_compress_ptr cinfo));
+EXTERN(djpeg_dest_ptr) jinit_write_targa JPP((j_decompress_ptr cinfo));
+
+/* cjpeg support routines (in rdswitch.c) */
+
+EXTERN(boolean) read_quant_tables JPP((j_compress_ptr cinfo, char * filename,
+				       boolean force_baseline));
+EXTERN(boolean) read_scan_script JPP((j_compress_ptr cinfo, char * filename));
+EXTERN(boolean) set_quality_ratings JPP((j_compress_ptr cinfo, char *arg,
+					 boolean force_baseline));
+EXTERN(boolean) set_quant_slots JPP((j_compress_ptr cinfo, char *arg));
+EXTERN(boolean) set_sample_factors JPP((j_compress_ptr cinfo, char *arg));
+
+/* djpeg support routines (in rdcolmap.c) */
+
+EXTERN(void) read_color_map JPP((j_decompress_ptr cinfo, FILE * infile));
+
+/* common support routines (in cdjpeg.c) */
+
+EXTERN(void) enable_signal_catcher JPP((j_common_ptr cinfo));
+EXTERN(void) start_progress_monitor JPP((j_common_ptr cinfo,
+					 cd_progress_ptr progress));
+EXTERN(void) end_progress_monitor JPP((j_common_ptr cinfo));
+EXTERN(boolean) keymatch JPP((char * arg, const char * keyword, int minchars));
+EXTERN(FILE *) read_stdin JPP((void));
+EXTERN(FILE *) write_stdout JPP((void));
+
+/* miscellaneous useful macros */
+
+#ifdef DONT_USE_B_MODE		/* define mode parameters for fopen() */
+#define READ_BINARY	"r"
+#define WRITE_BINARY	"w"
+#else
+#ifdef VMS			/* VMS is very nonstandard */
+#define READ_BINARY	"rb", "ctx=stm"
+#define WRITE_BINARY	"wb", "ctx=stm"
+#else				/* standard ANSI-compliant case */
+#define READ_BINARY	"rb"
+#define WRITE_BINARY	"wb"
+#endif
+#endif
+
+#ifndef EXIT_FAILURE		/* define exit() codes if not provided */
+#define EXIT_FAILURE  1
+#endif
+#ifndef EXIT_SUCCESS
+#ifdef VMS
+#define EXIT_SUCCESS  1		/* VMS is very nonstandard */
+#else
+#define EXIT_SUCCESS  0
+#endif
+#endif
+#ifndef EXIT_WARNING
+#ifdef VMS
+#define EXIT_WARNING  1		/* VMS is very nonstandard */
+#else
+#define EXIT_WARNING  2
+#endif
+#endif

+ 131 - 0
camerakit/src/main/cpp/libjpeg/include/config.h

@@ -0,0 +1,131 @@
+/* config.h.  Generated from config.h.in by configure.  */
+/* config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Build number */
+#define BUILD "20110829"
+
+/* Support arithmetic encoding */
+#define C_ARITH_CODING_SUPPORTED 1
+
+/* Support arithmetic decoding */
+#define D_ARITH_CODING_SUPPORTED 1
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <jni.h> header file. */
+/* #undef HAVE_JNI_H */
+
+/* Define to 1 if you have the `memcpy' function. */
+#define HAVE_MEMCPY 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* Define to 1 if you have the `memset' function. */
+#define HAVE_MEMSET 1
+
+/* Define if your compiler supports prototypes */
+#define HAVE_PROTOTYPES 1
+
+/* Define to 1 if you have the <stddef.h> header file. */
+#define HAVE_STDDEF_H 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* Define to 1 if the system has the type `unsigned char'. */
+#define HAVE_UNSIGNED_CHAR 1
+
+/* Define to 1 if the system has the type `unsigned short'. */
+#define HAVE_UNSIGNED_SHORT 1
+
+/* Compiler does not support pointers to undefined structures. */
+/* #undef INCOMPLETE_TYPES_BROKEN */
+
+/* libjpeg API version */
+#define JPEG_LIB_VERSION 62
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Define if you have BSD-like bzero and bcopy */
+/* #undef NEED_BSD_STRINGS */
+
+/* Define if you need short function names */
+/* #undef NEED_SHORT_EXTERNAL_NAMES */
+
+/* Define if you have sys/types.h */
+#define NEED_SYS_TYPES_H 1
+
+/* Name of package */
+#define PACKAGE "libjpeg-turbo"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT ""
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "libjpeg-turbo"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "libjpeg-turbo 1.1.90"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "libjpeg-turbo"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.1.90"
+
+/* Define if shift is unsigned */
+/* #undef RIGHT_SHIFT_IS_UNSIGNED */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* Version number of package */
+#define VERSION "1.1.90"
+
+/* Use accelerated SIMD routines. */
+#define WITH_SIMD 1
+
+/* Define to 1 if type `char' is unsigned and you are not using gcc.  */
+#ifndef __CHAR_UNSIGNED__
+/* # undef __CHAR_UNSIGNED__ */
+#endif
+
+/* Define to empty if `const' does not conform to ANSI C. */
+/* #undef const */
+
+/* Define to `__inline__' or `__inline' if that's what the C compiler
+   calls it, or to nothing if 'inline' is not supported under any name.  */
+#ifndef __cplusplus
+/* #undef inline */
+#endif
+
+/* Define to `unsigned int' if <sys/types.h> does not define. */
+/* #undef size_t */

+ 198 - 0
camerakit/src/main/cpp/libjpeg/include/cpu-features.h

@@ -0,0 +1,198 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+#ifndef _ARM_MACHINE_CPU_FEATURES_H
+#define _ARM_MACHINE_CPU_FEATURES_H
+
+/* The purpose of this file is to define several macros corresponding
+ * to CPU features that may or may not be available at build time on
+ * on the target CPU.
+ *
+ * This is done to abstract us from the various ARM Architecture
+ * quirks and alphabet soup.
+ *
+ * IMPORTANT: We have no intention to support anything below an ARMv4T !
+ */
+
+/* __ARM_ARCH__ is a number corresponding to the ARM revision
+ * we're going to support
+ *
+ * it looks like our toolchain doesn't define __ARM_ARCH__
+ * so try to guess it.
+ *
+ *
+ *
+ */
+#ifndef __ARM_ARCH__
+
+#  if defined __ARM_ARCH_7__   || defined __ARM_ARCH_7A__ || \
+      defined __ARM_ARCH_7R__  || defined __ARM_ARCH_7M__
+
+#    define __ARM_ARCH__ 7
+
+#  elif defined __ARM_ARCH_6__   || defined __ARM_ARCH_6J__ || \
+      defined __ARM_ARCH_6K__  || defined __ARM_ARCH_6Z__ || \
+      defined __ARM_ARCH_6KZ__ || defined __ARM_ARCH_6T2__
+#
+#    define __ARM_ARCH__ 6
+#
+#  elif defined __ARM_ARCH_5__ || defined __ARM_ARCH_5T__ || \
+        defined __ARM_ARCH_5TE__ || defined __ARM_ARCH_5TEJ__
+#
+#    define __ARM_ARCH__ 5
+#
+#  elif defined __ARM_ARCH_4T__
+#
+#    define __ARM_ARCH__ 4
+#
+#  elif defined __ARM_ARCH_4__
+#    error ARMv4 is not supported, please use ARMv4T at a minimum
+#  else
+#    error Unknown or unsupported ARM architecture
+#  endif
+#endif
+
+/* experimental feature used to check that our ARMv4 workarounds
+ * work correctly without a real ARMv4 machine */
+#ifdef BIONIC_EXPERIMENTAL_FORCE_ARMV4
+#  undef  __ARM_ARCH__
+#  define __ARM_ARCH__  4
+#endif
+
+/* define __ARM_HAVE_5TE if we have the ARMv5TE instructions */
+#if __ARM_ARCH__ > 5
+#  define  __ARM_HAVE_5TE  1
+#elif __ARM_ARCH__ == 5
+#  if defined __ARM_ARCH_5TE__ || defined __ARM_ARCH_5TEJ__
+#    define __ARM_HAVE_5TE  1
+#  endif
+#endif
+
+/* instructions introduced in ARMv5 */
+#if __ARM_ARCH__ >= 5
+#  define  __ARM_HAVE_BLX  1
+#  define  __ARM_HAVE_CLZ  1
+#  define  __ARM_HAVE_LDC2 1
+#  define  __ARM_HAVE_MCR2 1
+#  define  __ARM_HAVE_MRC2 1
+#  define  __ARM_HAVE_STC2 1
+#endif
+
+/* ARMv5TE introduces a few instructions */
+#if __ARM_HAVE_5TE
+#  define  __ARM_HAVE_PLD   1
+#  define  __ARM_HAVE_MCRR  1
+#  define  __ARM_HAVE_MRRC  1
+#endif
+
+/* define __ARM_HAVE_HALFWORD_MULTIPLY when half-word multiply instructions
+ * this means variants of: smul, smulw, smla, smlaw, smlal
+ */
+#if __ARM_HAVE_5TE
+#  define  __ARM_HAVE_HALFWORD_MULTIPLY  1
+#endif
+
+/* define __ARM_HAVE_PAIR_LOAD_STORE when 64-bit memory loads and stored
+ * into/from a pair of 32-bit registers is supported throuhg 'ldrd' and 'strd'
+ */
+#if __ARM_HAVE_5TE
+#  define  __ARM_HAVE_PAIR_LOAD_STORE 1
+#endif
+
+/* define __ARM_HAVE_SATURATED_ARITHMETIC is you have the saturated integer
+ * arithmetic instructions: qdd, qdadd, qsub, qdsub
+ */
+#if __ARM_HAVE_5TE
+#  define  __ARM_HAVE_SATURATED_ARITHMETIC 1
+#endif
+
+/* define __ARM_HAVE_PC_INTERWORK when a direct assignment to the
+ * pc register will switch into thumb/ARM mode depending on bit 0
+ * of the new instruction address. Before ARMv5, this was not the
+ * case, and you have to write:
+ *
+ *     mov  r0, [<some address>]
+ *     bx   r0
+ *
+ * instead of:
+ *
+ *     ldr  pc, [<some address>]
+ *
+ * note that this affects any instruction that explicitly changes the
+ * value of the pc register, including ldm { ...,pc } or 'add pc, #offset'
+ */
+#if __ARM_ARCH__ >= 5
+#  define __ARM_HAVE_PC_INTERWORK
+#endif
+
+/* define __ARM_HAVE_LDREX_STREX for ARMv6 and ARMv7 architecture to be
+ * used in replacement of deprecated swp instruction
+ */
+#if __ARM_ARCH__ >= 6
+#  define __ARM_HAVE_LDREX_STREX
+#endif
+
+/* define __ARM_HAVE_DMB for ARMv7 architecture
+ */
+#if __ARM_ARCH__ >= 7
+#  define __ARM_HAVE_DMB
+#endif
+
+/* define __ARM_HAVE_LDREXD for ARMv7 architecture
+ * (also present in ARMv6K, but not implemented in ARMv7-M, neither of which
+ * we care about)
+ */
+#if __ARM_ARCH__ >= 7
+#  define __ARM_HAVE_LDREXD
+#endif
+
+/* define _ARM_HAVE_VFP if we have VFPv3
+ */
+#if __ARM_ARCH__ >= 7 && defined __VFP_FP__
+#  define __ARM_HAVE_VFP
+#endif
+
+/* define _ARM_HAVE_NEON for ARMv7 architecture if we support the
+ * Neon SIMD instruction set extensions. This also implies
+ * that VFPv3-D32 is supported.
+ */
+#if __ARM_ARCH__ >= 7 && defined __ARM_NEON__
+#  define __ARM_HAVE_NEON
+#endif
+
+/* Assembly-only macros */
+
+/* define a handy PLD(address) macro since the cache preload
+ * is an optional opcode
+ */
+#if __ARM_HAVE_PLD
+#  define  PLD(reg,offset)    pld    [reg, offset]
+#else
+#  define  PLD(reg,offset)    /* nothing */
+#endif
+
+#endif /* _ARM_MACHINE_CPU_FEATURES_H */

+ 47 - 0
camerakit/src/main/cpp/libjpeg/include/jchuff.h

@@ -0,0 +1,47 @@
+/*
+ * jchuff.h
+ *
+ * Copyright (C) 1991-1997, Thomas G. Lane.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file contains declarations for Huffman entropy encoding routines
+ * that are shared between the sequential encoder (jchuff.c) and the
+ * progressive encoder (jcphuff.c).  No other modules need to see these.
+ */
+
+/* The legal range of a DCT coefficient is
+ *  -1024 .. +1023  for 8-bit data;
+ * -16384 .. +16383 for 12-bit data.
+ * Hence the magnitude should always fit in 10 or 14 bits respectively.
+ */
+
+#if BITS_IN_JSAMPLE == 8
+#define MAX_COEF_BITS 10
+#else
+#define MAX_COEF_BITS 14
+#endif
+
+/* Derived data constructed for each Huffman table */
+
+typedef struct {
+  unsigned int ehufco[256];	/* code for each symbol */
+  char ehufsi[256];		/* length of code for each symbol */
+  /* If no code has been allocated for a symbol S, ehufsi[S] contains 0 */
+} c_derived_tbl;
+
+/* Short forms of external names for systems with brain-damaged linkers. */
+
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+#define jpeg_make_c_derived_tbl	jMkCDerived
+#define jpeg_gen_optimal_table	jGenOptTbl
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+
+/* Expand a Huffman table definition into the derived format */
+EXTERN(void) jpeg_make_c_derived_tbl
+	JPP((j_compress_ptr cinfo, boolean isDC, int tblno,
+	     c_derived_tbl ** pdtbl));
+
+/* Generate an optimal table definition given the specified counts */
+EXTERN(void) jpeg_gen_optimal_table
+	JPP((j_compress_ptr cinfo, JHUFF_TBL * htbl, long freq[]));

+ 62 - 0
camerakit/src/main/cpp/libjpeg/include/jconfig.h

@@ -0,0 +1,62 @@
+/* jconfig.h.  Generated from jconfig.h.in by configure.  */
+/* Version ID for the JPEG library.
+ * Might be useful for tests like "#if JPEG_LIB_VERSION >= 60".
+ */
+#define JPEG_LIB_VERSION 62
+
+/* Support arithmetic encoding */
+#define C_ARITH_CODING_SUPPORTED 1
+
+/* Support arithmetic decoding */
+#define D_ARITH_CODING_SUPPORTED 1
+
+/* Define if your compiler supports prototypes */
+#define HAVE_PROTOTYPES 1
+
+/* Define to 1 if you have the <stddef.h> header file. */
+#define HAVE_STDDEF_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if the system has the type `unsigned char'. */
+#define HAVE_UNSIGNED_CHAR 1
+
+/* Define to 1 if the system has the type `unsigned short'. */
+#define HAVE_UNSIGNED_SHORT 1
+
+/* Define if you want use complete types */
+/* #undef INCOMPLETE_TYPES_BROKEN */
+
+/* Define if you have BSD-like bzero and bcopy */
+/* #undef NEED_BSD_STRINGS */
+
+/* Define if you need short function names */
+/* #undef NEED_SHORT_EXTERNAL_NAMES */
+
+/* Define if you have sys/types.h */
+#define NEED_SYS_TYPES_H 1
+
+/* Define if shift is unsigned */
+/* #undef RIGHT_SHIFT_IS_UNSIGNED */
+
+/* Use accelerated SIMD routines. */
+#define WITH_SIMD 1
+
+/* Define to 1 if type `char' is unsigned and you are not using gcc.  */
+#ifndef __CHAR_UNSIGNED__
+/* # undef __CHAR_UNSIGNED__ */
+#endif
+
+/* Define to empty if `const' does not conform to ANSI C. */
+/* #undef const */
+
+/* Define to `__inline__' or `__inline' if that's what the C compiler
+   calls it, or to nothing if 'inline' is not supported under any name.  */
+#ifndef __cplusplus
+/* #undef inline */
+#endif
+
+/* Define to `unsigned int' if <sys/types.h> does not define. */
+/* #undef size_t */
+

+ 184 - 0
camerakit/src/main/cpp/libjpeg/include/jdct.h

@@ -0,0 +1,184 @@
+/*
+ * jdct.h
+ *
+ * Copyright (C) 1994-1996, Thomas G. Lane.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This include file contains common declarations for the forward and
+ * inverse DCT modules.  These declarations are private to the DCT managers
+ * (jcdctmgr.c, jddctmgr.c) and the individual DCT algorithms.
+ * The individual DCT algorithms are kept in separate files to ease 
+ * machine-dependent tuning (e.g., assembly coding).
+ */
+
+
+/*
+ * A forward DCT routine is given a pointer to a work area of type DCTELEM[];
+ * the DCT is to be performed in-place in that buffer.  Type DCTELEM is int
+ * for 8-bit samples, INT32 for 12-bit samples.  (NOTE: Floating-point DCT
+ * implementations use an array of type FAST_FLOAT, instead.)
+ * The DCT inputs are expected to be signed (range +-CENTERJSAMPLE).
+ * The DCT outputs are returned scaled up by a factor of 8; they therefore
+ * have a range of +-8K for 8-bit data, +-128K for 12-bit data.  This
+ * convention improves accuracy in integer implementations and saves some
+ * work in floating-point ones.
+ * Quantization of the output coefficients is done by jcdctmgr.c. This
+ * step requires an unsigned type and also one with twice the bits.
+ */
+
+#if BITS_IN_JSAMPLE == 8
+#ifndef WITH_SIMD
+typedef int DCTELEM;		/* 16 or 32 bits is fine */
+typedef unsigned int UDCTELEM;
+typedef unsigned long long UDCTELEM2;
+#else
+typedef short DCTELEM;  /* prefer 16 bit with SIMD for parellelism */
+typedef unsigned short UDCTELEM;
+typedef unsigned int UDCTELEM2;
+#endif
+#else
+typedef INT32 DCTELEM;		/* must have 32 bits */
+typedef UINT32 UDCTELEM;
+typedef unsigned long long UDCTELEM2;
+#endif
+
+
+/*
+ * An inverse DCT routine is given a pointer to the input JBLOCK and a pointer
+ * to an output sample array.  The routine must dequantize the input data as
+ * well as perform the IDCT; for dequantization, it uses the multiplier table
+ * pointed to by compptr->dct_table.  The output data is to be placed into the
+ * sample array starting at a specified column.  (Any row offset needed will
+ * be applied to the array pointer before it is passed to the IDCT code.)
+ * Note that the number of samples emitted by the IDCT routine is
+ * DCT_scaled_size * DCT_scaled_size.
+ */
+
+/* typedef inverse_DCT_method_ptr is declared in jpegint.h */
+
+/*
+ * Each IDCT routine has its own ideas about the best dct_table element type.
+ */
+
+typedef MULTIPLIER ISLOW_MULT_TYPE; /* short or int, whichever is faster */
+#if BITS_IN_JSAMPLE == 8
+typedef MULTIPLIER IFAST_MULT_TYPE; /* 16 bits is OK, use short if faster */
+#define IFAST_SCALE_BITS  2	/* fractional bits in scale factors */
+#else
+typedef INT32 IFAST_MULT_TYPE;	/* need 32 bits for scaled quantizers */
+#define IFAST_SCALE_BITS  13	/* fractional bits in scale factors */
+#endif
+typedef FAST_FLOAT FLOAT_MULT_TYPE; /* preferred floating type */
+
+
+/*
+ * Each IDCT routine is responsible for range-limiting its results and
+ * converting them to unsigned form (0..MAXJSAMPLE).  The raw outputs could
+ * be quite far out of range if the input data is corrupt, so a bulletproof
+ * range-limiting step is required.  We use a mask-and-table-lookup method
+ * to do the combined operations quickly.  See the comments with
+ * prepare_range_limit_table (in jdmaster.c) for more info.
+ */
+
+#define IDCT_range_limit(cinfo)  ((cinfo)->sample_range_limit + CENTERJSAMPLE)
+
+#define RANGE_MASK  (MAXJSAMPLE * 4 + 3) /* 2 bits wider than legal samples */
+
+
+/* Short forms of external names for systems with brain-damaged linkers. */
+
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+#define jpeg_fdct_islow		jFDislow
+#define jpeg_fdct_ifast		jFDifast
+#define jpeg_fdct_float		jFDfloat
+#define jpeg_idct_islow		jRDislow
+#define jpeg_idct_ifast		jRDifast
+#define jpeg_idct_float		jRDfloat
+#define jpeg_idct_4x4		jRD4x4
+#define jpeg_idct_2x2		jRD2x2
+#define jpeg_idct_1x1		jRD1x1
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+
+/* Extern declarations for the forward and inverse DCT routines. */
+
+EXTERN(void) jpeg_fdct_islow JPP((DCTELEM * data));
+EXTERN(void) jpeg_fdct_ifast JPP((DCTELEM * data));
+EXTERN(void) jpeg_fdct_float JPP((FAST_FLOAT * data));
+
+EXTERN(void) jpeg_idct_islow
+    JPP((j_decompress_ptr cinfo, jpeg_component_info * compptr,
+	 JCOEFPTR coef_block, JSAMPARRAY output_buf, JDIMENSION output_col));
+EXTERN(void) jpeg_idct_ifast
+    JPP((j_decompress_ptr cinfo, jpeg_component_info * compptr,
+	 JCOEFPTR coef_block, JSAMPARRAY output_buf, JDIMENSION output_col));
+EXTERN(void) jpeg_idct_float
+    JPP((j_decompress_ptr cinfo, jpeg_component_info * compptr,
+	 JCOEFPTR coef_block, JSAMPARRAY output_buf, JDIMENSION output_col));
+EXTERN(void) jpeg_idct_4x4
+    JPP((j_decompress_ptr cinfo, jpeg_component_info * compptr,
+	 JCOEFPTR coef_block, JSAMPARRAY output_buf, JDIMENSION output_col));
+EXTERN(void) jpeg_idct_2x2
+    JPP((j_decompress_ptr cinfo, jpeg_component_info * compptr,
+	 JCOEFPTR coef_block, JSAMPARRAY output_buf, JDIMENSION output_col));
+EXTERN(void) jpeg_idct_1x1
+    JPP((j_decompress_ptr cinfo, jpeg_component_info * compptr,
+	 JCOEFPTR coef_block, JSAMPARRAY output_buf, JDIMENSION output_col));
+
+
+/*
+ * Macros for handling fixed-point arithmetic; these are used by many
+ * but not all of the DCT/IDCT modules.
+ *
+ * All values are expected to be of type INT32.
+ * Fractional constants are scaled left by CONST_BITS bits.
+ * CONST_BITS is defined within each module using these macros,
+ * and may differ from one module to the next.
+ */
+
+#define ONE	((INT32) 1)
+#define CONST_SCALE (ONE << CONST_BITS)
+
+/* Convert a positive real constant to an integer scaled by CONST_SCALE.
+ * Caution: some C compilers fail to reduce "FIX(constant)" at compile time,
+ * thus causing a lot of useless floating-point operations at run time.
+ */
+
+#define FIX(x)	((INT32) ((x) * CONST_SCALE + 0.5))
+
+/* Descale and correctly round an INT32 value that's scaled by N bits.
+ * We assume RIGHT_SHIFT rounds towards minus infinity, so adding
+ * the fudge factor is correct for either sign of X.
+ */
+
+#define DESCALE(x,n)  RIGHT_SHIFT((x) + (ONE << ((n)-1)), n)
+
+/* Multiply an INT32 variable by an INT32 constant to yield an INT32 result.
+ * This macro is used only when the two inputs will actually be no more than
+ * 16 bits wide, so that a 16x16->32 bit multiply can be used instead of a
+ * full 32x32 multiply.  This provides a useful speedup on many machines.
+ * Unfortunately there is no way to specify a 16x16->32 multiply portably
+ * in C, but some C compilers will do the right thing if you provide the
+ * correct combination of casts.
+ */
+
+#ifdef SHORTxSHORT_32		/* may work if 'int' is 32 bits */
+#define MULTIPLY16C16(var,const)  (((INT16) (var)) * ((INT16) (const)))
+#endif
+#ifdef SHORTxLCONST_32		/* known to work with Microsoft C 6.0 */
+#define MULTIPLY16C16(var,const)  (((INT16) (var)) * ((INT32) (const)))
+#endif
+
+#ifndef MULTIPLY16C16		/* default definition */
+#define MULTIPLY16C16(var,const)  ((var) * (const))
+#endif
+
+/* Same except both inputs are variables. */
+
+#ifdef SHORTxSHORT_32		/* may work if 'int' is 32 bits */
+#define MULTIPLY16V16(var1,var2)  (((INT16) (var1)) * ((INT16) (var2)))
+#endif
+
+#ifndef MULTIPLY16V16		/* default definition */
+#define MULTIPLY16V16(var1,var2)  ((var1) * (var2))
+#endif

+ 235 - 0
camerakit/src/main/cpp/libjpeg/include/jdhuff.h

@@ -0,0 +1,235 @@
+/*
+ * jdhuff.h
+ *
+ * Copyright (C) 1991-1997, Thomas G. Lane.
+ * Copyright (C) 2010-2011, D. R. Commander.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file contains declarations for Huffman entropy decoding routines
+ * that are shared between the sequential decoder (jdhuff.c) and the
+ * progressive decoder (jdphuff.c).  No other modules need to see these.
+ */
+
+/* Short forms of external names for systems with brain-damaged linkers. */
+
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+#define jpeg_make_d_derived_tbl	jMkDDerived
+#define jpeg_fill_bit_buffer	jFilBitBuf
+#define jpeg_huff_decode	jHufDecode
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+
+
+/* Derived data constructed for each Huffman table */
+
+#define HUFF_LOOKAHEAD	8	/* # of bits of lookahead */
+
+typedef struct {
+  /* Basic tables: (element [0] of each array is unused) */
+  INT32 maxcode[18];		/* largest code of length k (-1 if none) */
+  /* (maxcode[17] is a sentinel to ensure jpeg_huff_decode terminates) */
+  INT32 valoffset[18];		/* huffval[] offset for codes of length k */
+  /* valoffset[k] = huffval[] index of 1st symbol of code length k, less
+   * the smallest code of length k; so given a code of length k, the
+   * corresponding symbol is huffval[code + valoffset[k]]
+   */
+
+  /* Link to public Huffman table (needed only in jpeg_huff_decode) */
+  JHUFF_TBL *pub;
+
+  /* Lookahead table: indexed by the next HUFF_LOOKAHEAD bits of
+   * the input data stream.  If the next Huffman code is no more
+   * than HUFF_LOOKAHEAD bits long, we can obtain its length and
+   * the corresponding symbol directly from this tables.
+   *
+   * The lower 8 bits of each table entry contain the number of
+   * bits in the corresponding Huffman code, or HUFF_LOOKAHEAD + 1
+   * if too long.  The next 8 bits of each entry contain the
+   * symbol.
+   */
+  int lookup[1<<HUFF_LOOKAHEAD];
+} d_derived_tbl;
+
+/* Expand a Huffman table definition into the derived format */
+EXTERN(void) jpeg_make_d_derived_tbl
+	JPP((j_decompress_ptr cinfo, boolean isDC, int tblno,
+	     d_derived_tbl ** pdtbl));
+
+
+/*
+ * Fetching the next N bits from the input stream is a time-critical operation
+ * for the Huffman decoders.  We implement it with a combination of inline
+ * macros and out-of-line subroutines.  Note that N (the number of bits
+ * demanded at one time) never exceeds 15 for JPEG use.
+ *
+ * We read source bytes into get_buffer and dole out bits as needed.
+ * If get_buffer already contains enough bits, they are fetched in-line
+ * by the macros CHECK_BIT_BUFFER and GET_BITS.  When there aren't enough
+ * bits, jpeg_fill_bit_buffer is called; it will attempt to fill get_buffer
+ * as full as possible (not just to the number of bits needed; this
+ * prefetching reduces the overhead cost of calling jpeg_fill_bit_buffer).
+ * Note that jpeg_fill_bit_buffer may return FALSE to indicate suspension.
+ * On TRUE return, jpeg_fill_bit_buffer guarantees that get_buffer contains
+ * at least the requested number of bits --- dummy zeroes are inserted if
+ * necessary.
+ */
+
+#if __WORDSIZE == 64 || defined(_WIN64)
+
+typedef size_t bit_buf_type;	/* type of bit-extraction buffer */
+#define BIT_BUF_SIZE  64		/* size of buffer in bits */
+
+#else
+
+typedef INT32 bit_buf_type;	/* type of bit-extraction buffer */
+#define BIT_BUF_SIZE  32		/* size of buffer in bits */
+
+#endif
+#define LOG_TWO_BIT_BUF_SIZE  5        /* log_2(BIT_BUF_SIZE) */
+
+/* If long is > 32 bits on your machine, and shifting/masking longs is
+ * reasonably fast, making bit_buf_type be long and setting BIT_BUF_SIZE
+ * appropriately should be a win.  Unfortunately we can't define the size
+ * with something like  #define BIT_BUF_SIZE (sizeof(bit_buf_type)*8)
+ * because not all machines measure sizeof in 8-bit bytes.
+ */
+
+typedef struct {		/* Bitreading state saved across MCUs */
+  bit_buf_type get_buffer;	/* current bit-extraction buffer */
+  int bits_left;		/* # of unused bits in it */
+} bitread_perm_state;
+
+typedef struct {		/* Bitreading working state within an MCU */
+  /* Current data source location */
+  /* We need a copy, rather than munging the original, in case of suspension */
+  const JOCTET * next_input_byte; /* => next byte to read from source */
+  size_t bytes_in_buffer;	/* # of bytes remaining in source buffer */
+  /* Bit input buffer --- note these values are kept in register variables,
+   * not in this struct, inside the inner loops.
+   */
+  bit_buf_type get_buffer;	/* current bit-extraction buffer */
+  int bits_left;		/* # of unused bits in it */
+  /* Pointer needed by jpeg_fill_bit_buffer. */
+  j_decompress_ptr cinfo;	/* back link to decompress master record */
+} bitread_working_state;
+
+/* Macros to declare and load/save bitread local variables. */
+#define BITREAD_STATE_VARS  \
+	register bit_buf_type get_buffer;  \
+	register int bits_left;  \
+	bitread_working_state br_state
+
+#define BITREAD_LOAD_STATE(cinfop,permstate)  \
+	br_state.cinfo = cinfop; \
+	br_state.next_input_byte = cinfop->src->next_input_byte; \
+	br_state.bytes_in_buffer = cinfop->src->bytes_in_buffer; \
+	get_buffer = permstate.get_buffer; \
+	bits_left = permstate.bits_left;
+
+#define BITREAD_SAVE_STATE(cinfop,permstate)  \
+	cinfop->src->next_input_byte = br_state.next_input_byte; \
+	cinfop->src->bytes_in_buffer = br_state.bytes_in_buffer; \
+	permstate.get_buffer = get_buffer; \
+	permstate.bits_left = bits_left
+
+/*
+ * These macros provide the in-line portion of bit fetching.
+ * Use CHECK_BIT_BUFFER to ensure there are N bits in get_buffer
+ * before using GET_BITS, PEEK_BITS, or DROP_BITS.
+ * The variables get_buffer and bits_left are assumed to be locals,
+ * but the state struct might not be (jpeg_huff_decode needs this).
+ *	CHECK_BIT_BUFFER(state,n,action);
+ *		Ensure there are N bits in get_buffer; if suspend, take action.
+ *      val = GET_BITS(n);
+ *		Fetch next N bits.
+ *      val = PEEK_BITS(n);
+ *		Fetch next N bits without removing them from the buffer.
+ *	DROP_BITS(n);
+ *		Discard next N bits.
+ * The value N should be a simple variable, not an expression, because it
+ * is evaluated multiple times.
+ */
+
+#define CHECK_BIT_BUFFER(state,nbits,action) \
+	{ if (bits_left < (nbits)) {  \
+	    if (! jpeg_fill_bit_buffer(&(state),get_buffer,bits_left,nbits))  \
+	      { action; }  \
+	    get_buffer = (state).get_buffer; bits_left = (state).bits_left; } }
+
+#define GET_BITS(nbits) \
+	(((int) (get_buffer >> (bits_left -= (nbits)))) & ((1<<(nbits))-1))
+
+#define PEEK_BITS(nbits) \
+	(((int) (get_buffer >> (bits_left -  (nbits)))) & ((1<<(nbits))-1))
+
+#define DROP_BITS(nbits) \
+	(bits_left -= (nbits))
+
+/* Load up the bit buffer to a depth of at least nbits */
+EXTERN(boolean) jpeg_fill_bit_buffer
+	JPP((bitread_working_state * state, register bit_buf_type get_buffer,
+	     register int bits_left, int nbits));
+
+
+/*
+ * Code for extracting next Huffman-coded symbol from input bit stream.
+ * Again, this is time-critical and we make the main paths be macros.
+ *
+ * We use a lookahead table to process codes of up to HUFF_LOOKAHEAD bits
+ * without looping.  Usually, more than 95% of the Huffman codes will be 8
+ * or fewer bits long.  The few overlength codes are handled with a loop,
+ * which need not be inline code.
+ *
+ * Notes about the HUFF_DECODE macro:
+ * 1. Near the end of the data segment, we may fail to get enough bits
+ *    for a lookahead.  In that case, we do it the hard way.
+ * 2. If the lookahead table contains no entry, the next code must be
+ *    more than HUFF_LOOKAHEAD bits long.
+ * 3. jpeg_huff_decode returns -1 if forced to suspend.
+ */
+
+#define HUFF_DECODE(result,state,htbl,failaction,slowlabel) \
+{ register int nb, look; \
+  if (bits_left < HUFF_LOOKAHEAD) { \
+    if (! jpeg_fill_bit_buffer(&state,get_buffer,bits_left, 0)) {failaction;} \
+    get_buffer = state.get_buffer; bits_left = state.bits_left; \
+    if (bits_left < HUFF_LOOKAHEAD) { \
+      nb = 1; goto slowlabel; \
+    } \
+  } \
+  look = PEEK_BITS(HUFF_LOOKAHEAD); \
+  if ((nb = (htbl->lookup[look] >> HUFF_LOOKAHEAD)) <= HUFF_LOOKAHEAD) { \
+    DROP_BITS(nb); \
+    result = htbl->lookup[look] & ((1 << HUFF_LOOKAHEAD) - 1); \
+  } else { \
+slowlabel: \
+    if ((result=jpeg_huff_decode(&state,get_buffer,bits_left,htbl,nb)) < 0) \
+	{ failaction; } \
+    get_buffer = state.get_buffer; bits_left = state.bits_left; \
+  } \
+}
+
+#define HUFF_DECODE_FAST(s,nb,htbl) \
+  FILL_BIT_BUFFER_FAST; \
+  s = PEEK_BITS(HUFF_LOOKAHEAD); \
+  s = htbl->lookup[s]; \
+  nb = s >> HUFF_LOOKAHEAD; \
+  /* Pre-execute the common case of nb <= HUFF_LOOKAHEAD */ \
+  DROP_BITS(nb); \
+  s = s & ((1 << HUFF_LOOKAHEAD) - 1); \
+  if (nb > HUFF_LOOKAHEAD) { \
+    /* Equivalent of jpeg_huff_decode() */ \
+    /* Don't use GET_BITS() here because we don't want to modify bits_left */ \
+    s = (get_buffer >> bits_left) & ((1 << (nb)) - 1); \
+    while (s > htbl->maxcode[nb]) { \
+      s <<= 1; \
+      s |= GET_BITS(1); \
+      nb++; \
+    } \
+    s = htbl->pub->huffval[ (int) (s + htbl->valoffset[nb]) & 0xFF ]; \
+  }
+
+/* Out-of-line case for Huffman code fetching */
+EXTERN(int) jpeg_huff_decode
+	JPP((bitread_working_state * state, register bit_buf_type get_buffer,
+	     register int bits_left, d_derived_tbl * htbl, int min_bits));

+ 314 - 0
camerakit/src/main/cpp/libjpeg/include/jerror.h

@@ -0,0 +1,314 @@
+/*
+ * jerror.h
+ *
+ * Copyright (C) 1994-1997, Thomas G. Lane.
+ * Modified 1997-2009 by Guido Vollbeding.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file defines the error and message codes for the JPEG library.
+ * Edit this file to add new codes, or to translate the message strings to
+ * some other language.
+ * A set of error-reporting macros are defined too.  Some applications using
+ * the JPEG library may wish to include this file to get the error codes
+ * and/or the macros.
+ */
+
+/*
+ * To define the enum list of message codes, include this file without
+ * defining macro JMESSAGE.  To create a message string table, include it
+ * again with a suitable JMESSAGE definition (see jerror.c for an example).
+ */
+#ifndef JMESSAGE
+#ifndef JERROR_H
+/* First time through, define the enum list */
+#define JMAKE_ENUM_LIST
+#else
+/* Repeated inclusions of this file are no-ops unless JMESSAGE is defined */
+#define JMESSAGE(code,string)
+#endif /* JERROR_H */
+#endif /* JMESSAGE */
+
+#ifdef JMAKE_ENUM_LIST
+
+typedef enum {
+
+#define JMESSAGE(code,string)	code ,
+
+#endif /* JMAKE_ENUM_LIST */
+
+JMESSAGE(JMSG_NOMESSAGE, "Bogus message code %d") /* Must be first entry! */
+
+/* For maintenance convenience, list is alphabetical by message code name */
+#if JPEG_LIB_VERSION < 70
+JMESSAGE(JERR_ARITH_NOTIMPL,
+	 "Sorry, arithmetic coding is not implemented")
+#endif
+JMESSAGE(JERR_BAD_ALIGN_TYPE, "ALIGN_TYPE is wrong, please fix")
+JMESSAGE(JERR_BAD_ALLOC_CHUNK, "MAX_ALLOC_CHUNK is wrong, please fix")
+JMESSAGE(JERR_BAD_BUFFER_MODE, "Bogus buffer control mode")
+JMESSAGE(JERR_BAD_COMPONENT_ID, "Invalid component ID %d in SOS")
+#if JPEG_LIB_VERSION >= 70
+JMESSAGE(JERR_BAD_CROP_SPEC, "Invalid crop request")
+#endif
+JMESSAGE(JERR_BAD_DCT_COEF, "DCT coefficient out of range")
+JMESSAGE(JERR_BAD_DCTSIZE, "IDCT output block size %d not supported")
+#if JPEG_LIB_VERSION >= 70
+JMESSAGE(JERR_BAD_DROP_SAMPLING,
+	 "Component index %d: mismatching sampling ratio %d:%d, %d:%d, %c")
+#endif
+JMESSAGE(JERR_BAD_HUFF_TABLE, "Bogus Huffman table definition")
+JMESSAGE(JERR_BAD_IN_COLORSPACE, "Bogus input colorspace")
+JMESSAGE(JERR_BAD_J_COLORSPACE, "Bogus JPEG colorspace")
+JMESSAGE(JERR_BAD_LENGTH, "Bogus marker length")
+JMESSAGE(JERR_BAD_LIB_VERSION,
+	 "Wrong JPEG library version: library is %d, caller expects %d")
+JMESSAGE(JERR_BAD_MCU_SIZE, "Sampling factors too large for interleaved scan")
+JMESSAGE(JERR_BAD_POOL_ID, "Invalid memory pool code %d")
+JMESSAGE(JERR_BAD_PRECISION, "Unsupported JPEG data precision %d")
+JMESSAGE(JERR_BAD_PROGRESSION,
+	 "Invalid progressive parameters Ss=%d Se=%d Ah=%d Al=%d")
+JMESSAGE(JERR_BAD_PROG_SCRIPT,
+	 "Invalid progressive parameters at scan script entry %d")
+JMESSAGE(JERR_BAD_SAMPLING, "Bogus sampling factors")
+JMESSAGE(JERR_BAD_SCAN_SCRIPT, "Invalid scan script at entry %d")
+JMESSAGE(JERR_BAD_STATE, "Improper call to JPEG library in state %d")
+JMESSAGE(JERR_BAD_STRUCT_SIZE,
+	 "JPEG parameter struct mismatch: library thinks size is %u, caller expects %u")
+JMESSAGE(JERR_BAD_VIRTUAL_ACCESS, "Bogus virtual array access")
+JMESSAGE(JERR_BUFFER_SIZE, "Buffer passed to JPEG library is too small")
+JMESSAGE(JERR_CANT_SUSPEND, "Suspension not allowed here")
+JMESSAGE(JERR_CCIR601_NOTIMPL, "CCIR601 sampling not implemented yet")
+JMESSAGE(JERR_COMPONENT_COUNT, "Too many color components: %d, max %d")
+JMESSAGE(JERR_CONVERSION_NOTIMPL, "Unsupported color conversion request")
+JMESSAGE(JERR_DAC_INDEX, "Bogus DAC index %d")
+JMESSAGE(JERR_DAC_VALUE, "Bogus DAC value 0x%x")
+JMESSAGE(JERR_DHT_INDEX, "Bogus DHT index %d")
+JMESSAGE(JERR_DQT_INDEX, "Bogus DQT index %d")
+JMESSAGE(JERR_EMPTY_IMAGE, "Empty JPEG image (DNL not supported)")
+JMESSAGE(JERR_EMS_READ, "Read from EMS failed")
+JMESSAGE(JERR_EMS_WRITE, "Write to EMS failed")
+JMESSAGE(JERR_EOI_EXPECTED, "Didn't expect more than one scan")
+JMESSAGE(JERR_FILE_READ, "Input file read error")
+JMESSAGE(JERR_FILE_WRITE, "Output file write error --- out of disk space?")
+JMESSAGE(JERR_FRACT_SAMPLE_NOTIMPL, "Fractional sampling not implemented yet")
+JMESSAGE(JERR_HUFF_CLEN_OVERFLOW, "Huffman code size table overflow")
+JMESSAGE(JERR_HUFF_MISSING_CODE, "Missing Huffman code table entry")
+JMESSAGE(JERR_IMAGE_TOO_BIG, "Maximum supported image dimension is %u pixels")
+JMESSAGE(JERR_INPUT_EMPTY, "Empty input file")
+JMESSAGE(JERR_INPUT_EOF, "Premature end of input file")
+JMESSAGE(JERR_MISMATCHED_QUANT_TABLE,
+	 "Cannot transcode due to multiple use of quantization table %d")
+JMESSAGE(JERR_MISSING_DATA, "Scan script does not transmit all data")
+JMESSAGE(JERR_MODE_CHANGE, "Invalid color quantization mode change")
+JMESSAGE(JERR_NOTIMPL, "Not implemented yet")
+JMESSAGE(JERR_NOT_COMPILED, "Requested feature was omitted at compile time")
+#if JPEG_LIB_VERSION >= 70
+JMESSAGE(JERR_NO_ARITH_TABLE, "Arithmetic table 0x%02x was not defined")
+#endif
+JMESSAGE(JERR_NO_BACKING_STORE, "Backing store not supported")
+JMESSAGE(JERR_NO_HUFF_TABLE, "Huffman table 0x%02x was not defined")
+JMESSAGE(JERR_NO_IMAGE, "JPEG datastream contains no image")
+JMESSAGE(JERR_NO_QUANT_TABLE, "Quantization table 0x%02x was not defined")
+JMESSAGE(JERR_NO_SOI, "Not a JPEG file: starts with 0x%02x 0x%02x")
+JMESSAGE(JERR_OUT_OF_MEMORY, "Insufficient memory (case %d)")
+JMESSAGE(JERR_QUANT_COMPONENTS,
+	 "Cannot quantize more than %d color components")
+JMESSAGE(JERR_QUANT_FEW_COLORS, "Cannot quantize to fewer than %d colors")
+JMESSAGE(JERR_QUANT_MANY_COLORS, "Cannot quantize to more than %d colors")
+JMESSAGE(JERR_SOF_DUPLICATE, "Invalid JPEG file structure: two SOF markers")
+JMESSAGE(JERR_SOF_NO_SOS, "Invalid JPEG file structure: missing SOS marker")
+JMESSAGE(JERR_SOF_UNSUPPORTED, "Unsupported JPEG process: SOF type 0x%02x")
+JMESSAGE(JERR_SOI_DUPLICATE, "Invalid JPEG file structure: two SOI markers")
+JMESSAGE(JERR_SOS_NO_SOF, "Invalid JPEG file structure: SOS before SOF")
+JMESSAGE(JERR_TFILE_CREATE, "Failed to create temporary file %s")
+JMESSAGE(JERR_TFILE_READ, "Read failed on temporary file")
+JMESSAGE(JERR_TFILE_SEEK, "Seek failed on temporary file")
+JMESSAGE(JERR_TFILE_WRITE,
+	 "Write failed on temporary file --- out of disk space?")
+JMESSAGE(JERR_TOO_LITTLE_DATA, "Application transferred too few scanlines")
+JMESSAGE(JERR_UNKNOWN_MARKER, "Unsupported marker type 0x%02x")
+JMESSAGE(JERR_VIRTUAL_BUG, "Virtual array controller messed up")
+JMESSAGE(JERR_WIDTH_OVERFLOW, "Image too wide for this implementation")
+JMESSAGE(JERR_XMS_READ, "Read from XMS failed")
+JMESSAGE(JERR_XMS_WRITE, "Write to XMS failed")
+JMESSAGE(JMSG_COPYRIGHT, JCOPYRIGHT)
+JMESSAGE(JMSG_VERSION, JVERSION)
+JMESSAGE(JTRC_16BIT_TABLES,
+	 "Caution: quantization tables are too coarse for baseline JPEG")
+JMESSAGE(JTRC_ADOBE,
+	 "Adobe APP14 marker: version %d, flags 0x%04x 0x%04x, transform %d")
+JMESSAGE(JTRC_APP0, "Unknown APP0 marker (not JFIF), length %u")
+JMESSAGE(JTRC_APP14, "Unknown APP14 marker (not Adobe), length %u")
+JMESSAGE(JTRC_DAC, "Define Arithmetic Table 0x%02x: 0x%02x")
+JMESSAGE(JTRC_DHT, "Define Huffman Table 0x%02x")
+JMESSAGE(JTRC_DQT, "Define Quantization Table %d  precision %d")
+JMESSAGE(JTRC_DRI, "Define Restart Interval %u")
+JMESSAGE(JTRC_EMS_CLOSE, "Freed EMS handle %u")
+JMESSAGE(JTRC_EMS_OPEN, "Obtained EMS handle %u")
+JMESSAGE(JTRC_EOI, "End Of Image")
+JMESSAGE(JTRC_HUFFBITS, "        %3d %3d %3d %3d %3d %3d %3d %3d")
+JMESSAGE(JTRC_JFIF, "JFIF APP0 marker: version %d.%02d, density %dx%d  %d")
+JMESSAGE(JTRC_JFIF_BADTHUMBNAILSIZE,
+	 "Warning: thumbnail image size does not match data length %u")
+JMESSAGE(JTRC_JFIF_EXTENSION,
+	 "JFIF extension marker: type 0x%02x, length %u")
+JMESSAGE(JTRC_JFIF_THUMBNAIL, "    with %d x %d thumbnail image")
+JMESSAGE(JTRC_MISC_MARKER, "Miscellaneous marker 0x%02x, length %u")
+JMESSAGE(JTRC_PARMLESS_MARKER, "Unexpected marker 0x%02x")
+JMESSAGE(JTRC_QUANTVALS, "        %4u %4u %4u %4u %4u %4u %4u %4u")
+JMESSAGE(JTRC_QUANT_3_NCOLORS, "Quantizing to %d = %d*%d*%d colors")
+JMESSAGE(JTRC_QUANT_NCOLORS, "Quantizing to %d colors")
+JMESSAGE(JTRC_QUANT_SELECTED, "Selected %d colors for quantization")
+JMESSAGE(JTRC_RECOVERY_ACTION, "At marker 0x%02x, recovery action %d")
+JMESSAGE(JTRC_RST, "RST%d")
+JMESSAGE(JTRC_SMOOTH_NOTIMPL,
+	 "Smoothing not supported with nonstandard sampling ratios")
+JMESSAGE(JTRC_SOF, "Start Of Frame 0x%02x: width=%u, height=%u, components=%d")
+JMESSAGE(JTRC_SOF_COMPONENT, "    Component %d: %dhx%dv q=%d")
+JMESSAGE(JTRC_SOI, "Start of Image")
+JMESSAGE(JTRC_SOS, "Start Of Scan: %d components")
+JMESSAGE(JTRC_SOS_COMPONENT, "    Component %d: dc=%d ac=%d")
+JMESSAGE(JTRC_SOS_PARAMS, "  Ss=%d, Se=%d, Ah=%d, Al=%d")
+JMESSAGE(JTRC_TFILE_CLOSE, "Closed temporary file %s")
+JMESSAGE(JTRC_TFILE_OPEN, "Opened temporary file %s")
+JMESSAGE(JTRC_THUMB_JPEG,
+	 "JFIF extension marker: JPEG-compressed thumbnail image, length %u")
+JMESSAGE(JTRC_THUMB_PALETTE,
+	 "JFIF extension marker: palette thumbnail image, length %u")
+JMESSAGE(JTRC_THUMB_RGB,
+	 "JFIF extension marker: RGB thumbnail image, length %u")
+JMESSAGE(JTRC_UNKNOWN_IDS,
+	 "Unrecognized component IDs %d %d %d, assuming YCbCr")
+JMESSAGE(JTRC_XMS_CLOSE, "Freed XMS handle %u")
+JMESSAGE(JTRC_XMS_OPEN, "Obtained XMS handle %u")
+JMESSAGE(JWRN_ADOBE_XFORM, "Unknown Adobe color transform code %d")
+#if JPEG_LIB_VERSION >= 70
+JMESSAGE(JWRN_ARITH_BAD_CODE, "Corrupt JPEG data: bad arithmetic code")
+#endif
+JMESSAGE(JWRN_BOGUS_PROGRESSION,
+	 "Inconsistent progression sequence for component %d coefficient %d")
+JMESSAGE(JWRN_EXTRANEOUS_DATA,
+	 "Corrupt JPEG data: %u extraneous bytes before marker 0x%02x")
+JMESSAGE(JWRN_HIT_MARKER, "Corrupt JPEG data: premature end of data segment")
+JMESSAGE(JWRN_HUFF_BAD_CODE, "Corrupt JPEG data: bad Huffman code")
+JMESSAGE(JWRN_JFIF_MAJOR, "Warning: unknown JFIF revision number %d.%02d")
+JMESSAGE(JWRN_JPEG_EOF, "Premature end of JPEG file")
+JMESSAGE(JWRN_MUST_RESYNC,
+	 "Corrupt JPEG data: found marker 0x%02x instead of RST%d")
+JMESSAGE(JWRN_NOT_SEQUENTIAL, "Invalid SOS parameters for sequential JPEG")
+JMESSAGE(JWRN_TOO_MUCH_DATA, "Application transferred too many scanlines")
+#if JPEG_LIB_VERSION < 70
+JMESSAGE(JERR_BAD_CROP_SPEC, "Invalid crop request")
+#if defined(C_ARITH_CODING_SUPPORTED) || defined(D_ARITH_CODING_SUPPORTED)
+JMESSAGE(JERR_NO_ARITH_TABLE, "Arithmetic table 0x%02x was not defined")
+JMESSAGE(JWRN_ARITH_BAD_CODE, "Corrupt JPEG data: bad arithmetic code")
+#endif
+#endif
+
+#ifdef JMAKE_ENUM_LIST
+
+  JMSG_LASTMSGCODE
+} J_MESSAGE_CODE;
+
+#undef JMAKE_ENUM_LIST
+#endif /* JMAKE_ENUM_LIST */
+
+/* Zap JMESSAGE macro so that future re-inclusions do nothing by default */
+#undef JMESSAGE
+
+
+#ifndef JERROR_H
+#define JERROR_H
+
+/* Macros to simplify using the error and trace message stuff */
+/* The first parameter is either type of cinfo pointer */
+
+/* Fatal errors (print message and exit) */
+#define ERREXIT(cinfo,code)  \
+  ((cinfo)->err->msg_code = (code), \
+   (*(cinfo)->err->error_exit) ((j_common_ptr) (cinfo)))
+#define ERREXIT1(cinfo,code,p1)  \
+  ((cinfo)->err->msg_code = (code), \
+   (cinfo)->err->msg_parm.i[0] = (p1), \
+   (*(cinfo)->err->error_exit) ((j_common_ptr) (cinfo)))
+#define ERREXIT2(cinfo,code,p1,p2)  \
+  ((cinfo)->err->msg_code = (code), \
+   (cinfo)->err->msg_parm.i[0] = (p1), \
+   (cinfo)->err->msg_parm.i[1] = (p2), \
+   (*(cinfo)->err->error_exit) ((j_common_ptr) (cinfo)))
+#define ERREXIT3(cinfo,code,p1,p2,p3)  \
+  ((cinfo)->err->msg_code = (code), \
+   (cinfo)->err->msg_parm.i[0] = (p1), \
+   (cinfo)->err->msg_parm.i[1] = (p2), \
+   (cinfo)->err->msg_parm.i[2] = (p3), \
+   (*(cinfo)->err->error_exit) ((j_common_ptr) (cinfo)))
+#define ERREXIT4(cinfo,code,p1,p2,p3,p4)  \
+  ((cinfo)->err->msg_code = (code), \
+   (cinfo)->err->msg_parm.i[0] = (p1), \
+   (cinfo)->err->msg_parm.i[1] = (p2), \
+   (cinfo)->err->msg_parm.i[2] = (p3), \
+   (cinfo)->err->msg_parm.i[3] = (p4), \
+   (*(cinfo)->err->error_exit) ((j_common_ptr) (cinfo)))
+#define ERREXITS(cinfo,code,str)  \
+  ((cinfo)->err->msg_code = (code), \
+   strncpy((cinfo)->err->msg_parm.s, (str), JMSG_STR_PARM_MAX), \
+   (*(cinfo)->err->error_exit) ((j_common_ptr) (cinfo)))
+
+#define MAKESTMT(stuff)		do { stuff } while (0)
+
+/* Nonfatal errors (we can keep going, but the data is probably corrupt) */
+#define WARNMS(cinfo,code)  \
+  ((cinfo)->err->msg_code = (code), \
+   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), -1))
+#define WARNMS1(cinfo,code,p1)  \
+  ((cinfo)->err->msg_code = (code), \
+   (cinfo)->err->msg_parm.i[0] = (p1), \
+   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), -1))
+#define WARNMS2(cinfo,code,p1,p2)  \
+  ((cinfo)->err->msg_code = (code), \
+   (cinfo)->err->msg_parm.i[0] = (p1), \
+   (cinfo)->err->msg_parm.i[1] = (p2), \
+   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), -1))
+
+/* Informational/debugging messages */
+#define TRACEMS(cinfo,lvl,code)  \
+  ((cinfo)->err->msg_code = (code), \
+   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), (lvl)))
+#define TRACEMS1(cinfo,lvl,code,p1)  \
+  ((cinfo)->err->msg_code = (code), \
+   (cinfo)->err->msg_parm.i[0] = (p1), \
+   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), (lvl)))
+#define TRACEMS2(cinfo,lvl,code,p1,p2)  \
+  ((cinfo)->err->msg_code = (code), \
+   (cinfo)->err->msg_parm.i[0] = (p1), \
+   (cinfo)->err->msg_parm.i[1] = (p2), \
+   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), (lvl)))
+#define TRACEMS3(cinfo,lvl,code,p1,p2,p3)  \
+  MAKESTMT(int * _mp = (cinfo)->err->msg_parm.i; \
+	   _mp[0] = (p1); _mp[1] = (p2); _mp[2] = (p3); \
+	   (cinfo)->err->msg_code = (code); \
+	   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), (lvl)); )
+#define TRACEMS4(cinfo,lvl,code,p1,p2,p3,p4)  \
+  MAKESTMT(int * _mp = (cinfo)->err->msg_parm.i; \
+	   _mp[0] = (p1); _mp[1] = (p2); _mp[2] = (p3); _mp[3] = (p4); \
+	   (cinfo)->err->msg_code = (code); \
+	   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), (lvl)); )
+#define TRACEMS5(cinfo,lvl,code,p1,p2,p3,p4,p5)  \
+  MAKESTMT(int * _mp = (cinfo)->err->msg_parm.i; \
+	   _mp[0] = (p1); _mp[1] = (p2); _mp[2] = (p3); _mp[3] = (p4); \
+	   _mp[4] = (p5); \
+	   (cinfo)->err->msg_code = (code); \
+	   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), (lvl)); )
+#define TRACEMS8(cinfo,lvl,code,p1,p2,p3,p4,p5,p6,p7,p8)  \
+  MAKESTMT(int * _mp = (cinfo)->err->msg_parm.i; \
+	   _mp[0] = (p1); _mp[1] = (p2); _mp[2] = (p3); _mp[3] = (p4); \
+	   _mp[4] = (p5); _mp[5] = (p6); _mp[6] = (p7); _mp[7] = (p8); \
+	   (cinfo)->err->msg_code = (code); \
+	   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), (lvl)); )
+#define TRACEMSS(cinfo,lvl,code,str)  \
+  ((cinfo)->err->msg_code = (code), \
+   strncpy((cinfo)->err->msg_parm.s, (str), JMSG_STR_PARM_MAX), \
+   (*(cinfo)->err->emit_message) ((j_common_ptr) (cinfo), (lvl)))
+
+#endif /* JERROR_H */

+ 91 - 0
camerakit/src/main/cpp/libjpeg/include/jinclude.h

@@ -0,0 +1,91 @@
+/*
+ * jinclude.h
+ *
+ * Copyright (C) 1991-1994, Thomas G. Lane.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file exists to provide a single place to fix any problems with
+ * including the wrong system include files.  (Common problems are taken
+ * care of by the standard jconfig symbols, but on really weird systems
+ * you may have to edit this file.)
+ *
+ * NOTE: this file is NOT intended to be included by applications using the
+ * JPEG library.  Most applications need only include jpeglib.h.
+ */
+
+
+/* Include auto-config file to find out which system include files we need. */
+
+#include "jconfig.h"		/* auto configuration options */
+#define JCONFIG_INCLUDED	/* so that jpeglib.h doesn't do it again */
+
+/*
+ * We need the NULL macro and size_t typedef.
+ * On an ANSI-conforming system it is sufficient to include <stddef.h>.
+ * Otherwise, we get them from <stdlib.h> or <stdio.h>; we may have to
+ * pull in <sys/types.h> as well.
+ * Note that the core JPEG library does not require <stdio.h>;
+ * only the default error handler and data source/destination modules do.
+ * But we must pull it in because of the references to FILE in jpeglib.h.
+ * You can remove those references if you want to compile without <stdio.h>.
+ */
+
+#ifdef HAVE_STDDEF_H
+#include <stddef.h>
+#endif
+
+#ifdef HAVE_STDLIB_H
+#include <stdlib.h>
+#endif
+
+#ifdef NEED_SYS_TYPES_H
+#include <sys/types.h>
+#endif
+
+#include <stdio.h>
+
+/*
+ * We need memory copying and zeroing functions, plus strncpy().
+ * ANSI and System V implementations declare these in <string.h>.
+ * BSD doesn't have the mem() functions, but it does have bcopy()/bzero().
+ * Some systems may declare memset and memcpy in <memory.h>.
+ *
+ * NOTE: we assume the size parameters to these functions are of type size_t.
+ * Change the casts in these macros if not!
+ */
+
+#ifdef NEED_BSD_STRINGS
+
+#include <strings.h>
+#define MEMZERO(target,size)	bzero((void *)(target), (size_t)(size))
+#define MEMCOPY(dest,src,size)	bcopy((const void *)(src), (void *)(dest), (size_t)(size))
+
+#else /* not BSD, assume ANSI/SysV string lib */
+
+#include <string.h>
+#define MEMZERO(target,size)	memset((void *)(target), 0, (size_t)(size))
+#define MEMCOPY(dest,src,size)	memcpy((void *)(dest), (const void *)(src), (size_t)(size))
+
+#endif
+
+/*
+ * In ANSI C, and indeed any rational implementation, size_t is also the
+ * type returned by sizeof().  However, it seems there are some irrational
+ * implementations out there, in which sizeof() returns an int even though
+ * size_t is defined as long or unsigned long.  To ensure consistent results
+ * we always use this SIZEOF() macro in place of using sizeof() directly.
+ */
+
+#define SIZEOF(object)	((size_t) sizeof(object))
+
+/*
+ * The modules that use fread() and fwrite() always invoke them through
+ * these macros.  On some systems you may need to twiddle the argument casts.
+ * CAUTION: argument order is different from underlying functions!
+ */
+
+#define JFREAD(file,buf,sizeofbuf)  \
+  ((size_t) fread((void *) (buf), (size_t) 1, (size_t) (sizeofbuf), (file)))
+#define JFWRITE(file,buf,sizeofbuf)  \
+  ((size_t) fwrite((const void *) (buf), (size_t) 1, (size_t) (sizeofbuf), (file)))

+ 198 - 0
camerakit/src/main/cpp/libjpeg/include/jmemsys.h

@@ -0,0 +1,198 @@
+/*
+ * jmemsys.h
+ *
+ * Copyright (C) 1992-1997, Thomas G. Lane.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This include file defines the interface between the system-independent
+ * and system-dependent portions of the JPEG memory manager.  No other
+ * modules need include it.  (The system-independent portion is jmemmgr.c;
+ * there are several different versions of the system-dependent portion.)
+ *
+ * This file works as-is for the system-dependent memory managers supplied
+ * in the IJG distribution.  You may need to modify it if you write a
+ * custom memory manager.  If system-dependent changes are needed in
+ * this file, the best method is to #ifdef them based on a configuration
+ * symbol supplied in jconfig.h, as we have done with USE_MSDOS_MEMMGR
+ * and USE_MAC_MEMMGR.
+ */
+
+
+/* Short forms of external names for systems with brain-damaged linkers. */
+
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+#define jpeg_get_small		jGetSmall
+#define jpeg_free_small		jFreeSmall
+#define jpeg_get_large		jGetLarge
+#define jpeg_free_large		jFreeLarge
+#define jpeg_mem_available	jMemAvail
+#define jpeg_open_backing_store	jOpenBackStore
+#define jpeg_mem_init		jMemInit
+#define jpeg_mem_term		jMemTerm
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+
+
+/*
+ * These two functions are used to allocate and release small chunks of
+ * memory.  (Typically the total amount requested through jpeg_get_small is
+ * no more than 20K or so; this will be requested in chunks of a few K each.)
+ * Behavior should be the same as for the standard library functions malloc
+ * and free; in particular, jpeg_get_small must return NULL on failure.
+ * On most systems, these ARE malloc and free.  jpeg_free_small is passed the
+ * size of the object being freed, just in case it's needed.
+ * On an 80x86 machine using small-data memory model, these manage near heap.
+ */
+
+EXTERN(void *) jpeg_get_small JPP((j_common_ptr cinfo, size_t sizeofobject));
+EXTERN(void) jpeg_free_small JPP((j_common_ptr cinfo, void * object,
+				  size_t sizeofobject));
+
+/*
+ * These two functions are used to allocate and release large chunks of
+ * memory (up to the total free space designated by jpeg_mem_available).
+ * The interface is the same as above, except that on an 80x86 machine,
+ * far pointers are used.  On most other machines these are identical to
+ * the jpeg_get/free_small routines; but we keep them separate anyway,
+ * in case a different allocation strategy is desirable for large chunks.
+ */
+
+EXTERN(void FAR *) jpeg_get_large JPP((j_common_ptr cinfo,
+				       size_t sizeofobject));
+EXTERN(void) jpeg_free_large JPP((j_common_ptr cinfo, void FAR * object,
+				  size_t sizeofobject));
+
+/*
+ * The macro MAX_ALLOC_CHUNK designates the maximum number of bytes that may
+ * be requested in a single call to jpeg_get_large (and jpeg_get_small for that
+ * matter, but that case should never come into play).  This macro is needed
+ * to model the 64Kb-segment-size limit of far addressing on 80x86 machines.
+ * On those machines, we expect that jconfig.h will provide a proper value.
+ * On machines with 32-bit flat address spaces, any large constant may be used.
+ *
+ * NB: jmemmgr.c expects that MAX_ALLOC_CHUNK will be representable as type
+ * size_t and will be a multiple of sizeof(align_type).
+ */
+
+#ifndef MAX_ALLOC_CHUNK		/* may be overridden in jconfig.h */
+#define MAX_ALLOC_CHUNK  1000000000L
+#endif
+
+/*
+ * This routine computes the total space still available for allocation by
+ * jpeg_get_large.  If more space than this is needed, backing store will be
+ * used.  NOTE: any memory already allocated must not be counted.
+ *
+ * There is a minimum space requirement, corresponding to the minimum
+ * feasible buffer sizes; jmemmgr.c will request that much space even if
+ * jpeg_mem_available returns zero.  The maximum space needed, enough to hold
+ * all working storage in memory, is also passed in case it is useful.
+ * Finally, the total space already allocated is passed.  If no better
+ * method is available, cinfo->mem->max_memory_to_use - already_allocated
+ * is often a suitable calculation.
+ *
+ * It is OK for jpeg_mem_available to underestimate the space available
+ * (that'll just lead to more backing-store access than is really necessary).
+ * However, an overestimate will lead to failure.  Hence it's wise to subtract
+ * a slop factor from the true available space.  5% should be enough.
+ *
+ * On machines with lots of virtual memory, any large constant may be returned.
+ * Conversely, zero may be returned to always use the minimum amount of memory.
+ */
+
+EXTERN(size_t) jpeg_mem_available JPP((j_common_ptr cinfo,
+				     size_t min_bytes_needed,
+				     size_t max_bytes_needed,
+				     size_t already_allocated));
+
+
+/*
+ * This structure holds whatever state is needed to access a single
+ * backing-store object.  The read/write/close method pointers are called
+ * by jmemmgr.c to manipulate the backing-store object; all other fields
+ * are private to the system-dependent backing store routines.
+ */
+
+#define TEMP_NAME_LENGTH   64	/* max length of a temporary file's name */
+
+
+#ifdef USE_MSDOS_MEMMGR		/* DOS-specific junk */
+
+typedef unsigned short XMSH;	/* type of extended-memory handles */
+typedef unsigned short EMSH;	/* type of expanded-memory handles */
+
+typedef union {
+  short file_handle;		/* DOS file handle if it's a temp file */
+  XMSH xms_handle;		/* handle if it's a chunk of XMS */
+  EMSH ems_handle;		/* handle if it's a chunk of EMS */
+} handle_union;
+
+#endif /* USE_MSDOS_MEMMGR */
+
+#ifdef USE_MAC_MEMMGR		/* Mac-specific junk */
+#include <Files.h>
+#endif /* USE_MAC_MEMMGR */
+
+
+typedef struct backing_store_struct * backing_store_ptr;
+
+typedef struct backing_store_struct {
+  /* Methods for reading/writing/closing this backing-store object */
+  JMETHOD(void, read_backing_store, (j_common_ptr cinfo,
+				     backing_store_ptr info,
+				     void FAR * buffer_address,
+				     long file_offset, long byte_count));
+  JMETHOD(void, write_backing_store, (j_common_ptr cinfo,
+				      backing_store_ptr info,
+				      void FAR * buffer_address,
+				      long file_offset, long byte_count));
+  JMETHOD(void, close_backing_store, (j_common_ptr cinfo,
+				      backing_store_ptr info));
+
+  /* Private fields for system-dependent backing-store management */
+#ifdef USE_MSDOS_MEMMGR
+  /* For the MS-DOS manager (jmemdos.c), we need: */
+  handle_union handle;		/* reference to backing-store storage object */
+  char temp_name[TEMP_NAME_LENGTH]; /* name if it's a file */
+#else
+#ifdef USE_MAC_MEMMGR
+  /* For the Mac manager (jmemmac.c), we need: */
+  short temp_file;		/* file reference number to temp file */
+  FSSpec tempSpec;		/* the FSSpec for the temp file */
+  char temp_name[TEMP_NAME_LENGTH]; /* name if it's a file */
+#else
+  /* For a typical implementation with temp files, we need: */
+  FILE * temp_file;		/* stdio reference to temp file */
+  char temp_name[TEMP_NAME_LENGTH]; /* name of temp file */
+#endif
+#endif
+} backing_store_info;
+
+
+/*
+ * Initial opening of a backing-store object.  This must fill in the
+ * read/write/close pointers in the object.  The read/write routines
+ * may take an error exit if the specified maximum file size is exceeded.
+ * (If jpeg_mem_available always returns a large value, this routine can
+ * just take an error exit.)
+ */
+
+EXTERN(void) jpeg_open_backing_store JPP((j_common_ptr cinfo,
+					  backing_store_ptr info,
+					  long total_bytes_needed));
+
+
+/*
+ * These routines take care of any system-dependent initialization and
+ * cleanup required.  jpeg_mem_init will be called before anything is
+ * allocated (and, therefore, nothing in cinfo is of use except the error
+ * manager pointer).  It should return a suitable default value for
+ * max_memory_to_use; this may subsequently be overridden by the surrounding
+ * application.  (Note that max_memory_to_use is only important if
+ * jpeg_mem_available chooses to consult it ... no one else will.)
+ * jpeg_mem_term may assume that all requested memory has been freed and that
+ * all opened backing-store objects have been closed.
+ */
+
+EXTERN(long) jpeg_mem_init JPP((j_common_ptr cinfo));
+EXTERN(void) jpeg_mem_term JPP((j_common_ptr cinfo));

+ 446 - 0
camerakit/src/main/cpp/libjpeg/include/jmorecfg.h

@@ -0,0 +1,446 @@
+/*
+ * jmorecfg.h
+ *
+ * Copyright (C) 1991-1997, Thomas G. Lane.
+ * Copyright (C) 2009, 2011, D. R. Commander.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file contains additional configuration options that customize the
+ * JPEG software for special applications or support machine-dependent
+ * optimizations.  Most users will not need to touch this file.
+ */
+
+/*
+ * When we're building for android, turn on ANDROID_RGB by default. 
+ * This is needed for components like skia which make use of the
+ * new encodings defined behind ANDROID_RBG. It's not a reasonable
+ * config to have ANDROID_RBG off.
+ */
+#ifdef ANDROID
+#ifndef ANDROID_RGB
+#define ANDROID_RGB
+#endif
+#endif
+
+/*
+ * Define BITS_IN_JSAMPLE as either
+ *   8   for 8-bit sample values (the usual setting)
+ *   12  for 12-bit sample values
+ * Only 8 and 12 are legal data precisions for lossy JPEG according to the
+ * JPEG standard, and the IJG code does not support anything else!
+ * We do not support run-time selection of data precision, sorry.
+ */
+
+#define BITS_IN_JSAMPLE  8	/* use 8 or 12 */
+
+
+/*
+ * Maximum number of components (color channels) allowed in JPEG image.
+ * To meet the letter of the JPEG spec, set this to 255.  However, darn
+ * few applications need more than 4 channels (maybe 5 for CMYK + alpha
+ * mask).  We recommend 10 as a reasonable compromise; use 4 if you are
+ * really short on memory.  (Each allowed component costs a hundred or so
+ * bytes of storage, whether actually used in an image or not.)
+ */
+
+#define MAX_COMPONENTS  10	/* maximum number of image components */
+
+
+/*
+ * Basic data types.
+ * You may need to change these if you have a machine with unusual data
+ * type sizes; for example, "char" not 8 bits, "short" not 16 bits,
+ * or "long" not 32 bits.  We don't care whether "int" is 16 or 32 bits,
+ * but it had better be at least 16.
+ */
+
+/* Representation of a single sample (pixel element value).
+ * We frequently allocate large arrays of these, so it's important to keep
+ * them small.  But if you have memory to burn and access to char or short
+ * arrays is very slow on your hardware, you might want to change these.
+ */
+
+#if BITS_IN_JSAMPLE == 8
+/* JSAMPLE should be the smallest type that will hold the values 0..255.
+ * You can use a signed char by having GETJSAMPLE mask it with 0xFF.
+ */
+
+#ifdef HAVE_UNSIGNED_CHAR
+
+typedef unsigned char JSAMPLE;
+#define GETJSAMPLE(value)  ((int) (value))
+
+#else /* not HAVE_UNSIGNED_CHAR */
+
+typedef char JSAMPLE;
+#ifdef __CHAR_UNSIGNED__
+#define GETJSAMPLE(value)  ((int) (value))
+#else
+#define GETJSAMPLE(value)  ((int) (value) & 0xFF)
+#endif /* __CHAR_UNSIGNED__ */
+
+#endif /* HAVE_UNSIGNED_CHAR */
+
+#define MAXJSAMPLE	255
+#define CENTERJSAMPLE	128
+
+#endif /* BITS_IN_JSAMPLE == 8 */
+
+
+#if BITS_IN_JSAMPLE == 12
+/* JSAMPLE should be the smallest type that will hold the values 0..4095.
+ * On nearly all machines "short" will do nicely.
+ */
+
+typedef short JSAMPLE;
+#define GETJSAMPLE(value)  ((int) (value))
+
+#define MAXJSAMPLE	4095
+#define CENTERJSAMPLE	2048
+
+#endif /* BITS_IN_JSAMPLE == 12 */
+
+
+/* Representation of a DCT frequency coefficient.
+ * This should be a signed value of at least 16 bits; "short" is usually OK.
+ * Again, we allocate large arrays of these, but you can change to int
+ * if you have memory to burn and "short" is really slow.
+ */
+
+typedef short JCOEF;
+
+
+/* Compressed datastreams are represented as arrays of JOCTET.
+ * These must be EXACTLY 8 bits wide, at least once they are written to
+ * external storage.  Note that when using the stdio data source/destination
+ * managers, this is also the data type passed to fread/fwrite.
+ */
+
+#ifdef HAVE_UNSIGNED_CHAR
+
+typedef unsigned char JOCTET;
+#define GETJOCTET(value)  (value)
+
+#else /* not HAVE_UNSIGNED_CHAR */
+
+typedef char JOCTET;
+#ifdef __CHAR_UNSIGNED__
+#define GETJOCTET(value)  (value)
+#else
+#define GETJOCTET(value)  ((value) & 0xFF)
+#endif /* __CHAR_UNSIGNED__ */
+
+#endif /* HAVE_UNSIGNED_CHAR */
+
+
+/* These typedefs are used for various table entries and so forth.
+ * They must be at least as wide as specified; but making them too big
+ * won't cost a huge amount of memory, so we don't provide special
+ * extraction code like we did for JSAMPLE.  (In other words, these
+ * typedefs live at a different point on the speed/space tradeoff curve.)
+ */
+
+/* UINT8 must hold at least the values 0..255. */
+
+#ifdef HAVE_UNSIGNED_CHAR
+typedef unsigned char UINT8;
+#else /* not HAVE_UNSIGNED_CHAR */
+#ifdef __CHAR_UNSIGNED__
+typedef char UINT8;
+#else /* not __CHAR_UNSIGNED__ */
+typedef short UINT8;
+#endif /* __CHAR_UNSIGNED__ */
+#endif /* HAVE_UNSIGNED_CHAR */
+
+/* UINT16 must hold at least the values 0..65535. */
+
+#ifdef HAVE_UNSIGNED_SHORT
+typedef unsigned short UINT16;
+#else /* not HAVE_UNSIGNED_SHORT */
+typedef unsigned int UINT16;
+#endif /* HAVE_UNSIGNED_SHORT */
+
+/* INT16 must hold at least the values -32768..32767. */
+
+#ifndef XMD_H			/* X11/xmd.h correctly defines INT16 */
+typedef short INT16;
+#endif
+
+/* INT32 must hold at least signed 32-bit values. */
+
+#ifndef XMD_H			/* X11/xmd.h correctly defines INT32 */
+typedef long INT32;
+#endif
+
+/* Datatype used for image dimensions.  The JPEG standard only supports
+ * images up to 64K*64K due to 16-bit fields in SOF markers.  Therefore
+ * "unsigned int" is sufficient on all machines.  However, if you need to
+ * handle larger images and you don't mind deviating from the spec, you
+ * can change this datatype.
+ */
+
+typedef unsigned int JDIMENSION;
+
+#define JPEG_MAX_DIMENSION  65500L  /* a tad under 64K to prevent overflows */
+
+
+/* These macros are used in all function definitions and extern declarations.
+ * You could modify them if you need to change function linkage conventions;
+ * in particular, you'll need to do that to make the library a Windows DLL.
+ * Another application is to make all functions global for use with debuggers
+ * or code profilers that require it.
+ */
+
+/* a function called through method pointers: */
+#define METHODDEF(type)		static type
+/* a function used only in its module: */
+#define LOCAL(type)		static type
+/* a function referenced thru EXTERNs: */
+#define GLOBAL(type)		type
+/* a reference to a GLOBAL function: */
+#define EXTERN(type)		extern type
+
+
+/* This macro is used to declare a "method", that is, a function pointer.
+ * We want to supply prototype parameters if the compiler can cope.
+ * Note that the arglist parameter must be parenthesized!
+ * Again, you can customize this if you need special linkage keywords.
+ */
+
+#ifdef HAVE_PROTOTYPES
+#define JMETHOD(type,methodname,arglist)  type (*methodname) arglist
+#else
+#define JMETHOD(type,methodname,arglist)  type (*methodname) ()
+#endif
+
+
+/* Here is the pseudo-keyword for declaring pointers that must be "far"
+ * on 80x86 machines.  Most of the specialized coding for 80x86 is handled
+ * by just saying "FAR *" where such a pointer is needed.  In a few places
+ * explicit coding is needed; see uses of the NEED_FAR_POINTERS symbol.
+ */
+
+#ifdef NEED_FAR_POINTERS
+#define FAR  far
+#else
+#define FAR
+#endif
+
+
+/*
+ * On a few systems, type boolean and/or its values FALSE, TRUE may appear
+ * in standard header files.  Or you may have conflicts with application-
+ * specific header files that you want to include together with these files.
+ * Defining HAVE_BOOLEAN before including jpeglib.h should make it work.
+ */
+
+#ifndef HAVE_BOOLEAN
+typedef int boolean;
+#endif
+#ifndef FALSE			/* in case these macros already exist */
+#define FALSE	0		/* values of boolean */
+#endif
+#ifndef TRUE
+#define TRUE	1
+#endif
+
+
+/*
+ * The remaining options affect code selection within the JPEG library,
+ * but they don't need to be visible to most applications using the library.
+ * To minimize application namespace pollution, the symbols won't be
+ * defined unless JPEG_INTERNALS or JPEG_INTERNAL_OPTIONS has been defined.
+ */
+
+#ifdef JPEG_INTERNALS
+#define JPEG_INTERNAL_OPTIONS
+#endif
+
+#ifdef JPEG_INTERNAL_OPTIONS
+
+
+/*
+ * These defines indicate whether to include various optional functions.
+ * Undefining some of these symbols will produce a smaller but less capable
+ * library.  Note that you can leave certain source files out of the
+ * compilation/linking process if you've #undef'd the corresponding symbols.
+ * (You may HAVE to do that if your compiler doesn't like null source files.)
+ */
+
+/* Capability options common to encoder and decoder: */
+
+#define DCT_ISLOW_SUPPORTED	/* slow but accurate integer algorithm */
+#define DCT_IFAST_SUPPORTED	/* faster, less accurate integer method */
+#define DCT_FLOAT_SUPPORTED	/* floating-point: accurate, fast on fast HW */
+
+/* Encoder capability options: */
+
+#define C_MULTISCAN_FILES_SUPPORTED /* Multiple-scan JPEG files? */
+#define C_PROGRESSIVE_SUPPORTED	    /* Progressive JPEG? (Requires MULTISCAN)*/
+#define ENTROPY_OPT_SUPPORTED	    /* Optimization of entropy coding parms? */
+/* Note: if you selected 12-bit data precision, it is dangerous to turn off
+ * ENTROPY_OPT_SUPPORTED.  The standard Huffman tables are only good for 8-bit
+ * precision, so jchuff.c normally uses entropy optimization to compute
+ * usable tables for higher precision.  If you don't want to do optimization,
+ * you'll have to supply different default Huffman tables.
+ * The exact same statements apply for progressive JPEG: the default tables
+ * don't work for progressive mode.  (This may get fixed, however.)
+ */
+#define INPUT_SMOOTHING_SUPPORTED   /* Input image smoothing option? */
+
+/* Decoder capability options: */
+
+#define D_MULTISCAN_FILES_SUPPORTED /* Multiple-scan JPEG files? */
+#define D_PROGRESSIVE_SUPPORTED	    /* Progressive JPEG? (Requires MULTISCAN)*/
+#define SAVE_MARKERS_SUPPORTED	    /* jpeg_save_markers() needed? */
+#define BLOCK_SMOOTHING_SUPPORTED   /* Block smoothing? (Progressive only) */
+#define IDCT_SCALING_SUPPORTED	    /* Output rescaling via IDCT? */
+#undef  UPSAMPLE_SCALING_SUPPORTED  /* Output rescaling at upsample stage? */
+#define UPSAMPLE_MERGING_SUPPORTED  /* Fast path for sloppy upsampling? */
+#define QUANT_1PASS_SUPPORTED	    /* 1-pass color quantization? */
+#define QUANT_2PASS_SUPPORTED	    /* 2-pass color quantization? */
+
+/* more capability options later, no doubt */
+
+
+/*
+ * Ordering of RGB data in scanlines passed to or from the application.
+ * If your application wants to deal with data in the order B,G,R, just
+ * change these macros.  You can also deal with formats such as R,G,B,X
+ * (one extra byte per pixel) by changing RGB_PIXELSIZE.  Note that changing
+ * the offsets will also change the order in which colormap data is organized.
+ * RESTRICTIONS:
+ * 1. The sample applications cjpeg,djpeg do NOT support modified RGB formats.
+ * 2. These macros only affect RGB<=>YCbCr color conversion, so they are not
+ *    useful if you are using JPEG color spaces other than YCbCr or grayscale.
+ * 3. The color quantizer modules will not behave desirably if RGB_PIXELSIZE
+ *    is not 3 (they don't understand about dummy color components!).  So you
+ *    can't use color quantization if you change that value.
+ */
+
+#define RGB_RED		0	/* Offset of Red in an RGB scanline element */
+#define RGB_GREEN	1	/* Offset of Green */
+#define RGB_BLUE	2	/* Offset of Blue */
+#define RGB_PIXELSIZE	3	/* JSAMPLEs per RGB scanline element */
+
+#ifdef ANDROID_RGB
+#define RGB_ALPHA   3   /* Offset of Alpha */
+#endif
+
+#define JPEG_NUMCS 16
+
+#define EXT_RGB_RED        0
+#define EXT_RGB_GREEN      1
+#define EXT_RGB_BLUE       2
+#define EXT_RGB_PIXELSIZE  3
+
+#define EXT_RGBX_RED       0
+#define EXT_RGBX_GREEN     1
+#define EXT_RGBX_BLUE      2
+#define EXT_RGBX_PIXELSIZE 4
+
+#define EXT_BGR_RED        2
+#define EXT_BGR_GREEN      1
+#define EXT_BGR_BLUE       0
+#define EXT_BGR_PIXELSIZE  3
+
+#define EXT_BGRX_RED       2
+#define EXT_BGRX_GREEN     1
+#define EXT_BGRX_BLUE      0
+#define EXT_BGRX_PIXELSIZE 4
+
+#define EXT_XBGR_RED       3
+#define EXT_XBGR_GREEN     2
+#define EXT_XBGR_BLUE      1
+#define EXT_XBGR_PIXELSIZE 4
+
+#define EXT_XRGB_RED       1
+#define EXT_XRGB_GREEN     2
+#define EXT_XRGB_BLUE      3
+#define EXT_XRGB_PIXELSIZE 4
+
+#ifdef ANDROID_RGB
+#define RGB_ALPHA   3   /* Offset of Alpha */
+#endif
+
+static const int rgb_red[JPEG_NUMCS] = {
+  -1, -1, RGB_RED, -1, -1, -1, EXT_RGB_RED, EXT_RGBX_RED,
+  EXT_BGR_RED, EXT_BGRX_RED, EXT_XBGR_RED, EXT_XRGB_RED,
+  EXT_RGBX_RED, EXT_BGRX_RED, EXT_XBGR_RED, EXT_XRGB_RED
+};
+
+static const int rgb_green[JPEG_NUMCS] = {
+  -1, -1, RGB_GREEN, -1, -1, -1, EXT_RGB_GREEN, EXT_RGBX_GREEN,
+  EXT_BGR_GREEN, EXT_BGRX_GREEN, EXT_XBGR_GREEN, EXT_XRGB_GREEN,
+  EXT_RGBX_GREEN, EXT_BGRX_GREEN, EXT_XBGR_GREEN, EXT_XRGB_GREEN
+};
+
+static const int rgb_blue[JPEG_NUMCS] = {
+  -1, -1, RGB_BLUE, -1, -1, -1, EXT_RGB_BLUE, EXT_RGBX_BLUE,
+  EXT_BGR_BLUE, EXT_BGRX_BLUE, EXT_XBGR_BLUE, EXT_XRGB_BLUE,
+  EXT_RGBX_BLUE, EXT_BGRX_BLUE, EXT_XBGR_BLUE, EXT_XRGB_BLUE
+};
+
+static const int rgb_pixelsize[JPEG_NUMCS] = {
+  -1, -1, RGB_PIXELSIZE, -1, -1, -1, EXT_RGB_PIXELSIZE, EXT_RGBX_PIXELSIZE,
+  EXT_BGR_PIXELSIZE, EXT_BGRX_PIXELSIZE, EXT_XBGR_PIXELSIZE, EXT_XRGB_PIXELSIZE,
+  EXT_RGBX_PIXELSIZE, EXT_BGRX_PIXELSIZE, EXT_XBGR_PIXELSIZE, EXT_XRGB_PIXELSIZE
+};
+
+
+/*
+ * Define ANDROID_RGB to enable specific optimizations for Android
+ *   JCS_RGBA_8888 support
+ *   JCS_RGB_565 support
+ *
+ */
+
+#ifdef ANDROID_RGB
+#define PACK_SHORT_565(r,g,b)  ((((r)<<8)&0xf800)|(((g)<<3)&0x7E0)|((b)>>3))
+#define PACK_TWO_PIXELS(l,r)   ((r<<16) | l)
+#define PACK_NEED_ALIGNMENT(ptr) (((int)(ptr))&3)
+#define WRITE_TWO_PIXELS(addr, pixels) do {     \
+         ((INT16*)(addr))[0] = (pixels);        \
+         ((INT16*)(addr))[1] = (pixels)>>16;    \
+    } while(0)
+#define WRITE_TWO_ALIGNED_PIXELS(addr, pixels)  ((*(INT32*)(addr)) = pixels)
+#define DITHER_565_R(r, dither) ((r) + ((dither)&0xFF))
+#define DITHER_565_G(g, dither) ((g) + (((dither)&0xFF)>>1))
+#define DITHER_565_B(b, dither) ((b) + ((dither)&0xFF))
+#endif
+
+
+/* Definitions for speed-related optimizations. */
+
+/* On some machines (notably 68000 series) "int" is 32 bits, but multiplying
+ * two 16-bit shorts is faster than multiplying two ints.  Define MULTIPLIER
+ * as short on such a machine.  MULTIPLIER must be at least 16 bits wide.
+ */
+
+#ifndef MULTIPLIER
+#ifndef WITH_SIMD
+#define MULTIPLIER  int		/* type for fastest integer multiply */
+#else
+#define MULTIPLIER short  /* prefer 16-bit with SIMD for parellelism */
+#endif
+#endif
+
+
+/* FAST_FLOAT should be either float or double, whichever is done faster
+ * by your compiler.  (Note that this type is only used in the floating point
+ * DCT routines, so it only matters if you've defined DCT_FLOAT_SUPPORTED.)
+ * Typically, float is faster in ANSI C compilers, while double is faster in
+ * pre-ANSI compilers (because they insist on converting to double anyway).
+ * The code below therefore chooses float if we have ANSI-style prototypes.
+ */
+
+#ifndef FAST_FLOAT
+#ifdef HAVE_PROTOTYPES
+#define FAST_FLOAT  float
+#else
+#define FAST_FLOAT  double
+#endif
+#endif
+
+#endif /* JPEG_INTERNAL_OPTIONS */

+ 26 - 0
camerakit/src/main/cpp/libjpeg/include/jpegcomp.h

@@ -0,0 +1,26 @@
+/*
+ * jpegcomp.h
+ *
+ * Copyright (C) 2010, D. R. Commander
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * JPEG compatibility macros
+ * These declarations are considered internal to the JPEG library; most
+ * applications using the library shouldn't need to include this file.
+ */
+
+#if JPEG_LIB_VERSION >= 70
+#define _DCT_scaled_size DCT_h_scaled_size
+#define _min_DCT_scaled_size min_DCT_h_scaled_size
+#define _min_DCT_h_scaled_size min_DCT_h_scaled_size
+#define _min_DCT_v_scaled_size min_DCT_v_scaled_size
+#define _jpeg_width jpeg_width
+#define _jpeg_height jpeg_height
+#else
+#define _DCT_scaled_size DCT_scaled_size
+#define _min_DCT_scaled_size min_DCT_scaled_size
+#define _min_DCT_h_scaled_size min_DCT_scaled_size
+#define _min_DCT_v_scaled_size min_DCT_scaled_size
+#define _jpeg_width image_width
+#define _jpeg_height image_height
+#endif

+ 460 - 0
camerakit/src/main/cpp/libjpeg/include/jpegint.h

@@ -0,0 +1,460 @@
+/*
+ * jpegint.h
+ *
+ * Copyright (C) 1991-1997, Thomas G. Lane.
+ * Modified 1997-2009 by Guido Vollbeding.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file provides common declarations for the various JPEG modules.
+ * These declarations are considered internal to the JPEG library; most
+ * applications using the library shouldn't need to include this file.
+ */
+
+
+/* Declarations for both compression & decompression */
+
+typedef enum {			/* Operating modes for buffer controllers */
+	JBUF_PASS_THRU,		/* Plain stripwise operation */
+	/* Remaining modes require a full-image buffer to have been created */
+	JBUF_SAVE_SOURCE,	/* Run source subobject only, save output */
+	JBUF_CRANK_DEST,	/* Run dest subobject only, using saved data */
+	JBUF_SAVE_AND_PASS	/* Run both subobjects, save output */
+} J_BUF_MODE;
+
+/* Values of global_state field (jdapi.c has some dependencies on ordering!) */
+#define CSTATE_START	100	/* after create_compress */
+#define CSTATE_SCANNING	101	/* start_compress done, write_scanlines OK */
+#define CSTATE_RAW_OK	102	/* start_compress done, write_raw_data OK */
+#define CSTATE_WRCOEFS	103	/* jpeg_write_coefficients done */
+#define DSTATE_START	200	/* after create_decompress */
+#define DSTATE_INHEADER	201	/* reading header markers, no SOS yet */
+#define DSTATE_READY	202	/* found SOS, ready for start_decompress */
+#define DSTATE_PRELOAD	203	/* reading multiscan file in start_decompress*/
+#define DSTATE_PRESCAN	204	/* performing dummy pass for 2-pass quant */
+#define DSTATE_SCANNING	205	/* start_decompress done, read_scanlines OK */
+#define DSTATE_RAW_OK	206	/* start_decompress done, read_raw_data OK */
+#define DSTATE_BUFIMAGE	207	/* expecting jpeg_start_output */
+#define DSTATE_BUFPOST	208	/* looking for SOS/EOI in jpeg_finish_output */
+#define DSTATE_RDCOEFS	209	/* reading file in jpeg_read_coefficients */
+#define DSTATE_STOPPING	210	/* looking for EOI in jpeg_finish_decompress */
+
+
+/* Declarations for compression modules */
+
+/* Master control module */
+struct jpeg_comp_master {
+  JMETHOD(void, prepare_for_pass, (j_compress_ptr cinfo));
+  JMETHOD(void, pass_startup, (j_compress_ptr cinfo));
+  JMETHOD(void, finish_pass, (j_compress_ptr cinfo));
+
+  /* State variables made visible to other modules */
+  boolean call_pass_startup;	/* True if pass_startup must be called */
+  boolean is_last_pass;		/* True during last pass */
+};
+
+/* Main buffer control (downsampled-data buffer) */
+struct jpeg_c_main_controller {
+  JMETHOD(void, start_pass, (j_compress_ptr cinfo, J_BUF_MODE pass_mode));
+  JMETHOD(void, process_data, (j_compress_ptr cinfo,
+			       JSAMPARRAY input_buf, JDIMENSION *in_row_ctr,
+			       JDIMENSION in_rows_avail));
+};
+
+/* Compression preprocessing (downsampling input buffer control) */
+struct jpeg_c_prep_controller {
+  JMETHOD(void, start_pass, (j_compress_ptr cinfo, J_BUF_MODE pass_mode));
+  JMETHOD(void, pre_process_data, (j_compress_ptr cinfo,
+				   JSAMPARRAY input_buf,
+				   JDIMENSION *in_row_ctr,
+				   JDIMENSION in_rows_avail,
+				   JSAMPIMAGE output_buf,
+				   JDIMENSION *out_row_group_ctr,
+				   JDIMENSION out_row_groups_avail));
+};
+
+/* Coefficient buffer control */
+struct jpeg_c_coef_controller {
+  JMETHOD(void, start_pass, (j_compress_ptr cinfo, J_BUF_MODE pass_mode));
+  JMETHOD(boolean, compress_data, (j_compress_ptr cinfo,
+				   JSAMPIMAGE input_buf));
+};
+
+/* Colorspace conversion */
+struct jpeg_color_converter {
+  JMETHOD(void, start_pass, (j_compress_ptr cinfo));
+  JMETHOD(void, color_convert, (j_compress_ptr cinfo,
+				JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+				JDIMENSION output_row, int num_rows));
+};
+
+/* Downsampling */
+struct jpeg_downsampler {
+  JMETHOD(void, start_pass, (j_compress_ptr cinfo));
+  JMETHOD(void, downsample, (j_compress_ptr cinfo,
+			     JSAMPIMAGE input_buf, JDIMENSION in_row_index,
+			     JSAMPIMAGE output_buf,
+			     JDIMENSION out_row_group_index));
+
+  boolean need_context_rows;	/* TRUE if need rows above & below */
+};
+
+/* Forward DCT (also controls coefficient quantization) */
+struct jpeg_forward_dct {
+  JMETHOD(void, start_pass, (j_compress_ptr cinfo));
+  /* perhaps this should be an array??? */
+  JMETHOD(void, forward_DCT, (j_compress_ptr cinfo,
+			      jpeg_component_info * compptr,
+			      JSAMPARRAY sample_data, JBLOCKROW coef_blocks,
+			      JDIMENSION start_row, JDIMENSION start_col,
+			      JDIMENSION num_blocks));
+};
+
+/* Entropy encoding */
+struct jpeg_entropy_encoder {
+  JMETHOD(void, start_pass, (j_compress_ptr cinfo, boolean gather_statistics));
+  JMETHOD(boolean, encode_mcu, (j_compress_ptr cinfo, JBLOCKROW *MCU_data));
+  JMETHOD(void, finish_pass, (j_compress_ptr cinfo));
+};
+
+/* Marker writing */
+struct jpeg_marker_writer {
+  JMETHOD(void, write_file_header, (j_compress_ptr cinfo));
+  JMETHOD(void, write_frame_header, (j_compress_ptr cinfo));
+  JMETHOD(void, write_scan_header, (j_compress_ptr cinfo));
+  JMETHOD(void, write_file_trailer, (j_compress_ptr cinfo));
+  JMETHOD(void, write_tables_only, (j_compress_ptr cinfo));
+  /* These routines are exported to allow insertion of extra markers */
+  /* Probably only COM and APPn markers should be written this way */
+  JMETHOD(void, write_marker_header, (j_compress_ptr cinfo, int marker,
+				      unsigned int datalen));
+  JMETHOD(void, write_marker_byte, (j_compress_ptr cinfo, int val));
+};
+
+
+/* Declarations for decompression modules */
+
+/* Master control module */
+struct jpeg_decomp_master {
+  JMETHOD(void, prepare_for_output_pass, (j_decompress_ptr cinfo));
+  JMETHOD(void, finish_output_pass, (j_decompress_ptr cinfo));
+
+  /* State variables made visible to other modules */
+  boolean is_dummy_pass;	/* True during 1st pass for 2-pass quant */
+};
+
+/* Input control module */
+struct jpeg_input_controller {
+  JMETHOD(int, consume_input, (j_decompress_ptr cinfo));
+  JMETHOD(void, reset_input_controller, (j_decompress_ptr cinfo));
+  JMETHOD(void, start_input_pass, (j_decompress_ptr cinfo));
+  JMETHOD(void, finish_input_pass, (j_decompress_ptr cinfo));
+
+  /* State variables made visible to other modules */
+  boolean has_multiple_scans;	/* True if file has multiple scans */
+  boolean eoi_reached;		/* True when EOI has been consumed */
+
+#ifdef ANDROID
+  JMETHOD(int, consume_input_build_huffman_index, (j_decompress_ptr cinfo,
+                    huffman_index *index, int scan_count));
+  JMETHOD(int, consume_markers, (j_decompress_ptr cinfo,
+                    huffman_index *index, int scan_count));
+#endif
+};
+
+/* Main buffer control (downsampled-data buffer) */
+struct jpeg_d_main_controller {
+  JMETHOD(void, start_pass, (j_decompress_ptr cinfo, J_BUF_MODE pass_mode));
+  JMETHOD(void, process_data, (j_decompress_ptr cinfo,
+			       JSAMPARRAY output_buf, JDIMENSION *out_row_ctr,
+			       JDIMENSION out_rows_avail));
+};
+
+/* Coefficient buffer control */
+struct jpeg_d_coef_controller {
+  JMETHOD(void, start_input_pass, (j_decompress_ptr cinfo));
+  JMETHOD(int, consume_data, (j_decompress_ptr cinfo));
+  JMETHOD(void, start_output_pass, (j_decompress_ptr cinfo));
+  JMETHOD(int, decompress_data, (j_decompress_ptr cinfo,
+				 JSAMPIMAGE output_buf));
+
+  /* Pointer to array of coefficient virtual arrays, or NULL if none */
+  jvirt_barray_ptr *coef_arrays;
+
+#ifdef ANDROID
+  JMETHOD(int, consume_data_build_huffman_index, (j_decompress_ptr cinfo,
+                    huffman_index* index, int scan_count));
+
+ /* column number of the first and last tile, respectively */
+ int column_left_boundary;
+ int column_right_boundary;
+
+ /* column number of the first and last MCU, respectively */
+ int MCU_column_left_boundary;
+ int MCU_column_right_boundary;
+
+ /* the number of MCU columns to skip from the indexed MCU, iM,
+  * to the requested MCU boundary, rM, where iM is the MCU that we sample
+  * into our index and is the nearest one to the left of rM.
+  */
+ int MCU_columns_to_skip;
+
+#endif
+};
+
+/* Decompression postprocessing (color quantization buffer control) */
+struct jpeg_d_post_controller {
+  JMETHOD(void, start_pass, (j_decompress_ptr cinfo, J_BUF_MODE pass_mode));
+  JMETHOD(void, post_process_data, (j_decompress_ptr cinfo,
+				    JSAMPIMAGE input_buf,
+				    JDIMENSION *in_row_group_ctr,
+				    JDIMENSION in_row_groups_avail,
+				    JSAMPARRAY output_buf,
+				    JDIMENSION *out_row_ctr,
+				    JDIMENSION out_rows_avail));
+};
+
+/* Marker reading & parsing */
+struct jpeg_marker_reader {
+  JMETHOD(void, reset_marker_reader, (j_decompress_ptr cinfo));
+  /* Read markers until SOS or EOI.
+   * Returns same codes as are defined for jpeg_consume_input:
+   * JPEG_SUSPENDED, JPEG_REACHED_SOS, or JPEG_REACHED_EOI.
+   */
+  JMETHOD(int, read_markers, (j_decompress_ptr cinfo));
+  /* Read a restart marker --- exported for use by entropy decoder only */
+  jpeg_marker_parser_method read_restart_marker;
+
+  /* State of marker reader --- nominally internal, but applications
+   * supplying COM or APPn handlers might like to know the state.
+   */
+  boolean saw_SOI;		/* found SOI? */
+  boolean saw_SOF;		/* found SOF? */
+  int next_restart_num;		/* next restart number expected (0-7) */
+  unsigned int discarded_bytes;	/* # of bytes skipped looking for a marker */
+
+#ifdef ANDROID
+  JMETHOD(void, get_sos_marker_position, (j_decompress_ptr cinfo,
+                   huffman_index *index));
+ 
+  int current_sos_marker_position;
+#endif
+};
+
+/* Entropy decoding */
+struct jpeg_entropy_decoder {
+  JMETHOD(void, start_pass, (j_decompress_ptr cinfo));
+  JMETHOD(boolean, decode_mcu, (j_decompress_ptr cinfo,
+				JBLOCKROW *MCU_data));
+
+  /* This is here to share code between baseline and progressive decoders; */
+  /* other modules probably should not use it */
+  boolean insufficient_data;	/* set TRUE after emitting warning */
+
+#ifdef ANDROID
+  JMETHOD(boolean, decode_mcu_discard_coef, (j_decompress_ptr cinfo));
+  JMETHOD(void, configure_huffman_decoder, (j_decompress_ptr cinfo,
+                    huffman_offset_data offset));
+  JMETHOD(void, get_huffman_decoder_configuration, (j_decompress_ptr cinfo,
+                    huffman_offset_data *offset));
+
+  huffman_index *index;
+#endif
+};
+
+/* Inverse DCT (also performs dequantization) */
+typedef JMETHOD(void, inverse_DCT_method_ptr,
+		(j_decompress_ptr cinfo, jpeg_component_info * compptr,
+		 JCOEFPTR coef_block,
+		 JSAMPARRAY output_buf, JDIMENSION output_col));
+
+struct jpeg_inverse_dct {
+  JMETHOD(void, start_pass, (j_decompress_ptr cinfo));
+  /* It is useful to allow each component to have a separate IDCT method. */
+  inverse_DCT_method_ptr inverse_DCT[MAX_COMPONENTS];
+};
+
+/* Upsampling (note that upsampler must also call color converter) */
+struct jpeg_upsampler {
+  JMETHOD(void, start_pass, (j_decompress_ptr cinfo));
+  JMETHOD(void, upsample, (j_decompress_ptr cinfo,
+			   JSAMPIMAGE input_buf,
+			   JDIMENSION *in_row_group_ctr,
+			   JDIMENSION in_row_groups_avail,
+			   JSAMPARRAY output_buf,
+			   JDIMENSION *out_row_ctr,
+			   JDIMENSION out_rows_avail));
+
+  boolean need_context_rows;	/* TRUE if need rows above & below */
+};
+
+/* Colorspace conversion */
+struct jpeg_color_deconverter {
+  JMETHOD(void, start_pass, (j_decompress_ptr cinfo));
+  JMETHOD(void, color_convert, (j_decompress_ptr cinfo,
+				JSAMPIMAGE input_buf, JDIMENSION input_row,
+				JSAMPARRAY output_buf, int num_rows));
+};
+
+/* Color quantization or color precision reduction */
+struct jpeg_color_quantizer {
+  JMETHOD(void, start_pass, (j_decompress_ptr cinfo, boolean is_pre_scan));
+  JMETHOD(void, color_quantize, (j_decompress_ptr cinfo,
+				 JSAMPARRAY input_buf, JSAMPARRAY output_buf,
+				 int num_rows));
+  JMETHOD(void, finish_pass, (j_decompress_ptr cinfo));
+  JMETHOD(void, new_color_map, (j_decompress_ptr cinfo));
+};
+
+
+/* Miscellaneous useful macros */
+
+#undef MAX
+#define MAX(a,b)	((a) > (b) ? (a) : (b))
+#undef MIN
+#define MIN(a,b)	((a) < (b) ? (a) : (b))
+
+
+/* We assume that right shift corresponds to signed division by 2 with
+ * rounding towards minus infinity.  This is correct for typical "arithmetic
+ * shift" instructions that shift in copies of the sign bit.  But some
+ * C compilers implement >> with an unsigned shift.  For these machines you
+ * must define RIGHT_SHIFT_IS_UNSIGNED.
+ * RIGHT_SHIFT provides a proper signed right shift of an INT32 quantity.
+ * It is only applied with constant shift counts.  SHIFT_TEMPS must be
+ * included in the variables of any routine using RIGHT_SHIFT.
+ */
+
+#ifdef RIGHT_SHIFT_IS_UNSIGNED
+#define SHIFT_TEMPS	INT32 shift_temp;
+#define RIGHT_SHIFT(x,shft)  \
+	((shift_temp = (x)) < 0 ? \
+	 (shift_temp >> (shft)) | ((~((INT32) 0)) << (32-(shft))) : \
+	 (shift_temp >> (shft)))
+#else
+#define SHIFT_TEMPS
+#define RIGHT_SHIFT(x,shft)	((x) >> (shft))
+#endif
+
+
+/* Short forms of external names for systems with brain-damaged linkers. */
+
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+#define jinit_compress_master	jICompress
+#define jinit_c_master_control	jICMaster
+#define jinit_c_main_controller	jICMainC
+#define jinit_c_prep_controller	jICPrepC
+#define jinit_c_coef_controller	jICCoefC
+#define jinit_color_converter	jICColor
+#define jinit_downsampler	jIDownsampler
+#define jinit_forward_dct	jIFDCT
+#define jinit_huff_encoder	jIHEncoder
+#define jinit_phuff_encoder	jIPHEncoder
+#define jinit_arith_encoder	jIAEncoder
+#define jinit_marker_writer	jIMWriter
+#define jinit_master_decompress	jIDMaster
+#define jinit_d_main_controller	jIDMainC
+#define jinit_d_coef_controller	jIDCoefC
+#define jinit_d_post_controller	jIDPostC
+#define jinit_input_controller	jIInCtlr
+#define jinit_marker_reader	jIMReader
+#define jinit_huff_decoder	jIHDecoder
+#define jinit_phuff_decoder	jIPHDecoder
+#define jinit_arith_decoder	jIADecoder
+#define jinit_inverse_dct	jIIDCT
+#define jinit_upsampler		jIUpsampler
+#define jinit_color_deconverter	jIDColor
+#define jinit_1pass_quantizer	jI1Quant
+#define jinit_2pass_quantizer	jI2Quant
+#define jinit_merged_upsampler	jIMUpsampler
+#define jinit_memory_mgr	jIMemMgr
+#define jdiv_round_up		jDivRound
+#define jround_up		jRound
+#define jcopy_sample_rows	jCopySamples
+#define jcopy_block_row		jCopyBlocks
+#define jzero_far		jZeroFar
+#define jpeg_zigzag_order	jZIGTable
+#define jpeg_natural_order	jZAGTable
+#define jpeg_aritab		jAriTab
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+
+
+/* Compression module initialization routines */
+EXTERN(void) jinit_compress_master JPP((j_compress_ptr cinfo));
+EXTERN(void) jinit_c_master_control JPP((j_compress_ptr cinfo,
+					 boolean transcode_only));
+EXTERN(void) jinit_c_main_controller JPP((j_compress_ptr cinfo,
+					  boolean need_full_buffer));
+EXTERN(void) jinit_c_prep_controller JPP((j_compress_ptr cinfo,
+					  boolean need_full_buffer));
+EXTERN(void) jinit_c_coef_controller JPP((j_compress_ptr cinfo,
+					  boolean need_full_buffer));
+EXTERN(void) jinit_color_converter JPP((j_compress_ptr cinfo));
+EXTERN(void) jinit_downsampler JPP((j_compress_ptr cinfo));
+EXTERN(void) jinit_forward_dct JPP((j_compress_ptr cinfo));
+EXTERN(void) jinit_huff_encoder JPP((j_compress_ptr cinfo));
+EXTERN(void) jinit_phuff_encoder JPP((j_compress_ptr cinfo));
+EXTERN(void) jinit_arith_encoder JPP((j_compress_ptr cinfo));
+EXTERN(void) jinit_marker_writer JPP((j_compress_ptr cinfo));
+/* Decompression module initialization routines */
+EXTERN(void) jinit_master_decompress JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_d_main_controller JPP((j_decompress_ptr cinfo,
+					  boolean need_full_buffer));
+EXTERN(void) jinit_d_coef_controller JPP((j_decompress_ptr cinfo,
+					  boolean need_full_buffer));
+EXTERN(void) jinit_d_post_controller JPP((j_decompress_ptr cinfo,
+					  boolean need_full_buffer));
+EXTERN(void) jinit_input_controller JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_marker_reader JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_huff_decoder JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_phuff_decoder JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_arith_decoder JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_inverse_dct JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_upsampler JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_color_deconverter JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_1pass_quantizer JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_2pass_quantizer JPP((j_decompress_ptr cinfo));
+EXTERN(void) jinit_merged_upsampler JPP((j_decompress_ptr cinfo));
+
+#ifdef ANDROID
+EXTERN(void) jinit_huff_decoder_no_data JPP((j_decompress_ptr cinfo));
+EXTERN(void) jpeg_decompress_per_scan_setup (j_decompress_ptr cinfo);
+#endif
+
+/* Memory manager initialization */
+EXTERN(void) jinit_memory_mgr JPP((j_common_ptr cinfo));
+
+/* Utility routines in jutils.c */
+EXTERN(long) jdiv_round_up JPP((long a, long b));
+EXTERN(long) jround_up JPP((long a, long b));
+EXTERN(long) jmin JPP((long a, long b));
+EXTERN(void) jcopy_sample_rows JPP((JSAMPARRAY input_array, int source_row,
+				    JSAMPARRAY output_array, int dest_row,
+				    int num_rows, JDIMENSION num_cols));
+EXTERN(void) jcopy_block_row JPP((JBLOCKROW input_row, JBLOCKROW output_row,
+				  JDIMENSION num_blocks));
+EXTERN(void) jzero_far JPP((void FAR * target, size_t bytestozero));
+EXTERN(void) jset_input_stream_position JPP((j_decompress_ptr cinfo,
+                    int offset));
+EXTERN(void) jset_input_stream_position_bit JPP((j_decompress_ptr cinfo,
+                    int byte_offset, int bit_left, INT32 buf));
+
+EXTERN(int) jget_input_stream_position JPP((j_decompress_ptr cinfo));
+
+/* Constant tables in jutils.c */
+#if 0				/* This table is not actually needed in v6a */
+extern const int jpeg_zigzag_order[]; /* natural coef order to zigzag order */
+#endif
+extern const int jpeg_natural_order[]; /* zigzag coef order to natural order */
+
+/* Arithmetic coding probability estimation tables in jaricom.c */
+extern const INT32 jpeg_aritab[];
+
+/* Suppress undefined-structure complaints if necessary. */
+
+#ifdef INCOMPLETE_TYPES_BROKEN
+#ifndef AM_MEMORY_MANAGER	/* only jmemmgr.c defines these */
+struct jvirt_sarray_control { long dummy; };
+struct jvirt_barray_control { long dummy; };
+#endif
+#endif /* INCOMPLETE_TYPES_BROKEN */

+ 1611 - 0
camerakit/src/main/cpp/libjpeg/include/jpeglib.h

@@ -0,0 +1,1611 @@
+/*
+ * jpeglib.h
+ *
+ * Copyright (C) 1991-1998, Thomas G. Lane.
+ * Modified 2002-2009 by Guido Vollbeding.
+ * Copyright (C) 2009-2011, D. R. Commander.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file defines the application interface for the JPEG library.
+ * Most applications using the library need only include this file,
+ * and perhaps jerror.h if they want to know the exact error codes.
+ */
+
+#ifndef JPEGLIB_H
+#define JPEGLIB_H
+
+
+#include "stdio.h"
+
+/*
+ * First we include the configuration files that record how this
+ * installation of the JPEG library is set up.  jconfig.h can be
+ * generated automatically for many systems.  jmorecfg.h contains
+ * manual configuration options that most people need not worry about.
+ */
+
+#ifndef JCONFIG_INCLUDED    /* in case jinclude.h already did */
+
+#include "jconfig.h"		/* widely used configuration options */
+
+#endif
+
+#include "jmorecfg.h"		/* seldom changed options */
+
+#ifndef ANDROID
+#ifdef __cplusplus
+#ifndef DONT_USE_EXTERN_C
+extern "C" {
+#endif
+#endif
+#endif
+
+
+/* Various constants determining the sizes of things.
+ * All of these are specified by the JPEG standard, so don't change them
+ * if you want to be compatible.
+ */
+
+#define DCTSIZE            8    /* The basic DCT block is 8x8 samples */
+#define DCTSIZE2        64    /* DCTSIZE squared; # of elements in a block */
+#define NUM_QUANT_TBLS      4    /* Quantization tables are numbered 0..3 */
+#define NUM_HUFF_TBLS       4    /* Huffman tables are numbered 0..3 */
+#define NUM_ARITH_TBLS      16    /* Arith-coding tables are numbered 0..15 */
+#define MAX_COMPS_IN_SCAN   4    /* JPEG limit on # of components in one scan */
+#define MAX_SAMP_FACTOR     4    /* JPEG limit on sampling factors */
+/* Unfortunately, some bozo at Adobe saw no reason to be bound by the standard;
+ * the PostScript DCT filter can emit files with many more than 10 blocks/MCU.
+ * If you happen to run across such a file, you can up D_MAX_BLOCKS_IN_MCU
+ * to handle it.  We even let you do this from the jconfig.h file.  However,
+ * we strongly discourage changing C_MAX_BLOCKS_IN_MCU; just because Adobe
+ * sometimes emits noncompliant files doesn't mean you should too.
+ */
+#define C_MAX_BLOCKS_IN_MCU   10 /* compressor's limit on blocks per MCU */
+#ifndef D_MAX_BLOCKS_IN_MCU
+#define D_MAX_BLOCKS_IN_MCU   10 /* decompressor's limit on blocks per MCU */
+#endif
+
+
+/* Data structures for images (arrays of samples and of DCT coefficients).
+ * On 80x86 machines, the image arrays are too big for near pointers,
+ * but the pointer arrays can fit in near memory.
+ */
+
+typedef JSAMPLE FAR *JSAMPROW;
+/* ptr to one image row of pixel samples. */
+typedef JSAMPROW *JSAMPARRAY;
+/* ptr to some rows (a 2-D sample array) */
+typedef JSAMPARRAY *JSAMPIMAGE;
+/* a 3-D sample array: top index is color */
+
+typedef JCOEF JBLOCK[DCTSIZE2];
+/* one block of coefficients */
+typedef JBLOCK FAR *JBLOCKROW;
+/* pointer to one row of coefficient blocks */
+typedef JBLOCKROW *JBLOCKARRAY;
+/* a 2-D array of coefficient blocks */
+typedef JBLOCKARRAY *JBLOCKIMAGE;
+/* a 3-D array of coefficient blocks */
+
+typedef JCOEF FAR *JCOEFPTR;    /* useful in a couple of places */
+
+
+/* Types for JPEG compression parameters and working tables. */
+
+
+/* DCT coefficient quantization tables. */
+
+typedef struct {
+    /* This array gives the coefficient quantizers in natural array order
+     * (not the zigzag order in which they are stored in a JPEG DQT marker).
+     * CAUTION: IJG versions prior to v6a kept this array in zigzag order.
+     */
+    UINT16 quantval[DCTSIZE2];    /* quantization step for each coefficient */
+    /* This field is used only during compression.  It's initialized FALSE when
+     * the table is created, and set TRUE when it's been output to the file.
+     * You could suppress output of a table by setting this to TRUE.
+     * (See jpeg_suppress_tables for an example.)
+     */
+    boolean sent_table;        /* TRUE when table has been output */
+} JQUANT_TBL;
+
+
+/* Huffman coding tables. */
+
+typedef struct {
+    /* These two fields directly represent the contents of a JPEG DHT marker */
+    UINT8 bits[17];        /* bits[k] = # of symbols with codes of */
+    /* length k bits; bits[0] is unused */
+    UINT8 huffval[256];        /* The symbols, in order of incr code length */
+    /* This field is used only during compression.  It's initialized FALSE when
+     * the table is created, and set TRUE when it's been output to the file.
+     * You could suppress output of a table by setting this to TRUE.
+     * (See jpeg_suppress_tables for an example.)
+     */
+    boolean sent_table;        /* TRUE when table has been output */
+} JHUFF_TBL;
+
+
+/* Basic info about one component (color channel). */
+
+typedef struct {
+    /* These values are fixed over the whole image. */
+    /* For compression, they must be supplied by parameter setup; */
+    /* for decompression, they are read from the SOF marker. */
+    int component_id;
+    /* identifier for this component (0..255) */
+    int component_index;
+    /* its index in SOF or cinfo->comp_info[] */
+    int h_samp_factor;
+    /* horizontal sampling factor (1..4) */
+    int v_samp_factor;
+    /* vertical sampling factor (1..4) */
+    int quant_tbl_no;        /* quantization table selector (0..3) */
+    /* These values may vary between scans. */
+    /* For compression, they must be supplied by parameter setup; */
+    /* for decompression, they are read from the SOS marker. */
+    /* The decompressor output side may not use these variables. */
+    int dc_tbl_no;
+    /* DC entropy table selector (0..3) */
+    int ac_tbl_no;        /* AC entropy table selector (0..3) */
+
+    /* Remaining fields should be treated as private by applications. */
+
+    /* These values are computed during compression or decompression startup: */
+    /* Component's size in DCT blocks.
+     * Any dummy blocks added to complete an MCU are not counted; therefore
+     * these values do not depend on whether a scan is interleaved or not.
+     */
+    JDIMENSION width_in_blocks;
+    JDIMENSION height_in_blocks;
+    /* Size of a DCT block in samples.  Always DCTSIZE for compression.
+     * For decompression this is the size of the output from one DCT block,
+     * reflecting any scaling we choose to apply during the IDCT step.
+     * Values of 1,2,4,8 are likely to be supported.  Note that different
+     * components may receive different IDCT scalings.
+     */
+#if JPEG_LIB_VERSION >= 70
+    int DCT_h_scaled_size;
+    int DCT_v_scaled_size;
+#else
+    int DCT_scaled_size;
+#endif
+    /* The downsampled dimensions are the component's actual, unpadded number
+     * of samples at the main buffer (preprocessing/compression interface), thus
+     * downsampled_width = ceil(image_width * Hi/Hmax)
+     * and similarly for height.  For decompression, IDCT scaling is included, so
+     * downsampled_width = ceil(image_width * Hi/Hmax * DCT_[h_]scaled_size/DCTSIZE)
+     */
+    JDIMENSION downsampled_width;
+    /* actual width in samples */
+    JDIMENSION downsampled_height; /* actual height in samples */
+    /* This flag is used only for decompression.  In cases where some of the
+     * components will be ignored (eg grayscale output from YCbCr image),
+     * we can skip most computations for the unused components.
+     */
+    boolean component_needed;    /* do we need the value of this component? */
+
+    /* These values are computed before starting a scan of the component. */
+    /* The decompressor output side may not use these variables. */
+    int MCU_width;
+    /* number of blocks per MCU, horizontally */
+    int MCU_height;
+    /* number of blocks per MCU, vertically */
+    int MCU_blocks;
+    /* MCU_width * MCU_height */
+    int MCU_sample_width;
+    /* MCU width in samples, MCU_width*DCT_[h_]scaled_size */
+    int last_col_width;
+    /* # of non-dummy blocks across in last MCU */
+    int last_row_height;        /* # of non-dummy blocks down in last MCU */
+
+    /* Saved quantization table for component; NULL if none yet saved.
+     * See jdinput.c comments about the need for this information.
+     * This field is currently used only for decompression.
+     */
+    JQUANT_TBL *quant_table;
+
+    /* Private per-component storage for DCT or IDCT subsystem. */
+    void *dct_table;
+} jpeg_component_info;
+
+
+/* The script for encoding a multiple-scan file is an array of these: */
+
+typedef struct {
+    int comps_in_scan;
+    /* number of components encoded in this scan */
+    int component_index[MAX_COMPS_IN_SCAN];
+    /* their SOF/comp_info[] indexes */
+    int Ss, Se;
+    /* progressive JPEG spectral selection parms */
+    int Ah, Al;            /* progressive JPEG successive approx. parms */
+} jpeg_scan_info;
+
+/* The decompressor can save APPn and COM markers in a list of these: */
+
+typedef struct jpeg_marker_struct FAR *jpeg_saved_marker_ptr;
+
+struct jpeg_marker_struct {
+    jpeg_saved_marker_ptr next;
+    /* next in list, or NULL */
+    UINT8 marker;
+    /* marker code: JPEG_COM, or JPEG_APP0+n */
+    unsigned int original_length;
+    /* # bytes of data in the file */
+    unsigned int data_length;
+    /* # bytes of data saved at data[] */
+    JOCTET FAR *data;        /* the data contained in the marker */
+    /* the marker length word is not counted in data_length or original_length */
+};
+
+/* Known color spaces. */
+
+#define JCS_EXTENSIONS 1
+#define JCS_ALPHA_EXTENSIONS 1
+
+typedef enum {
+    JCS_UNKNOWN, /* error/unspecified */
+            JCS_GRAYSCALE, /* monochrome */
+            JCS_RGB, /* red/green/blue as specified by the RGB_RED, RGB_GREEN,
+				   RGB_BLUE, and RGB_PIXELSIZE macros */
+            JCS_YCbCr, /* Y/Cb/Cr (also known as YUV) */
+            JCS_CMYK, /* C/M/Y/K */
+            JCS_YCCK, /* Y/Cb/Cr/K */
+            JCS_EXT_RGB, /* red/green/blue */
+            JCS_EXT_RGBX, /* red/green/blue/x */
+            JCS_EXT_BGR, /* blue/green/red */
+            JCS_EXT_BGRX, /* blue/green/red/x */
+            JCS_EXT_XBGR, /* x/blue/green/red */
+            JCS_EXT_XRGB,        /* x/red/green/blue */
+    /* When out_color_space it set to JCS_EXT_RGBX, JCS_EXT_BGRX,
+       JCS_EXT_XBGR, or JCS_EXT_XRGB during decompression, the X byte is
+       undefined, and in order to ensure the best performance,
+       libjpeg-turbo can set that byte to whatever value it wishes.  Use
+       the following colorspace constants to ensure that the X byte is set
+       to 0xFF, so that it can be interpreted as an opaque alpha
+       channel. */
+            JCS_EXT_RGBA, /* red/green/blue/alpha */
+            JCS_EXT_BGRA, /* blue/green/red/alpha */
+            JCS_EXT_ABGR, /* alpha/blue/green/red */
+            JCS_EXT_ARGB,        /* alpha/red/green/blue */
+#ifdef ANDROID_RGB
+    JCS_RGBA_8888, /* red/green/blue/alpha */
+            JCS_RGB_565     /* red/green/blue in 565 format */
+#endif
+} J_COLOR_SPACE;
+
+/* DCT/IDCT algorithm options. */
+
+typedef enum {
+    JDCT_ISLOW, /* slow but accurate integer algorithm */
+            JDCT_IFAST, /* faster, less accurate integer method */
+            JDCT_FLOAT        /* floating-point: accurate, fast on fast HW */
+} J_DCT_METHOD;
+
+#ifndef JDCT_DEFAULT        /* may be overridden in jconfig.h */
+#define JDCT_DEFAULT  JDCT_ISLOW
+#endif
+#ifndef JDCT_FASTEST        /* may be overridden in jconfig.h */
+#define JDCT_FASTEST  JDCT_IFAST
+#endif
+
+/* Dithering options for decompression. */
+
+typedef enum {
+    JDITHER_NONE, /* no dithering */
+            JDITHER_ORDERED, /* simple ordered dither */
+            JDITHER_FS        /* Floyd-Steinberg error diffusion dither */
+} J_DITHER_MODE;
+
+
+/* Common fields between JPEG compression and decompression master structs. */
+
+#define jpeg_common_fields \
+  struct jpeg_error_mgr * err;    /* Error handler module */\
+  struct jpeg_memory_mgr * mem;    /* Memory manager module */\
+  struct jpeg_progress_mgr * progress; /* Progress monitor, or NULL if none */\
+  void * client_data;        /* Available for use by application */\
+  boolean is_decompressor;    /* So common code can tell which is which */\
+  int global_state        /* For checking call sequence validity */
+
+/* Routines that are to be used by both halves of the library are declared
+ * to receive a pointer to this structure.  There are no actual instances of
+ * jpeg_common_struct, only of jpeg_compress_struct and jpeg_decompress_struct.
+ */
+struct jpeg_common_struct {
+    jpeg_common_fields;        /* Fields common to both master struct types */
+    /* Additional fields follow in an actual jpeg_compress_struct or
+     * jpeg_decompress_struct.  All three structs must agree on these
+     * initial fields!  (This would be a lot cleaner in C++.)
+     */
+};
+
+typedef struct jpeg_common_struct *j_common_ptr;
+typedef struct jpeg_compress_struct *j_compress_ptr;
+typedef struct jpeg_decompress_struct *j_decompress_ptr;
+
+
+/* Master record for a compression instance */
+/* NOTE(review): public struct — its layout is part of the installed ABI
+ * (the JPEG_LIB_VERSION guards below exist to manage layout differences);
+ * do not reorder or remove fields.
+ */
+
+struct jpeg_compress_struct {
+    jpeg_common_fields;        /* Fields shared with jpeg_decompress_struct */
+
+    /* Destination for compressed data */
+    struct jpeg_destination_mgr *dest;
+
+    /* Description of source image --- these fields must be filled in by
+     * outer application before starting compression.  in_color_space must
+     * be correct before you can even call jpeg_set_defaults().
+     */
+
+    JDIMENSION image_width;
+    /* input image width */
+    JDIMENSION image_height;
+    /* input image height */
+    int input_components;
+    /* # of color components in input image */
+    J_COLOR_SPACE in_color_space;
+    /* colorspace of input image */
+
+    double input_gamma;        /* image gamma of input image */
+
+    /* Compression parameters --- these fields must be set before calling
+     * jpeg_start_compress().  We recommend calling jpeg_set_defaults() to
+     * initialize everything to reasonable defaults, then changing anything
+     * the application specifically wants to change.  That way you won't get
+     * burnt when new parameters are added.  Also note that there are several
+     * helper routines to simplify changing parameters.
+     */
+
+#if JPEG_LIB_VERSION >= 70
+    unsigned int scale_num, scale_denom; /* fraction by which to scale image */
+
+    JDIMENSION jpeg_width;	/* scaled JPEG image width */
+    JDIMENSION jpeg_height;	/* scaled JPEG image height */
+    /* Dimensions of actual JPEG image that will be written to file,
+     * derived from input dimensions by scaling factors above.
+     * These fields are computed by jpeg_start_compress().
+     * You can also use jpeg_calc_jpeg_dimensions() to determine these values
+     * in advance of calling jpeg_start_compress().
+     */
+#endif
+
+    int data_precision;
+    /* bits of precision in image data */
+
+    int num_components;
+    /* # of color components in JPEG image */
+    J_COLOR_SPACE jpeg_color_space;
+    /* colorspace of JPEG image */
+
+    jpeg_component_info *comp_info;
+    /* comp_info[i] describes component that appears i'th in SOF */
+
+    JQUANT_TBL *quant_tbl_ptrs[NUM_QUANT_TBLS];
+#if JPEG_LIB_VERSION >= 70
+    int q_scale_factor[NUM_QUANT_TBLS];
+#endif
+    /* ptrs to coefficient quantization tables, or NULL if not defined,
+     * and corresponding scale factors (percentage, initialized 100).
+     */
+
+    JHUFF_TBL *dc_huff_tbl_ptrs[NUM_HUFF_TBLS];
+    JHUFF_TBL *ac_huff_tbl_ptrs[NUM_HUFF_TBLS];
+    /* ptrs to Huffman coding tables, or NULL if not defined */
+
+    UINT8 arith_dc_L[NUM_ARITH_TBLS];
+    /* L values for DC arith-coding tables */
+    UINT8 arith_dc_U[NUM_ARITH_TBLS];
+    /* U values for DC arith-coding tables */
+    UINT8 arith_ac_K[NUM_ARITH_TBLS];
+    /* Kx values for AC arith-coding tables */
+
+    int num_scans;
+    /* # of entries in scan_info array */
+    const jpeg_scan_info *scan_info; /* script for multi-scan file, or NULL */
+    /* The default value of scan_info is NULL, which causes a single-scan
+     * sequential JPEG file to be emitted.  To create a multi-scan file,
+     * set num_scans and scan_info to point to an array of scan definitions.
+     */
+
+    boolean raw_data_in;
+    /* TRUE=caller supplies downsampled data */
+    boolean arith_code;
+    /* TRUE=arithmetic coding, FALSE=Huffman */
+    boolean optimize_coding;
+    /* TRUE=optimize entropy encoding parms */
+    boolean CCIR601_sampling;    /* TRUE=first samples are cosited */
+#if JPEG_LIB_VERSION >= 70
+    boolean do_fancy_downsampling; /* TRUE=apply fancy downsampling */
+#endif
+    int smoothing_factor;
+    /* 1..100, or 0 for no input smoothing */
+    J_DCT_METHOD dct_method;    /* DCT algorithm selector */
+
+    /* The restart interval can be specified in absolute MCUs by setting
+     * restart_interval, or in MCU rows by setting restart_in_rows
+     * (in which case the correct restart_interval will be figured
+     * for each scan).
+     */
+    unsigned int restart_interval;
+    /* MCUs per restart, or 0 for no restart */
+    int restart_in_rows;        /* if > 0, MCU rows per restart interval */
+
+    /* Parameters controlling emission of special markers. */
+
+    boolean write_JFIF_header;
+    /* should a JFIF marker be written? */
+    UINT8 JFIF_major_version;
+    /* What to write for the JFIF version number */
+    UINT8 JFIF_minor_version;
+    /* These three values are not used by the JPEG code, merely copied */
+    /* into the JFIF APP0 marker.  density_unit can be 0 for unknown, */
+    /* 1 for dots/inch, or 2 for dots/cm.  Note that the pixel aspect */
+    /* ratio is defined by X_density/Y_density even when density_unit=0. */
+    UINT8 density_unit;
+    /* JFIF code for pixel size units */
+    UINT16 X_density;
+    /* Horizontal pixel density */
+    UINT16 Y_density;
+    /* Vertical pixel density */
+    boolean write_Adobe_marker;    /* should an Adobe marker be written? */
+
+    /* State variable: index of next scanline to be written to
+     * jpeg_write_scanlines().  Application may use this to control its
+     * processing loop, e.g., "while (next_scanline < image_height)".
+     */
+
+    JDIMENSION next_scanline;    /* 0 .. image_height-1  */
+
+    /* Remaining fields are known throughout compressor, but generally
+     * should not be touched by a surrounding application.
+     */
+
+    /*
+     * These fields are computed during compression startup
+     */
+    boolean progressive_mode;
+    /* TRUE if scan script uses progressive mode */
+    int max_h_samp_factor;
+    /* largest h_samp_factor */
+    int max_v_samp_factor;    /* largest v_samp_factor */
+
+#if JPEG_LIB_VERSION >= 70
+    int min_DCT_h_scaled_size;	/* smallest DCT_h_scaled_size of any component */
+    int min_DCT_v_scaled_size;	/* smallest DCT_v_scaled_size of any component */
+#endif
+
+    JDIMENSION total_iMCU_rows;    /* # of iMCU rows to be input to coef ctlr */
+    /* The coefficient controller receives data in units of MCU rows as defined
+     * for fully interleaved scans (whether the JPEG file is interleaved or not).
+     * There are v_samp_factor * DCTSIZE sample rows of each component in an
+     * "iMCU" (interleaved MCU) row.
+     */
+
+    /*
+     * These fields are valid during any one scan.
+     * They describe the components and MCUs actually appearing in the scan.
+     */
+    int comps_in_scan;
+    /* # of JPEG components in this scan */
+    jpeg_component_info *cur_comp_info[MAX_COMPS_IN_SCAN];
+    /* *cur_comp_info[i] describes component that appears i'th in SOS */
+
+    JDIMENSION MCUs_per_row;
+    /* # of MCUs across the image */
+    JDIMENSION MCU_rows_in_scan;
+    /* # of MCU rows in the image */
+
+    int blocks_in_MCU;
+    /* # of DCT blocks per MCU */
+    int MCU_membership[C_MAX_BLOCKS_IN_MCU];
+    /* MCU_membership[i] is index in cur_comp_info of component owning */
+    /* i'th block in an MCU */
+
+    int Ss, Se, Ah, Al;        /* progressive JPEG parameters for scan */
+
+#if JPEG_LIB_VERSION >= 80
+    int block_size;		/* the basic DCT block size: 1..16 */
+    const int * natural_order;	/* natural-order position array */
+    int lim_Se;			/* min( Se, DCTSIZE2-1 ) */
+#endif
+
+    /*
+     * Links to compression subobjects (methods and private variables of modules)
+     */
+    struct jpeg_comp_master *master;
+    struct jpeg_c_main_controller *main;
+    struct jpeg_c_prep_controller *prep;
+    struct jpeg_c_coef_controller *coef;
+    struct jpeg_marker_writer *marker;
+    struct jpeg_color_converter *cconvert;
+    struct jpeg_downsampler *downsample;
+    struct jpeg_forward_dct *fdct;
+    struct jpeg_entropy_encoder *entropy;
+    jpeg_scan_info *script_space;
+    /* workspace for jpeg_simple_progression */
+    int script_space_size;
+};
+
+
+/* Master record for a decompression instance */
+/* NOTE(review): public struct — its layout is part of the installed ABI
+ * (see the JPEG_LIB_VERSION / ANDROID guards below); do not reorder fields.
+ */
+
+struct jpeg_decompress_struct {
+    jpeg_common_fields;        /* Fields shared with jpeg_compress_struct */
+
+    /* Source of compressed data */
+    struct jpeg_source_mgr *src;
+
+    /* Basic description of image --- filled in by jpeg_read_header(). */
+    /* Application may inspect these values to decide how to process image. */
+
+    JDIMENSION original_image_width;
+    /* image width from SOF marker; NOTE(review): not an upstream jpeglib
+     * field — apparently keeps the unscaled width alongside image_width;
+     * confirm against the decoder sources */
+    JDIMENSION image_width;
+    /* nominal image width (from SOF marker) */
+    JDIMENSION image_height;
+    /* nominal image height */
+    int num_components;
+    /* # of color components in JPEG image */
+    J_COLOR_SPACE jpeg_color_space; /* colorspace of JPEG image */
+
+    /* Decompression processing parameters --- these fields must be set before
+     * calling jpeg_start_decompress().  Note that jpeg_read_header() initializes
+     * them to default values.
+     */
+
+    J_COLOR_SPACE out_color_space;
+    /* colorspace for output */
+
+    unsigned int scale_num, scale_denom;
+    /* fraction by which to scale image */
+
+    double output_gamma;
+    /* image gamma wanted in output */
+
+    boolean buffered_image;
+    /* TRUE=multiple output passes */
+    boolean raw_data_out;
+    /* TRUE=downsampled data wanted */
+
+    J_DCT_METHOD dct_method;
+    /* IDCT algorithm selector */
+    boolean do_fancy_upsampling;
+    /* TRUE=apply fancy upsampling */
+    boolean do_block_smoothing;
+    /* TRUE=apply interblock smoothing */
+
+    boolean quantize_colors;    /* TRUE=colormapped output wanted */
+    /* the following are ignored if not quantize_colors: */
+    J_DITHER_MODE dither_mode;
+    /* type of color dithering to use */
+    boolean two_pass_quantize;
+    /* TRUE=use two-pass color quantization */
+    int desired_number_of_colors;    /* max # colors to use in created colormap */
+    /* these are significant only in buffered-image mode: */
+    boolean enable_1pass_quant;
+    /* enable future use of 1-pass quantizer */
+    boolean enable_external_quant;
+    /* enable future use of external colormap */
+    boolean enable_2pass_quant;    /* enable future use of 2-pass quantizer */
+
+    /* Description of actual output image that will be returned to application.
+     * These fields are computed by jpeg_start_decompress().
+     * You can also use jpeg_calc_output_dimensions() to determine these values
+     * in advance of calling jpeg_start_decompress().
+     */
+
+    JDIMENSION output_width;
+    /* scaled image width */
+    JDIMENSION output_height;
+    /* scaled image height */
+    int out_color_components;
+    /* # of color components in out_color_space */
+    int output_components;    /* # of color components returned */
+    /* output_components is 1 (a colormap index) when quantizing colors;
+     * otherwise it equals out_color_components.
+     */
+    int rec_outbuf_height;    /* min recommended height of scanline buffer */
+    /* If the buffer passed to jpeg_read_scanlines() is less than this many rows
+     * high, space and time will be wasted due to unnecessary data copying.
+     * Usually rec_outbuf_height will be 1 or 2, at most 4.
+     */
+
+    /* When quantizing colors, the output colormap is described by these fields.
+     * The application can supply a colormap by setting colormap non-NULL before
+     * calling jpeg_start_decompress; otherwise a colormap is created during
+     * jpeg_start_decompress or jpeg_start_output.
+     * The map has out_color_components rows and actual_number_of_colors columns.
+     */
+    int actual_number_of_colors;
+    /* number of entries in use */
+    JSAMPARRAY colormap;        /* The color map as a 2-D pixel array */
+
+    /* State variables: these variables indicate the progress of decompression.
+     * The application may examine these but must not modify them.
+     */
+
+    /* Row index of next scanline to be read from jpeg_read_scanlines().
+     * Application may use this to control its processing loop, e.g.,
+     * "while (output_scanline < output_height)".
+     */
+    JDIMENSION output_scanline;    /* 0 .. output_height-1  */
+
+    /* Current input scan number and number of iMCU rows completed in scan.
+     * These indicate the progress of the decompressor input side.
+     */
+    int input_scan_number;
+    /* Number of SOS markers seen so far */
+    JDIMENSION input_iMCU_row;    /* Number of iMCU rows completed */
+
+    /* The "output scan number" is the notional scan being displayed by the
+     * output side.  The decompressor will not allow output scan/row number
+     * to get ahead of input scan/row, but it can fall arbitrarily far behind.
+     */
+    int output_scan_number;
+    /* Nominal scan number being displayed */
+    JDIMENSION output_iMCU_row;    /* Number of iMCU rows read */
+
+    /* Current progression status.  coef_bits[c][i] indicates the precision
+     * with which component c's DCT coefficient i (in zigzag order) is known.
+     * It is -1 when no data has yet been received, otherwise it is the point
+     * transform (shift) value for the most recent scan of the coefficient
+     * (thus, 0 at completion of the progression).
+     * This pointer is NULL when reading a non-progressive file.
+     */
+    int (*coef_bits)[DCTSIZE2];    /* -1 or current Al value for each coef */
+
+    /* Internal JPEG parameters --- the application usually need not look at
+     * these fields.  Note that the decompressor output side may not use
+     * any parameters that can change between scans.
+     */
+
+    /* Quantization and Huffman tables are carried forward across input
+     * datastreams when processing abbreviated JPEG datastreams.
+     */
+
+    JQUANT_TBL *quant_tbl_ptrs[NUM_QUANT_TBLS];
+    /* ptrs to coefficient quantization tables, or NULL if not defined */
+
+    JHUFF_TBL *dc_huff_tbl_ptrs[NUM_HUFF_TBLS];
+    JHUFF_TBL *ac_huff_tbl_ptrs[NUM_HUFF_TBLS];
+    /* ptrs to Huffman coding tables, or NULL if not defined */
+
+    /* These parameters are never carried across datastreams, since they
+     * are given in SOF/SOS markers or defined to be reset by SOI.
+     */
+
+    int data_precision;
+    /* bits of precision in image data */
+
+    jpeg_component_info *comp_info;
+    /* comp_info[i] describes component that appears i'th in SOF */
+
+#if JPEG_LIB_VERSION >= 80
+    boolean is_baseline;		/* TRUE if Baseline SOF0 encountered */
+#endif
+#ifdef ANDROID
+    boolean tile_decode;          /* TRUE if using tile based decoding */
+#endif
+    boolean progressive_mode;
+    /* TRUE if SOFn specifies progressive mode */
+    boolean arith_code;
+    /* TRUE=arithmetic coding, FALSE=Huffman */
+
+    UINT8 arith_dc_L[NUM_ARITH_TBLS];
+    /* L values for DC arith-coding tables */
+    UINT8 arith_dc_U[NUM_ARITH_TBLS];
+    /* U values for DC arith-coding tables */
+    UINT8 arith_ac_K[NUM_ARITH_TBLS];
+    /* Kx values for AC arith-coding tables */
+
+    unsigned int restart_interval; /* MCUs per restart interval, or 0 for no restart */
+
+    /* These fields record data obtained from optional markers recognized by
+     * the JPEG library.
+     */
+    boolean saw_JFIF_marker;    /* TRUE iff a JFIF APP0 marker was found */
+    /* Data copied from JFIF marker; only valid if saw_JFIF_marker is TRUE: */
+    UINT8 JFIF_major_version;
+    /* JFIF version number */
+    UINT8 JFIF_minor_version;
+    UINT8 density_unit;
+    /* JFIF code for pixel size units */
+    UINT16 X_density;
+    /* Horizontal pixel density */
+    UINT16 Y_density;
+    /* Vertical pixel density */
+    boolean saw_Adobe_marker;
+    /* TRUE iff an Adobe APP14 marker was found */
+    UINT8 Adobe_transform;
+    /* Color transform code from Adobe marker */
+
+    boolean CCIR601_sampling;    /* TRUE=first samples are cosited */
+
+    /* Aside from the specific data retained from APPn markers known to the
+     * library, the uninterpreted contents of any or all APPn and COM markers
+     * can be saved in a list for examination by the application.
+     */
+    jpeg_saved_marker_ptr marker_list; /* Head of list of saved markers */
+
+    /* Remaining fields are known throughout decompressor, but generally
+     * should not be touched by a surrounding application.
+     */
+
+    /*
+     * These fields are computed during decompression startup
+     */
+    int max_h_samp_factor;
+    /* largest h_samp_factor */
+    int max_v_samp_factor;    /* largest v_samp_factor */
+
+#if JPEG_LIB_VERSION >= 70
+    int min_DCT_h_scaled_size;	/* smallest DCT_h_scaled_size of any component */
+    int min_DCT_v_scaled_size;	/* smallest DCT_v_scaled_size of any component */
+#else
+    int min_DCT_scaled_size;    /* smallest DCT_scaled_size of any component */
+#endif
+
+    JDIMENSION total_iMCU_rows;    /* # of iMCU rows in image */
+    /* The coefficient controller's input and output progress is measured in
+     * units of "iMCU" (interleaved MCU) rows.  These are the same as MCU rows
+     * in fully interleaved JPEG scans, but are used whether the scan is
+     * interleaved or not.  We define an iMCU row as v_samp_factor DCT block
+     * rows of each component.  Therefore, the IDCT output contains
+     * v_samp_factor*DCT_[v_]scaled_size sample rows of a component per iMCU row.
+     */
+
+    JSAMPLE *sample_range_limit; /* table for fast range-limiting */
+
+    /*
+     * These fields are valid during any one scan.
+     * They describe the components and MCUs actually appearing in the scan.
+     * Note that the decompressor output side must not use these fields.
+     */
+    int comps_in_scan;
+    /* # of JPEG components in this scan */
+    jpeg_component_info *cur_comp_info[MAX_COMPS_IN_SCAN];
+    /* *cur_comp_info[i] describes component that appears i'th in SOS */
+
+    JDIMENSION MCUs_per_row;
+    /* # of MCUs across the image */
+    JDIMENSION MCU_rows_in_scan;
+    /* # of MCU rows in the image */
+
+    int blocks_in_MCU;
+    /* # of DCT blocks per MCU */
+    int MCU_membership[D_MAX_BLOCKS_IN_MCU];
+    /* MCU_membership[i] is index in cur_comp_info of component owning */
+    /* i'th block in an MCU */
+
+    int Ss, Se, Ah, Al;        /* progressive JPEG parameters for scan */
+
+#if JPEG_LIB_VERSION >= 80
+    /* These fields are derived from Se of first SOS marker.
+     */
+    int block_size;		/* the basic DCT block size: 1..16 */
+    const int * natural_order; /* natural-order position array for entropy decode */
+    int lim_Se;			/* min( Se, DCTSIZE2-1 ) for entropy decode */
+#endif
+
+    /* This field is shared between entropy decoder and marker parser.
+     * It is either zero or the code of a JPEG marker that has been
+     * read from the data source, but has not yet been processed.
+     */
+    int unread_marker;
+
+    /*
+     * Links to decompression subobjects (methods, private variables of modules)
+     */
+    struct jpeg_decomp_master *master;
+    struct jpeg_d_main_controller *main;
+    struct jpeg_d_coef_controller *coef;
+    struct jpeg_d_post_controller *post;
+    struct jpeg_input_controller *inputctl;
+    struct jpeg_marker_reader *marker;
+    struct jpeg_entropy_decoder *entropy;
+    struct jpeg_inverse_dct *idct;
+    struct jpeg_upsampler *upsample;
+    struct jpeg_color_deconverter *cconvert;
+    struct jpeg_color_quantizer *cquantize;
+};
+
+
+// NOTE(review): the three structs below are not in upstream jpeglib — they
+// snapshot entropy-decoder state (bitstream position, DC predictors, EOB run,
+// restart bookkeeping) so decoding can be resumed from indexed points;
+// presumably used by the tile_decode path — confirm against decoder sources.
+typedef struct {
+
+    // |--- byte_offset ---|- bit_left -|
+    //  \------ 27 -------/ \---- 5 ----/
+    unsigned int bitstream_offset;
+    short prev_dc[3];
+
+    // remaining EOBs in EOBRUN
+    unsigned short EOBRUN;
+
+    // save the decoder current bit buffer, entropy->bitstate.get_buffer.
+    INT32 get_buffer;
+
+    // save the restart info.
+    unsigned short restarts_to_go;
+    unsigned char next_restart_num;
+} huffman_offset_data;
+
+typedef struct {
+
+    // The header starting position of this scan
+    unsigned int bitstream_offset;
+
+    // Number of components in this scan
+    int comps_in_scan;
+
+    // Number of MCUs in each row
+    int MCUs_per_row;
+    int MCU_rows_per_iMCU_row;
+
+    // The last MCU position and its dc value in this scan
+    huffman_offset_data prev_MCU_offset;
+
+    huffman_offset_data **offset;
+} huffman_scan_header;
+
+// The number of MCUs sampled per index point when no other value is chosen.
+#define DEFAULT_MCU_SAMPLE_SIZE 16
+
+typedef struct {
+
+    // The number of MCUs that we sample each time as an index point
+    int MCU_sample_size;
+
+    // Number of scan in this image
+    int scan_count;
+
+    // Number of iMCUs rows in this image
+    int total_iMCU_rows;
+
+    // Memory used by scan struct
+    size_t mem_used;
+    huffman_scan_header *scan;
+} huffman_index;
+
+
+/* "Object" declarations for JPEG modules that may be supplied or called
+ * directly by the surrounding application.
+ * As with all objects in the JPEG library, these structs only define the
+ * publicly visible methods and state variables of a module.  Additional
+ * private fields may exist after the public ones.
+ */
+
+
+/* Error handler object.
+ * Applications may override any of the method pointers below; note that
+ * error_exit does not return to its caller.
+ */
+
+struct jpeg_error_mgr {
+    /* Error exit handler: does not return to caller */
+    JMETHOD(void, error_exit, (j_common_ptr
+            cinfo));
+    /* Conditionally emit a trace or warning message */
+    JMETHOD(void, emit_message, (j_common_ptr
+            cinfo,
+            int msg_level));
+    /* Routine that actually outputs a trace or error message */
+    JMETHOD(void, output_message, (j_common_ptr
+            cinfo));
+    /* Format a message string for the most recent JPEG error or message */
+    JMETHOD(void, format_message, (j_common_ptr
+            cinfo,
+            char *buffer));
+
+#define JMSG_LENGTH_MAX  200    /* recommended size of format_message buffer */
+    /* Reset error state variables at start of a new image */
+    JMETHOD(void, reset_error_mgr, (j_common_ptr
+            cinfo));
+
+    /* The message ID code and any parameters are saved here.
+     * A message can have one string parameter or up to 8 int parameters.
+     */
+    int msg_code;
+#define JMSG_STR_PARM_MAX  80
+    union {
+        int i[8];
+        char s[JMSG_STR_PARM_MAX];
+    } msg_parm;
+
+    /* Standard state variables for error facility */
+
+    int trace_level;        /* max msg_level that will be displayed */
+
+    /* For recoverable corrupt-data errors, we emit a warning message,
+     * but keep going unless emit_message chooses to abort.  emit_message
+     * should count warnings in num_warnings.  The surrounding application
+     * can check for bad data by seeing if num_warnings is nonzero at the
+     * end of processing.
+     */
+    long num_warnings;        /* number of corrupt-data warnings */
+
+    /* These fields point to the table(s) of error message strings.
+     * An application can change the table pointer to switch to a different
+     * message list (typically, to change the language in which errors are
+     * reported).  Some applications may wish to add additional error codes
+     * that will be handled by the JPEG library error mechanism; the second
+     * table pointer is used for this purpose.
+     *
+     * First table includes all errors generated by JPEG library itself.
+     * Error code 0 is reserved for a "no such error string" message.
+     */
+    const char *const *jpeg_message_table;
+    /* Library errors */
+    int last_jpeg_message;    /* Table contains strings 0..last_jpeg_message */
+    /* Second table can be added by application (see cjpeg/djpeg for example).
+     * It contains strings numbered first_addon_message..last_addon_message.
+     */
+    const char *const *addon_message_table;
+    /* Non-library errors */
+    int first_addon_message;
+    /* code for first string in addon table */
+    int last_addon_message;    /* code for last string in addon table */
+};
+
+
+/* Progress monitor object — optional; installed via the master struct's
+ * 'progress' field (NULL if none). */
+
+struct jpeg_progress_mgr {
+    JMETHOD(void, progress_monitor, (j_common_ptr
+            cinfo));
+
+    long pass_counter;
+    /* work units completed in this pass */
+    long pass_limit;
+    /* total number of work units in this pass */
+    int completed_passes;
+    /* passes completed so far */
+    int total_passes;        /* total number of passes expected */
+};
+
+
+/* Data destination object for compression — supplied by the application
+ * or by a standard manager such as jpeg_stdio_dest (declared below). */
+
+struct jpeg_destination_mgr {
+    JOCTET *next_output_byte;
+    /* => next byte to write in buffer */
+    size_t free_in_buffer;    /* # of byte spaces remaining in buffer */
+
+    JMETHOD(void, init_destination, (j_compress_ptr
+            cinfo));
+
+    JMETHOD(boolean, empty_output_buffer, (j_compress_ptr
+            cinfo));
+
+    JMETHOD(void, term_destination, (j_compress_ptr
+            cinfo));
+};
+
+
+/* Data source object for decompression.
+ * The ANDROID-only members are non-upstream additions: they track the input
+ * start pointer / current offset and add a seek callback (seek_input_data).
+ */
+
+struct jpeg_source_mgr {
+    const JOCTET *next_input_byte;
+    /* => next byte to read from buffer */
+    size_t bytes_in_buffer;    /* # of bytes remaining in buffer */
+#ifdef ANDROID
+    const JOCTET *start_input_byte;
+    /* => first byte to read from input */
+    size_t current_offset; /* current read offset within the input */
+#endif
+
+    JMETHOD(void, init_source, (j_decompress_ptr
+            cinfo));
+
+    JMETHOD(boolean, fill_input_buffer, (j_decompress_ptr
+            cinfo));
+
+    JMETHOD(void, skip_input_data, (j_decompress_ptr
+            cinfo,
+            long num_bytes));
+
+    JMETHOD(boolean, resync_to_restart, (j_decompress_ptr
+            cinfo,
+            int desired));
+
+    JMETHOD(void, term_source, (j_decompress_ptr
+            cinfo));
+
+#ifdef ANDROID
+
+    JMETHOD(boolean, seek_input_data, (j_decompress_ptr
+            cinfo,
+            long byte_offset));
+
+#endif
+};
+
+
+/* Memory manager object.
+ * Allocates "small" objects (a few K total), "large" objects (tens of K),
+ * and "really big" objects (virtual arrays with backing store if needed).
+ * The memory manager does not allow individual objects to be freed; rather,
+ * each created object is assigned to a pool, and whole pools can be freed
+ * at once.  This is faster and more convenient than remembering exactly what
+ * to free, especially where malloc()/free() are not too speedy.
+ * NB: alloc routines never return NULL.  They exit to error_exit if not
+ * successful.
+ */
+
+/* Pool identifiers passed as 'pool_id' to the allocation methods below. */
+#define JPOOL_PERMANENT    0    /* lasts until master record is destroyed */
+#define JPOOL_IMAGE    1    /* lasts until done with image/datastream */
+#define JPOOL_NUMPOOLS    2
+
+/* Opaque handles to virtual sample/block arrays managed by the memory manager. */
+typedef struct jvirt_sarray_control *jvirt_sarray_ptr;
+typedef struct jvirt_barray_control *jvirt_barray_ptr;
+
+
+struct jpeg_memory_mgr {
+    /* Method pointers.  Per the note above, the alloc_* routines never
+     * return NULL; on failure they exit through the error manager. */
+    JMETHOD(void *, alloc_small, (j_common_ptr
+            cinfo,
+            int pool_id,
+            size_t sizeofobject));
+
+    JMETHOD(void FAR
+                    *, alloc_large, (j_common_ptr
+                    cinfo,
+                    int pool_id,
+                    size_t sizeofobject));
+
+    JMETHOD(JSAMPARRAY, alloc_sarray, (j_common_ptr
+            cinfo,
+            int pool_id,
+            JDIMENSION samplesperrow,
+            JDIMENSION numrows));
+
+    JMETHOD(JBLOCKARRAY, alloc_barray, (j_common_ptr
+            cinfo,
+            int pool_id,
+            JDIMENSION blocksperrow,
+            JDIMENSION numrows));
+
+    JMETHOD(jvirt_sarray_ptr, request_virt_sarray, (j_common_ptr
+            cinfo,
+            int pool_id,
+            boolean pre_zero,
+            JDIMENSION samplesperrow,
+            JDIMENSION numrows,
+            JDIMENSION maxaccess));
+
+    JMETHOD(jvirt_barray_ptr, request_virt_barray, (j_common_ptr
+            cinfo,
+            int pool_id,
+            boolean pre_zero,
+            JDIMENSION blocksperrow,
+            JDIMENSION numrows,
+            JDIMENSION maxaccess));
+
+    JMETHOD(void, realize_virt_arrays, (j_common_ptr
+            cinfo));
+
+    JMETHOD(JSAMPARRAY, access_virt_sarray, (j_common_ptr
+            cinfo,
+                    jvirt_sarray_ptr
+            ptr,
+                    JDIMENSION
+            start_row,
+                    JDIMENSION
+            num_rows,
+                    boolean
+            writable));
+
+    JMETHOD(JBLOCKARRAY, access_virt_barray, (j_common_ptr
+            cinfo,
+                    jvirt_barray_ptr
+            ptr,
+                    JDIMENSION
+            start_row,
+                    JDIMENSION
+            num_rows,
+                    boolean
+            writable));
+
+    JMETHOD(void, free_pool, (j_common_ptr
+            cinfo,
+            int pool_id));
+
+    JMETHOD(void, self_destruct, (j_common_ptr
+            cinfo));
+
+    /* Limit on memory allocation for this JPEG object.  (Note that this is
+     * merely advisory, not a guaranteed maximum; it only affects the space
+     * used for virtual-array buffers.)  May be changed by outer application
+     * after creating the JPEG object.
+     */
+    long max_memory_to_use;
+
+    /* Maximum allocation request accepted by alloc_large. */
+    long max_alloc_chunk;
+};
+
+
+/* Routine signature for application-supplied marker processing methods.
+ * Need not pass marker code since it is stored in cinfo->unread_marker.
+ */
+typedef JMETHOD(boolean, jpeg_marker_parser_method, (j_decompress_ptr
+        cinfo));
+
+
+/* Declarations for routines called by application.
+ * The JPP macro hides prototype parameters from compilers that can't cope.
+ * Note JPP requires double parentheses.
+ */
+
+/* NOTE(review): HAVE_PROTOTYPES is presumably supplied by jconfig.h — confirm. */
+#ifdef HAVE_PROTOTYPES
+#define JPP(arglist)    arglist
+#else
+#define JPP(arglist)	()
+#endif
+
+
+/* Short forms of external names for systems with brain-damaged linkers.
+ * We shorten external names to be unique in the first six letters, which
+ * is good enough for all known systems.
+ * (If your compiler itself needs names to be unique in less than 15 
+ * characters, you are out of luck.  Get a better compiler.)
+ */
+
+/* These aliases take effect only when NEED_SHORT_EXTERNAL_NAMES is defined. */
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+#define jpeg_std_error		jStdError
+#define jpeg_CreateCompress	jCreaCompress
+#define jpeg_CreateDecompress	jCreaDecompress
+#define jpeg_destroy_compress	jDestCompress
+#define jpeg_destroy_decompress	jDestDecompress
+#define jpeg_stdio_dest		jStdDest
+#define jpeg_stdio_src		jStdSrc
+#if JPEG_LIB_VERSION >= 80
+#define jpeg_mem_dest		jMemDest
+#define jpeg_mem_src		jMemSrc
+#endif
+#define jpeg_set_defaults	jSetDefaults
+#define jpeg_set_colorspace	jSetColorspace
+#define jpeg_default_colorspace	jDefColorspace
+#define jpeg_set_quality	jSetQuality
+#define jpeg_set_linear_quality	jSetLQuality
+#if JPEG_LIB_VERSION >= 70
+#define jpeg_default_qtables	jDefQTables
+#endif
+#define jpeg_add_quant_table	jAddQuantTable
+#define jpeg_quality_scaling	jQualityScaling
+#define jpeg_simple_progression	jSimProgress
+#define jpeg_suppress_tables	jSuppressTables
+#define jpeg_alloc_quant_table	jAlcQTable
+#define jpeg_alloc_huff_table	jAlcHTable
+#define jpeg_start_compress	jStrtCompress
+#define jpeg_write_scanlines	jWrtScanlines
+#define jpeg_finish_compress	jFinCompress
+#if JPEG_LIB_VERSION >= 70
+#define jpeg_calc_jpeg_dimensions	jCjpegDimensions
+#endif
+#define jpeg_write_raw_data	jWrtRawData
+#define jpeg_write_marker	jWrtMarker
+#define jpeg_write_m_header	jWrtMHeader
+#define jpeg_write_m_byte	jWrtMByte
+#define jpeg_write_tables	jWrtTables
+#define jpeg_read_header	jReadHeader
+#define jpeg_start_decompress	jStrtDecompress
+#define jpeg_read_scanlines	jReadScanlines
+#define jpeg_finish_decompress	jFinDecompress
+#define jpeg_read_raw_data	jReadRawData
+#define jpeg_has_multiple_scans	jHasMultScn
+#define jpeg_start_output	jStrtOutput
+#define jpeg_finish_output	jFinOutput
+#define jpeg_input_complete	jInComplete
+#define jpeg_new_colormap	jNewCMap
+#define jpeg_consume_input	jConsumeInput
+#if JPEG_LIB_VERSION >= 80
+#define jpeg_core_output_dimensions	jCoreDimensions
+#endif
+#define jpeg_calc_output_dimensions	jCalcDimensions
+#define jpeg_save_markers	jSaveMarkers
+#define jpeg_set_marker_processor	jSetMarker
+#define jpeg_read_coefficients	jReadCoefs
+#define jpeg_write_coefficients	jWrtCoefs
+#define jpeg_copy_critical_parameters	jCopyCrit
+#define jpeg_abort_compress	jAbrtCompress
+#define jpeg_abort_decompress	jAbrtDecompress
+#define jpeg_abort		jAbort
+#define jpeg_destroy		jDestroy
+#define jpeg_resync_to_restart	jResyncRestart
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+
+
+/* Default error-management setup */
+EXTERN(struct jpeg_error_mgr *)jpeg_std_error
+        JPP((struct jpeg_error_mgr * err));
+
+/* Initialization of JPEG compression objects.
+ * jpeg_create_compress() and jpeg_create_decompress() are the exported
+ * names that applications should call.  These expand to calls on
+ * jpeg_CreateCompress and jpeg_CreateDecompress with additional information
+ * passed for version mismatch checking.
+ * NB: you must set up the error-manager BEFORE calling jpeg_create_xxx.
+ */
+#define jpeg_create_compress(cinfo) \
+    jpeg_CreateCompress((cinfo), JPEG_LIB_VERSION, \
+            (size_t) sizeof(struct jpeg_compress_struct))
+#define jpeg_create_decompress(cinfo) \
+    jpeg_CreateDecompress((cinfo), JPEG_LIB_VERSION, \
+              (size_t) sizeof(struct jpeg_decompress_struct))
+
+EXTERN(void) jpeg_CreateCompress JPP((j_compress_ptr
+                                             cinfo,
+                                             int version, size_t
+                                             structsize));
+
+EXTERN(void) jpeg_CreateDecompress JPP((j_decompress_ptr
+                                               cinfo,
+                                               int version, size_t
+                                               structsize));
+/* Destruction of JPEG compression objects */
+EXTERN(void) jpeg_destroy_compress JPP((j_compress_ptr
+                                               cinfo));
+
+EXTERN(void) jpeg_destroy_decompress JPP((j_decompress_ptr
+                                                 cinfo));
+
+/* Standard data source and destination managers: stdio streams. */
+/* Caller is responsible for opening the file before and closing after. */
+EXTERN(void) jpeg_stdio_dest JPP((j_compress_ptr
+                                         cinfo, FILE * outfile));
+
+EXTERN(void) jpeg_stdio_src JPP((j_decompress_ptr
+                                        cinfo, FILE * infile));
+
+#if JPEG_LIB_VERSION >= 80
+/* Data source and destination managers: memory buffers. */
+EXTERN(void) jpeg_mem_dest JPP((j_compress_ptr cinfo,
+                   unsigned char ** outbuffer,
+                   unsigned long * outsize));
+EXTERN(void) jpeg_mem_src JPP((j_decompress_ptr cinfo,
+                  unsigned char * inbuffer,
+                  unsigned long insize));
+#endif
+
+/* Default parameter setup for compression */
+EXTERN(void) jpeg_set_defaults JPP((j_compress_ptr
+                                           cinfo));
+/* Compression parameter setup aids */
+EXTERN(void) jpeg_set_colorspace JPP((j_compress_ptr
+                                             cinfo,
+                                                     J_COLOR_SPACE
+                                             colorspace));
+
+EXTERN(void) jpeg_default_colorspace JPP((j_compress_ptr
+                                                 cinfo));
+
+EXTERN(void) jpeg_set_quality JPP((j_compress_ptr
+                                          cinfo,
+                                          int quality,
+                                          boolean force_baseline));
+
+EXTERN(void) jpeg_set_linear_quality JPP((j_compress_ptr
+                                                 cinfo,
+                                                 int scale_factor,
+                                                 boolean force_baseline));
+
+#if JPEG_LIB_VERSION >= 70
+EXTERN(void) jpeg_default_qtables JPP((j_compress_ptr cinfo,
+                       boolean force_baseline));
+#endif
+
+EXTERN(void) jpeg_add_quant_table JPP((j_compress_ptr
+                                              cinfo,
+                                              int which_tbl,
+                                              const unsigned int *basic_table,
+                                              int scale_factor,
+                                              boolean force_baseline));
+
+EXTERN(int) jpeg_quality_scaling JPP((int quality));
+
+EXTERN(void) jpeg_simple_progression JPP((j_compress_ptr
+                                                 cinfo));
+
+EXTERN(void) jpeg_suppress_tables JPP((j_compress_ptr
+                                              cinfo,
+                                                      boolean
+                                              suppress));
+
+EXTERN(JQUANT_TBL *)jpeg_alloc_quant_table JPP((j_common_ptr
+                                                       cinfo));
+
+EXTERN(JHUFF_TBL *)jpeg_alloc_huff_table JPP((j_common_ptr
+                                                     cinfo));
+
+/* Main entry points for compression */
+EXTERN(void) jpeg_start_compress JPP((j_compress_ptr
+                                             cinfo,
+                                                     boolean
+                                             write_all_tables));
+
+EXTERN(JDIMENSION) jpeg_write_scanlines JPP((j_compress_ptr
+                                                    cinfo,
+                                                            JSAMPARRAY
+                                                    scanlines,
+                                                            JDIMENSION
+                                                    num_lines));
+
+EXTERN(void) jpeg_finish_compress JPP((j_compress_ptr
+                                              cinfo));
+
+#if JPEG_LIB_VERSION >= 70
+/* Precalculate JPEG dimensions for current compression parameters. */
+EXTERN(void) jpeg_calc_jpeg_dimensions JPP((j_compress_ptr cinfo));
+#endif
+
+/* Replaces jpeg_write_scanlines when writing raw downsampled data. */
+EXTERN(JDIMENSION) jpeg_write_raw_data JPP((j_compress_ptr
+                                                   cinfo,
+                                                           JSAMPIMAGE
+                                                   data,
+                                                           JDIMENSION
+                                                   num_lines));
+
+/* Write a special marker.  See libjpeg.txt concerning safe usage. */
+EXTERN(void) jpeg_write_marker
+        JPP((j_compress_ptr
+                    cinfo,
+                    int marker,
+                    const JOCTET *dataptr,
+                    unsigned int datalen));
+/* Same, but piecemeal. */
+EXTERN(void) jpeg_write_m_header
+        JPP((j_compress_ptr
+                    cinfo,
+                    int marker,
+                    unsigned int datalen));
+
+EXTERN(void) jpeg_write_m_byte
+        JPP((j_compress_ptr
+                    cinfo,
+                    int val));
+
+/* Alternate compression function: just write an abbreviated table file */
+EXTERN(void) jpeg_write_tables JPP((j_compress_ptr
+                                           cinfo));
+
+/* Decompression startup: read start of JPEG datastream to see what's there */
+EXTERN(int) jpeg_read_header JPP((j_decompress_ptr
+                                         cinfo,
+                                                 boolean
+                                         require_image));
+/* Return value is one of: */
+#define JPEG_SUSPENDED        0 /* Suspended due to lack of input data */
+#define JPEG_HEADER_OK        1 /* Found valid image datastream */
+#define JPEG_HEADER_TABLES_ONLY    2 /* Found valid table-specs-only datastream */
+/* If you pass require_image = TRUE (normal case), you need not check for
+ * a TABLES_ONLY return code; an abbreviated file will cause an error exit.
+ * JPEG_SUSPENDED is only possible if you use a data source module that can
+ * give a suspension return (the stdio source module doesn't).
+ */
+
+/* Main entry points for decompression */
+EXTERN(boolean) jpeg_start_decompress JPP((j_decompress_ptr
+                                                  cinfo));
+
+#ifdef ANDROID
+
+EXTERN(boolean) jpeg_start_tile_decompress JPP((j_decompress_ptr
+                                                       cinfo));
+
+#endif
+
+EXTERN(JDIMENSION) jpeg_read_scanlines JPP((j_decompress_ptr
+                                                   cinfo,
+                                                           JSAMPARRAY
+                                                   scanlines,
+                                                           JDIMENSION
+                                                   max_lines));
+
+EXTERN(boolean) jpeg_finish_decompress JPP((j_decompress_ptr
+                                                   cinfo));
+
+/* Replaces jpeg_read_scanlines when reading raw downsampled data. */
+EXTERN(JDIMENSION) jpeg_read_raw_data JPP((j_decompress_ptr
+                                                  cinfo,
+                                                          JSAMPIMAGE
+                                                  data,
+                                                          JDIMENSION
+                                                  max_lines));
+
+#ifdef ANDROID
+
+EXTERN(JDIMENSION) jpeg_read_scanlines_from JPP((j_decompress_ptr
+                                                        cinfo,
+                                                                JSAMPARRAY
+                                                        scanlines,
+                                                        int line_offset,
+                                                        JDIMENSION max_lines));
+
+EXTERN(JDIMENSION) jpeg_read_tile_scanline JPP((j_decompress_ptr
+                                                       cinfo,
+                                                               huffman_index * index,
+                                                               JSAMPARRAY
+                                                       scanlines));
+
+EXTERN(void) jpeg_init_read_tile_scanline JPP((j_decompress_ptr
+                                                      cinfo,
+                                                              huffman_index * index,
+                                                      int *start_x,
+                                                      int *start_y,
+                                                      int *width,
+                                                      int *height));
+
+#endif
+
+/* Additional entry points for buffered-image mode. */
+EXTERN(boolean) jpeg_has_multiple_scans JPP((j_decompress_ptr
+                                                    cinfo));
+
+EXTERN(boolean) jpeg_start_output JPP((j_decompress_ptr
+                                              cinfo,
+                                              int scan_number));
+
+EXTERN(boolean) jpeg_finish_output JPP((j_decompress_ptr
+                                               cinfo));
+
+EXTERN(boolean) jpeg_input_complete JPP((j_decompress_ptr
+                                                cinfo));
+
+EXTERN(void) jpeg_new_colormap JPP((j_decompress_ptr
+                                           cinfo));
+
+EXTERN(int) jpeg_consume_input JPP((j_decompress_ptr
+                                           cinfo));
+/* Return value is one of: */
+/* #define JPEG_SUSPENDED	0    Suspended due to lack of input data */
+#define JPEG_REACHED_SOS    1 /* Reached start of new scan */
+#define JPEG_REACHED_EOI    2 /* Reached end of image */
+#define JPEG_ROW_COMPLETED    3 /* Completed one iMCU row */
+#define JPEG_SCAN_COMPLETED    4 /* Completed last iMCU row of a scan */
+
+/* Precalculate output dimensions for current decompression parameters. */
+#if JPEG_LIB_VERSION >= 80
+EXTERN(void) jpeg_core_output_dimensions JPP((j_decompress_ptr cinfo));
+#endif
+
+EXTERN(void) jpeg_calc_output_dimensions JPP((j_decompress_ptr
+                                                     cinfo));
+
+/* Control saving of COM and APPn markers into marker_list. */
+EXTERN(void) jpeg_save_markers
+        JPP((j_decompress_ptr
+                    cinfo,
+                    int marker_code,
+                    unsigned int length_limit));
+
+/* Install a special processing method for COM or APPn markers. */
+EXTERN(void) jpeg_set_marker_processor
+        JPP((j_decompress_ptr
+                    cinfo,
+                    int marker_code,
+                    jpeg_marker_parser_method routine));
+
+/* Read or write raw DCT coefficients --- useful for lossless transcoding. */
+EXTERN(jvirt_barray_ptr *)jpeg_read_coefficients JPP((j_decompress_ptr
+                                                             cinfo));
+
+EXTERN(void) jpeg_write_coefficients JPP((j_compress_ptr
+                                                 cinfo,
+                                                         jvirt_barray_ptr * coef_arrays));
+
+EXTERN(void) jpeg_copy_critical_parameters JPP((j_decompress_ptr
+                                                       srcinfo,
+                                                               j_compress_ptr
+                                                       dstinfo));
+
+/* If you choose to abort compression or decompression before completing
+ * jpeg_finish_(de)compress, then you need to clean up to release memory,
+ * temporary files, etc.  You can just call jpeg_destroy_(de)compress
+ * if you're done with the JPEG object, but if you want to clean it up and
+ * reuse it, call this:
+ */
+EXTERN(void) jpeg_abort_compress JPP((j_compress_ptr
+                                             cinfo));
+
+EXTERN(void) jpeg_abort_decompress JPP((j_decompress_ptr
+                                               cinfo));
+
+/* Generic versions of jpeg_abort and jpeg_destroy that work on either
+ * flavor of JPEG object.  These may be more convenient in some places.
+ */
+EXTERN(void) jpeg_abort JPP((j_common_ptr
+                                    cinfo));
+
+EXTERN(void) jpeg_destroy JPP((j_common_ptr
+                                      cinfo));
+
+/* Default restart-marker-resync procedure for use by data source modules */
+EXTERN(boolean) jpeg_resync_to_restart JPP((j_decompress_ptr
+                                                   cinfo,
+                                                   int desired));
+
+#ifdef ANDROID
+
+EXTERN(boolean) jpeg_build_huffman_index
+        JPP((j_decompress_ptr
+                    cinfo, huffman_index * index));
+
+EXTERN(void) jpeg_configure_huffman_decoder(j_decompress_ptr cinfo,
+                                            huffman_offset_data offset);
+
+EXTERN(void) jpeg_get_huffman_decoder_configuration(j_decompress_ptr cinfo,
+                                                    huffman_offset_data *offset);
+
+EXTERN(void) jpeg_create_huffman_index(j_decompress_ptr cinfo,
+                                       huffman_index *index);
+
+EXTERN(void) jpeg_configure_huffman_index_scan(j_decompress_ptr cinfo,
+                                               huffman_index *index, int scan_no, int offset);
+
+EXTERN(void) jpeg_destroy_huffman_index(huffman_index *index);
+
+#endif
+
+/* These marker codes are exported since applications and data source modules
+ * are likely to want to use them.
+ */
+
+#define JPEG_RST0    0xD0    /* RST0 marker code */
+#define JPEG_EOI    0xD9    /* EOI marker code */
+#define JPEG_APP0    0xE0    /* APP0 marker code */
+#define JPEG_COM    0xFE    /* COM marker code */
+
+
+/* If we have a brain-damaged compiler that emits warnings (or worse, errors)
+ * for structure definitions that are never filled in, keep it quiet by
+ * supplying dummy definitions for the various substructures.
+ */
+
+#ifdef INCOMPLETE_TYPES_BROKEN
+#ifndef JPEG_INTERNALS		/* will be defined in jpegint.h */
+struct jvirt_sarray_control { long dummy; };
+struct jvirt_barray_control { long dummy; };
+struct jpeg_comp_master { long dummy; };
+struct jpeg_c_main_controller { long dummy; };
+struct jpeg_c_prep_controller { long dummy; };
+struct jpeg_c_coef_controller { long dummy; };
+struct jpeg_marker_writer { long dummy; };
+struct jpeg_color_converter { long dummy; };
+struct jpeg_downsampler { long dummy; };
+struct jpeg_forward_dct { long dummy; };
+struct jpeg_entropy_encoder { long dummy; };
+struct jpeg_decomp_master { long dummy; };
+struct jpeg_d_main_controller { long dummy; };
+struct jpeg_d_coef_controller { long dummy; };
+struct jpeg_d_post_controller { long dummy; };
+struct jpeg_input_controller { long dummy; };
+struct jpeg_marker_reader { long dummy; };
+struct jpeg_entropy_decoder { long dummy; };
+struct jpeg_inverse_dct { long dummy; };
+struct jpeg_upsampler { long dummy; };
+struct jpeg_color_deconverter { long dummy; };
+struct jpeg_color_quantizer { long dummy; };
+#endif /* JPEG_INTERNALS */
+#endif /* INCOMPLETE_TYPES_BROKEN */
+
+
+/*
+ * The JPEG library modules define JPEG_INTERNALS before including this file.
+ * The internal structure declarations are read only when that is true.
+ * Applications using the library should not include jpegint.h, but may wish
+ * to include jerror.h.
+ */
+
+#ifdef JPEG_INTERNALS
+#include "jpegint.h"		/* fetch private declarations */
+#include "jerror.h"		/* fetch error codes too */
+#endif
+
+#ifndef ANDROID
+#ifdef __cplusplus
+#ifndef DONT_USE_EXTERN_C
+}
+#endif
+#endif
+#endif
+
+#endif /* JPEGLIB_H */

+ 666 - 0
camerakit/src/main/cpp/libjpeg/include/jsimd.h

@@ -0,0 +1,666 @@
+/*
+ * simd/jsimd.h
+ *
+ * Copyright 2009 Pierre Ossman <ossman@cendio.se> for Cendio AB
+ * Copyright 2011 D. R. Commander
+ * 
+ * Based on the x86 SIMD extension for IJG JPEG library,
+ * Copyright (C) 1999-2006, MIYASAKA Masaru.
+ * For conditions of distribution and use, see copyright notice in jsimdext.inc
+ *
+ */
+
+/* Bitmask for supported acceleration methods */
+
+#define JSIMD_NONE       0x00
+#define JSIMD_MMX        0x01
+#define JSIMD_3DNOW      0x02
+#define JSIMD_SSE        0x04
+#define JSIMD_SSE2       0x08
+#define JSIMD_ARM_NEON   0x10
+
+/* Short forms of external names for systems with brain-damaged linkers. */
+
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+#define jpeg_simd_cpu_support                 jSiCpuSupport
+#define jsimd_rgb_ycc_convert_mmx             jSRGBYCCM
+#define jsimd_extrgb_ycc_convert_mmx          jSEXTRGBYCCM
+#define jsimd_extrgbx_ycc_convert_mmx         jSEXTRGBXYCCM
+#define jsimd_extbgr_ycc_convert_mmx          jSEXTBGRYCCM
+#define jsimd_extbgrx_ycc_convert_mmx         jSEXTBGRXYCCM
+#define jsimd_extxbgr_ycc_convert_mmx         jSEXTXBGRYCCM
+#define jsimd_extxrgb_ycc_convert_mmx         jSEXTXRGBYCCM
+#define jsimd_rgb_gray_convert_mmx            jSRGBGRYM
+#define jsimd_extrgb_gray_convert_mmx         jSEXTRGBGRYM
+#define jsimd_extrgbx_gray_convert_mmx        jSEXTRGBXGRYM
+#define jsimd_extbgr_gray_convert_mmx         jSEXTBGRGRYM
+#define jsimd_extbgrx_gray_convert_mmx        jSEXTBGRXGRYM
+#define jsimd_extxbgr_gray_convert_mmx        jSEXTXBGRGRYM
+#define jsimd_extxrgb_gray_convert_mmx        jSEXTXRGBGRYM
+#define jsimd_ycc_rgb_convert_mmx             jSYCCRGBM
+#define jsimd_ycc_extrgb_convert_mmx          jSYCCEXTRGBM
+#define jsimd_ycc_extrgbx_convert_mmx         jSYCCEXTRGBXM
+#define jsimd_ycc_extbgr_convert_mmx          jSYCCEXTBGRM
+#define jsimd_ycc_extbgrx_convert_mmx         jSYCCEXTBGRXM
+#define jsimd_ycc_extxbgr_convert_mmx         jSYCCEXTXBGRM
+#define jsimd_ycc_extxrgb_convert_mmx         jSYCCEXTXRGBM
+#define jconst_rgb_ycc_convert_sse2           jSCRGBYCCS2
+#define jsimd_rgb_ycc_convert_sse2            jSRGBYCCS2
+#define jsimd_extrgb_ycc_convert_sse2         jSEXTRGBYCCS2
+#define jsimd_extrgbx_ycc_convert_sse2        jSEXTRGBXYCCS2
+#define jsimd_extbgr_ycc_convert_sse2         jSEXTBGRYCCS2
+#define jsimd_extbgrx_ycc_convert_sse2        jSEXTBGRXYCCS2
+#define jsimd_extxbgr_ycc_convert_sse2        jSEXTXBGRYCCS2
+#define jsimd_extxrgb_ycc_convert_sse2        jSEXTXRGBYCCS2
+#define jconst_rgb_gray_convert_sse2          jSCRGBGRYS2
+#define jsimd_rgb_gray_convert_sse2           jSRGBGRYS2
+#define jsimd_extrgb_gray_convert_sse2        jSEXTRGBGRYS2
+#define jsimd_extrgbx_gray_convert_sse2       jSEXTRGBXGRYS2
+#define jsimd_extbgr_gray_convert_sse2        jSEXTBGRGRYS2
+#define jsimd_extbgrx_gray_convert_sse2       jSEXTBGRXGRYS2
+#define jsimd_extxbgr_gray_convert_sse2       jSEXTXBGRGRYS2
+#define jsimd_extxrgb_gray_convert_sse2       jSEXTXRGBGRYS2
+#define jconst_ycc_rgb_convert_sse2           jSCYCCRGBS2
+#define jsimd_ycc_rgb_convert_sse2            jSYCCRGBS2
+#define jsimd_ycc_extrgb_convert_sse2         jSYCCEXTRGBS2
+#define jsimd_ycc_extrgbx_convert_sse2        jSYCCEXTRGBXS2
+#define jsimd_ycc_extbgr_convert_sse2         jSYCCEXTBGRS2
+#define jsimd_ycc_extbgrx_convert_sse2        jSYCCEXTBGRXS2
+#define jsimd_ycc_extxbgr_convert_sse2        jSYCCEXTXBGRS2
+#define jsimd_ycc_extxrgb_convert_sse2        jSYCCEXTXRGBS2
+#define jsimd_h2v2_downsample_mmx             jSDnH2V2M
+#define jsimd_h2v1_downsample_mmx             jSDnH2V1M
+#define jsimd_h2v2_downsample_sse2            jSDnH2V2S2
+#define jsimd_h2v1_downsample_sse2            jSDnH2V1S2
+#define jsimd_h2v2_upsample_mmx               jSUpH2V2M
+#define jsimd_h2v1_upsample_mmx               jSUpH2V1M
+#define jsimd_h2v2_fancy_upsample_mmx         jSFUpH2V2M
+#define jsimd_h2v1_fancy_upsample_mmx         jSFUpH2V1M
+#define jsimd_h2v2_merged_upsample_mmx        jSMUpH2V2M
+#define jsimd_h2v2_extrgb_merged_upsample_mmx jSMUpH2V2EXTRGBM
+#define jsimd_h2v2_extrgbx_merged_upsample_mmx jSMUpH2V2EXTRGBXM
+#define jsimd_h2v2_extbgr_merged_upsample_mmx jSMUpH2V2EXTBGRM
+#define jsimd_h2v2_extbgrx_merged_upsample_mmx jSMUpH2V2EXTBGRXM
+#define jsimd_h2v2_extxbgr_merged_upsample_mmx jSMUpH2V2EXTXBGRM
+#define jsimd_h2v2_extxrgb_merged_upsample_mmx jSMUpH2V2EXTXRGBM
+#define jsimd_h2v1_merged_upsample_mmx        jSMUpH2V1M
+#define jsimd_h2v1_extrgb_merged_upsample_mmx jSMUpH2V1EXTRGBM
+#define jsimd_h2v1_extrgbx_merged_upsample_mmx jSMUpH2V1EXTRGBXM
+#define jsimd_h2v1_extbgr_merged_upsample_mmx jSMUpH2V1EXTBGRM
+#define jsimd_h2v1_extbgrx_merged_upsample_mmx jSMUpH2V1EXTBGRXM
+#define jsimd_h2v1_extxbgr_merged_upsample_mmx jSMUpH2V1EXTXBGRM
+#define jsimd_h2v1_extxrgb_merged_upsample_mmx jSMUpH2V1EXTXRGBM
+#define jsimd_h2v2_upsample_sse2              jSUpH2V2S2
+#define jsimd_h2v1_upsample_sse2              jSUpH2V1S2
+#define jconst_fancy_upsample_sse2            jSCFUpS2
+#define jsimd_h2v2_fancy_upsample_sse2        jSFUpH2V2S2
+#define jsimd_h2v1_fancy_upsample_sse2        jSFUpH2V1S2
+#define jconst_merged_upsample_sse2           jSCMUpS2
+#define jsimd_h2v2_merged_upsample_sse2       jSMUpH2V2S2
+#define jsimd_h2v2_extrgb_merged_upsample_sse2 jSMUpH2V2EXTRGBS2
+#define jsimd_h2v2_extrgbx_merged_upsample_sse2 jSMUpH2V2EXTRGBXS2
+#define jsimd_h2v2_extbgr_merged_upsample_sse2 jSMUpH2V2EXTBGRS2
+#define jsimd_h2v2_extbgrx_merged_upsample_sse2 jSMUpH2V2EXTBGRXS2
+#define jsimd_h2v2_extxbgr_merged_upsample_sse2 jSMUpH2V2EXTXBGRS2
+#define jsimd_h2v2_extxrgb_merged_upsample_sse2 jSMUpH2V2EXTXRGBS2
+#define jsimd_h2v1_merged_upsample_sse2       jSMUpH2V1S2
+#define jsimd_h2v1_extrgb_merged_upsample_sse2 jSMUpH2V1EXTRGBS2
+#define jsimd_h2v1_extrgbx_merged_upsample_sse2 jSMUpH2V1EXTRGBXS2
+#define jsimd_h2v1_extbgr_merged_upsample_sse2 jSMUpH2V1EXTBGRS2
+#define jsimd_h2v1_extbgrx_merged_upsample_sse2 jSMUpH2V1EXTBGRXS2
+#define jsimd_h2v1_extxbgr_merged_upsample_sse2 jSMUpH2V1EXTXBGRS2
+#define jsimd_h2v1_extxrgb_merged_upsample_sse2 jSMUpH2V1EXTXRGBS2
+#define jsimd_convsamp_mmx                    jSConvM
+#define jsimd_convsamp_sse2                   jSConvS2
+#define jsimd_convsamp_float_3dnow            jSConvF3D
+#define jsimd_convsamp_float_sse              jSConvFS
+#define jsimd_convsamp_float_sse2             jSConvFS2
+#define jsimd_fdct_islow_mmx                  jSFDMIS
+#define jsimd_fdct_ifast_mmx                  jSFDMIF
+#define jconst_fdct_islow_sse2                jSCFDS2IS
+#define jsimd_fdct_islow_sse2                 jSFDS2IS
+#define jconst_fdct_ifast_sse2                jSCFDS2IF
+#define jsimd_fdct_ifast_sse2                 jSFDS2IF
+#define jsimd_fdct_float_3dnow                jSFD3DF
+#define jconst_fdct_float_sse                 jSCFDSF
+#define jsimd_fdct_float_sse                  jSFDSF
+#define jsimd_quantize_mmx                    jSQuantM
+#define jsimd_quantize_sse2                   jSQuantS2
+#define jsimd_quantize_float_3dnow            jSQuantF3D
+#define jsimd_quantize_float_sse              jSQuantFS
+#define jsimd_quantize_float_sse2             jSQuantFS2
+#define jsimd_idct_2x2_mmx                    jSIDM22
+#define jsimd_idct_4x4_mmx                    jSIDM44
+#define jconst_idct_red_sse2                  jSCIDS2R
+#define jsimd_idct_2x2_sse2                   jSIDS222
+#define jsimd_idct_4x4_sse2                   jSIDS244
+#define jsimd_idct_islow_mmx                  jSIDMIS
+#define jsimd_idct_ifast_mmx                  jSIDMIF
+#define jconst_idct_islow_sse2                jSCIDS2IS
+#define jsimd_idct_islow_sse2                 jSIDS2IS
+#define jconst_idct_ifast_sse2                jSCIDS2IF
+#define jsimd_idct_ifast_sse2                 jSIDS2IF
+#define jsimd_idct_float_3dnow                jSID3DF
+#define jconst_fdct_float_sse                 jSCIDSF
+#define jsimd_idct_float_sse                  jSIDSF
+#define jconst_fdct_float_sse2                jSCIDS2F
+#define jsimd_idct_float_sse2                 jSIDS2F
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+
+/* SIMD Ext: retrieve SIMD/CPU information */
+EXTERN(unsigned int) jpeg_simd_cpu_support JPP((void));
+
+/* SIMD Color Space Conversion */
+EXTERN(void) jsimd_rgb_ycc_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extrgb_ycc_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extrgbx_ycc_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extbgr_ycc_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extbgrx_ycc_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extxbgr_ycc_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extxrgb_ycc_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+
+EXTERN(void) jsimd_rgb_gray_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extrgb_gray_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extrgbx_gray_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extbgr_gray_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extbgrx_gray_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extxbgr_gray_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extxrgb_gray_convert_mmx
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+
+EXTERN(void) jsimd_ycc_rgb_convert_mmx
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extrgb_convert_mmx
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extrgbx_convert_mmx
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extbgr_convert_mmx
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extbgrx_convert_mmx
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extxbgr_convert_mmx
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extxrgb_convert_mmx
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+
+extern const int jconst_rgb_ycc_convert_sse2[];
+EXTERN(void) jsimd_rgb_ycc_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extrgb_ycc_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extrgbx_ycc_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extbgr_ycc_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extbgrx_ycc_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extxbgr_ycc_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extxrgb_ycc_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+
+extern const int jconst_rgb_gray_convert_sse2[];
+EXTERN(void) jsimd_rgb_gray_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extrgb_gray_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extrgbx_gray_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extbgr_gray_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extbgrx_gray_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extxbgr_gray_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extxrgb_gray_convert_sse2
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+
+extern const int jconst_ycc_rgb_convert_sse2[];
+EXTERN(void) jsimd_ycc_rgb_convert_sse2
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extrgb_convert_sse2
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extrgbx_convert_sse2
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extbgr_convert_sse2
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extbgrx_convert_sse2
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extxbgr_convert_sse2
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extxrgb_convert_sse2
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+
+EXTERN(void) jsimd_rgb_ycc_convert_neon
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extrgb_ycc_convert_neon
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extrgbx_ycc_convert_neon
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extbgr_ycc_convert_neon
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extbgrx_ycc_convert_neon
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extxbgr_ycc_convert_neon
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+EXTERN(void) jsimd_extxrgb_ycc_convert_neon
+        JPP((JDIMENSION img_width,
+             JSAMPARRAY input_buf, JSAMPIMAGE output_buf,
+             JDIMENSION output_row, int num_rows));
+
+EXTERN(void) jsimd_ycc_rgb_convert_neon
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extrgb_convert_neon
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extrgbx_convert_neon
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extbgr_convert_neon
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extbgrx_convert_neon
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extxbgr_convert_neon
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+EXTERN(void) jsimd_ycc_extxrgb_convert_neon
+        JPP((JDIMENSION out_width,
+             JSAMPIMAGE input_buf, JDIMENSION input_row,
+             JSAMPARRAY output_buf, int num_rows));
+
+/* SIMD Downsample */
+EXTERN(void) jsimd_h2v2_downsample_mmx
+        JPP((JDIMENSION image_width, int max_v_samp_factor,
+             JDIMENSION v_samp_factor, JDIMENSION width_blocks,
+             JSAMPARRAY input_data, JSAMPARRAY output_data));
+EXTERN(void) jsimd_h2v1_downsample_mmx
+        JPP((JDIMENSION image_width, int max_v_samp_factor,
+             JDIMENSION v_samp_factor, JDIMENSION width_blocks,
+             JSAMPARRAY input_data, JSAMPARRAY output_data));
+
+EXTERN(void) jsimd_h2v2_downsample_sse2
+        JPP((JDIMENSION image_width, int max_v_samp_factor,
+             JDIMENSION v_samp_factor, JDIMENSION width_blocks,
+             JSAMPARRAY input_data, JSAMPARRAY output_data));
+EXTERN(void) jsimd_h2v1_downsample_sse2
+        JPP((JDIMENSION image_width, int max_v_samp_factor,
+             JDIMENSION v_samp_factor, JDIMENSION width_blocks,
+             JSAMPARRAY input_data, JSAMPARRAY output_data));
+
+/* SIMD Upsample */
+EXTERN(void) jsimd_h2v2_upsample_mmx
+        JPP((int max_v_samp_factor, JDIMENSION output_width,
+             JSAMPARRAY input_data, JSAMPARRAY * output_data_ptr));
+EXTERN(void) jsimd_h2v1_upsample_mmx
+        JPP((int max_v_samp_factor, JDIMENSION output_width,
+             JSAMPARRAY input_data, JSAMPARRAY * output_data_ptr));
+
+EXTERN(void) jsimd_h2v2_fancy_upsample_mmx
+        JPP((int max_v_samp_factor, JDIMENSION downsampled_width,
+             JSAMPARRAY input_data, JSAMPARRAY * output_data_ptr));
+EXTERN(void) jsimd_h2v1_fancy_upsample_mmx
+        JPP((int max_v_samp_factor, JDIMENSION downsampled_width,
+             JSAMPARRAY input_data, JSAMPARRAY * output_data_ptr));
+
+EXTERN(void) jsimd_h2v2_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extrgb_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extrgbx_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extbgr_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extbgrx_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extxbgr_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extxrgb_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extrgb_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extrgbx_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extbgr_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extbgrx_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extxbgr_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extxrgb_merged_upsample_mmx
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+
+EXTERN(void) jsimd_h2v2_upsample_sse2
+        JPP((int max_v_samp_factor, JDIMENSION output_width,
+             JSAMPARRAY input_data, JSAMPARRAY * output_data_ptr));
+EXTERN(void) jsimd_h2v1_upsample_sse2
+        JPP((int max_v_samp_factor, JDIMENSION output_width,
+             JSAMPARRAY input_data, JSAMPARRAY * output_data_ptr));
+
+extern const int jconst_fancy_upsample_sse2[];
+EXTERN(void) jsimd_h2v2_fancy_upsample_sse2
+        JPP((int max_v_samp_factor, JDIMENSION downsampled_width,
+             JSAMPARRAY input_data, JSAMPARRAY * output_data_ptr));
+EXTERN(void) jsimd_h2v1_fancy_upsample_sse2
+        JPP((int max_v_samp_factor, JDIMENSION downsampled_width,
+             JSAMPARRAY input_data, JSAMPARRAY * output_data_ptr));
+
+extern const int jconst_merged_upsample_sse2[];
+EXTERN(void) jsimd_h2v2_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extrgb_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extrgbx_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extbgr_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extbgrx_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extxbgr_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v2_extxrgb_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extrgb_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extrgbx_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extbgr_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extbgrx_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extxbgr_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+EXTERN(void) jsimd_h2v1_extxrgb_merged_upsample_sse2
+        JPP((JDIMENSION output_width, JSAMPIMAGE input_buf,
+             JDIMENSION in_row_group_ctr, JSAMPARRAY output_buf));
+
+/* SIMD Sample Conversion */
+EXTERN(void) jsimd_convsamp_mmx JPP((JSAMPARRAY sample_data,
+                                     JDIMENSION start_col,
+                                     DCTELEM * workspace));
+
+EXTERN(void) jsimd_convsamp_sse2 JPP((JSAMPARRAY sample_data,
+                                      JDIMENSION start_col,
+                                      DCTELEM * workspace));
+
+EXTERN(void) jsimd_convsamp_neon JPP((JSAMPARRAY sample_data,
+                                      JDIMENSION start_col,
+                                      DCTELEM * workspace));
+
+EXTERN(void) jsimd_convsamp_float_3dnow JPP((JSAMPARRAY sample_data,
+                                             JDIMENSION start_col,
+                                             FAST_FLOAT * workspace));
+
+EXTERN(void) jsimd_convsamp_float_sse JPP((JSAMPARRAY sample_data,
+                                           JDIMENSION start_col,
+                                           FAST_FLOAT * workspace));
+
+EXTERN(void) jsimd_convsamp_float_sse2 JPP((JSAMPARRAY sample_data,
+                                            JDIMENSION start_col,
+                                            FAST_FLOAT * workspace));
+
+/* SIMD Forward DCT */
+EXTERN(void) jsimd_fdct_islow_mmx JPP((DCTELEM * data));
+EXTERN(void) jsimd_fdct_ifast_mmx JPP((DCTELEM * data));
+
+extern const int jconst_fdct_ifast_sse2[];
+EXTERN(void) jsimd_fdct_islow_sse2 JPP((DCTELEM * data));
+extern const int jconst_fdct_islow_sse2[];
+EXTERN(void) jsimd_fdct_ifast_sse2 JPP((DCTELEM * data));
+
+EXTERN(void) jsimd_fdct_ifast_neon JPP((DCTELEM * data));
+
+EXTERN(void) jsimd_fdct_float_3dnow JPP((FAST_FLOAT * data));
+
+extern const int jconst_fdct_float_sse[];
+EXTERN(void) jsimd_fdct_float_sse JPP((FAST_FLOAT * data));
+
+/* SIMD Quantization */
+EXTERN(void) jsimd_quantize_mmx JPP((JCOEFPTR coef_block,
+                                     DCTELEM * divisors,
+                                     DCTELEM * workspace));
+
+EXTERN(void) jsimd_quantize_sse2 JPP((JCOEFPTR coef_block,
+                                      DCTELEM * divisors,
+                                      DCTELEM * workspace));
+
+EXTERN(void) jsimd_quantize_neon JPP((JCOEFPTR coef_block,
+                                      DCTELEM * divisors,
+                                      DCTELEM * workspace));
+
+EXTERN(void) jsimd_quantize_float_3dnow JPP((JCOEFPTR coef_block,
+                                             FAST_FLOAT * divisors,
+                                             FAST_FLOAT * workspace));
+
+EXTERN(void) jsimd_quantize_float_sse JPP((JCOEFPTR coef_block,
+                                           FAST_FLOAT * divisors,
+                                           FAST_FLOAT * workspace));
+
+EXTERN(void) jsimd_quantize_float_sse2 JPP((JCOEFPTR coef_block,
+                                            FAST_FLOAT * divisors,
+                                            FAST_FLOAT * workspace));
+
+/* SIMD Reduced Inverse DCT */
+EXTERN(void) jsimd_idct_2x2_mmx JPP((void * dct_table,
+                                     JCOEFPTR coef_block,
+                                     JSAMPARRAY output_buf,
+                                     JDIMENSION output_col));
+EXTERN(void) jsimd_idct_4x4_mmx JPP((void * dct_table,
+                                     JCOEFPTR coef_block,
+                                     JSAMPARRAY output_buf,
+                                     JDIMENSION output_col));
+
+extern const int jconst_idct_red_sse2[];
+EXTERN(void) jsimd_idct_2x2_sse2 JPP((void * dct_table,
+                                      JCOEFPTR coef_block,
+                                      JSAMPARRAY output_buf,
+                                      JDIMENSION output_col));
+EXTERN(void) jsimd_idct_4x4_sse2 JPP((void * dct_table,
+                                      JCOEFPTR coef_block,
+                                      JSAMPARRAY output_buf,
+                                      JDIMENSION output_col));
+
+EXTERN(void) jsimd_idct_2x2_neon JPP((void * dct_table,
+                                      JCOEFPTR coef_block,
+                                      JSAMPARRAY output_buf,
+                                      JDIMENSION output_col));
+EXTERN(void) jsimd_idct_4x4_neon JPP((void * dct_table,
+                                      JCOEFPTR coef_block,
+                                      JSAMPARRAY output_buf,
+                                      JDIMENSION output_col));
+
+/* SIMD Inverse DCT */
+EXTERN(void) jsimd_idct_islow_mmx JPP((void * dct_table,
+                                       JCOEFPTR coef_block,
+                                       JSAMPARRAY output_buf,
+                                       JDIMENSION output_col));
+EXTERN(void) jsimd_idct_ifast_mmx JPP((void * dct_table,
+                                       JCOEFPTR coef_block,
+                                       JSAMPARRAY output_buf,
+                                       JDIMENSION output_col));
+
+extern const int jconst_idct_islow_sse2[];
+EXTERN(void) jsimd_idct_islow_sse2 JPP((void * dct_table,
+                                        JCOEFPTR coef_block,
+                                        JSAMPARRAY output_buf,
+                                        JDIMENSION output_col));
+extern const int jconst_idct_ifast_sse2[];
+EXTERN(void) jsimd_idct_ifast_sse2 JPP((void * dct_table,
+                                        JCOEFPTR coef_block,
+                                        JSAMPARRAY output_buf,
+                                        JDIMENSION output_col));
+
+EXTERN(void) jsimd_idct_islow_neon JPP((void * dct_table,
+                                        JCOEFPTR coef_block,
+                                        JSAMPARRAY output_buf,
+                                        JDIMENSION output_col));
+EXTERN(void) jsimd_idct_ifast_neon JPP((void * dct_table,
+                                        JCOEFPTR coef_block,
+                                        JSAMPARRAY output_buf,
+                                        JDIMENSION output_col));
+
+EXTERN(void) jsimd_idct_float_3dnow JPP((void * dct_table,
+                                         JCOEFPTR coef_block,
+                                         JSAMPARRAY output_buf,
+                                         JDIMENSION output_col));
+
+extern const int jconst_idct_float_sse[];
+EXTERN(void) jsimd_idct_float_sse JPP((void * dct_table,
+                                       JCOEFPTR coef_block,
+                                       JSAMPARRAY output_buf,
+                                       JDIMENSION output_col));
+
+extern const int jconst_idct_float_sse2[];
+EXTERN(void) jsimd_idct_float_sse2 JPP((void * dct_table,
+                                        JCOEFPTR coef_block,
+                                        JSAMPARRAY output_buf,
+                                        JDIMENSION output_col));
+

+ 199 - 0
camerakit/src/main/cpp/libjpeg/include/jsimdcfg.inc.h

@@ -0,0 +1,199 @@
+// This file generates the include file for the assembly
+// implementations by abusing the C preprocessor.
+//
+// Note: Some things are manually defined as they need to
+// be mapped to NASM types.
+
+;
+; Automatically generated include file from jsimdcfg.inc.h
+;
+
+#define JPEG_INTERNALS
+
+#include "../jpeglib.h"
+#include "../jconfig.h"
+#include "../jmorecfg.h"
+#include "jsimd.h"
+
+#define define(var) %define _cpp_protection_##var
+#define definev(var) %define _cpp_protection_##var var
+
+;
+; -- jpeglib.h
+;
+
+definev(DCTSIZE)
+definev(DCTSIZE2)
+
+;
+; -- jmorecfg.h
+;
+
+definev(RGB_RED)
+definev(RGB_GREEN)
+definev(RGB_BLUE)
+definev(RGB_PIXELSIZE)
+
+definev(EXT_RGB_RED)
+definev(EXT_RGB_GREEN)
+definev(EXT_RGB_BLUE)
+definev(EXT_RGB_PIXELSIZE)
+
+definev(EXT_RGBX_RED)
+definev(EXT_RGBX_GREEN)
+definev(EXT_RGBX_BLUE)
+definev(EXT_RGBX_PIXELSIZE)
+
+definev(EXT_BGR_RED)
+definev(EXT_BGR_GREEN)
+definev(EXT_BGR_BLUE)
+definev(EXT_BGR_PIXELSIZE)
+
+definev(EXT_BGRX_RED)
+definev(EXT_BGRX_GREEN)
+definev(EXT_BGRX_BLUE)
+definev(EXT_BGRX_PIXELSIZE)
+
+definev(EXT_XBGR_RED)
+definev(EXT_XBGR_GREEN)
+definev(EXT_XBGR_BLUE)
+definev(EXT_XBGR_PIXELSIZE)
+
+definev(EXT_XRGB_RED)
+definev(EXT_XRGB_GREEN)
+definev(EXT_XRGB_BLUE)
+definev(EXT_XRGB_PIXELSIZE)
+
+%define RGBX_FILLER_0XFF        1
+
+; Representation of a single sample (pixel element value).
+; On this SIMD implementation, this must be 'unsigned char'.
+;
+
+%define JSAMPLE                 byte          ; unsigned char
+%define SIZEOF_JSAMPLE          SIZEOF_BYTE   ; sizeof(JSAMPLE)
+
+definev(CENTERJSAMPLE)
+
+; Representation of a DCT frequency coefficient.
+; On this SIMD implementation, this must be 'short'.
+;
+%define JCOEF                   word          ; short
+%define SIZEOF_JCOEF            SIZEOF_WORD   ; sizeof(JCOEF)
+
+; Datatype used for image dimensions.
+; On this SIMD implementation, this must be 'unsigned int'.
+;
+%define JDIMENSION              dword         ; unsigned int
+%define SIZEOF_JDIMENSION       SIZEOF_DWORD  ; sizeof(JDIMENSION)
+
+%define JSAMPROW                POINTER       ; JSAMPLE FAR * (jpeglib.h)
+%define JSAMPARRAY              POINTER       ; JSAMPROW *    (jpeglib.h)
+%define JSAMPIMAGE              POINTER       ; JSAMPARRAY *  (jpeglib.h)
+%define JCOEFPTR                POINTER       ; JCOEF FAR *   (jpeglib.h)
+%define SIZEOF_JSAMPROW         SIZEOF_POINTER  ; sizeof(JSAMPROW)
+%define SIZEOF_JSAMPARRAY       SIZEOF_POINTER  ; sizeof(JSAMPARRAY)
+%define SIZEOF_JSAMPIMAGE       SIZEOF_POINTER  ; sizeof(JSAMPIMAGE)
+%define SIZEOF_JCOEFPTR         SIZEOF_POINTER  ; sizeof(JCOEFPTR)
+
+;
+; -- jdct.h
+;
+
+; A forward DCT routine is given a pointer to a work area of type DCTELEM[];
+; the DCT is to be performed in-place in that buffer.
+; To maximize parallelism, Type DCTELEM is changed to short (originally, int).
+;
+%define DCTELEM                 word          ; short
+%define SIZEOF_DCTELEM          SIZEOF_WORD   ; sizeof(DCTELEM)
+
+%define FAST_FLOAT              FP32            ; float
+%define SIZEOF_FAST_FLOAT       SIZEOF_FP32     ; sizeof(FAST_FLOAT)
+
+; To maximize parallelism, Type MULTIPLIER is changed to short.
+;
+%define ISLOW_MULT_TYPE         word          ; must be short
+%define SIZEOF_ISLOW_MULT_TYPE  SIZEOF_WORD   ; sizeof(ISLOW_MULT_TYPE)
+
+%define IFAST_MULT_TYPE         word          ; must be short
+%define SIZEOF_IFAST_MULT_TYPE  SIZEOF_WORD   ; sizeof(IFAST_MULT_TYPE)
+%define IFAST_SCALE_BITS        2             ; fractional bits in scale factors
+
+%define FLOAT_MULT_TYPE         FP32          ; must be float
+%define SIZEOF_FLOAT_MULT_TYPE  SIZEOF_FP32   ; sizeof(FLOAT_MULT_TYPE)
+
+;
+; -- jsimd.h
+;
+
+definev(JSIMD_NONE)
+definev(JSIMD_MMX)
+definev(JSIMD_3DNOW)
+definev(JSIMD_SSE)
+definev(JSIMD_SSE2)
+
+; Short forms of external names for systems with brain-damaged linkers.
+;
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+definev(jpeg_simd_cpu_support)
+definev(jsimd_rgb_ycc_convert_mmx)
+definev(jsimd_ycc_rgb_convert_mmx)
+definev(jconst_rgb_ycc_convert_sse2)
+definev(jsimd_rgb_ycc_convert_sse2)
+definev(jconst_ycc_rgb_convert_sse2)
+definev(jsimd_ycc_rgb_convert_sse2)
+definev(jsimd_h2v2_downsample_mmx)
+definev(jsimd_h2v1_downsample_mmx)
+definev(jsimd_h2v2_downsample_sse2)
+definev(jsimd_h2v1_downsample_sse2)
+definev(jsimd_h2v2_upsample_mmx)
+definev(jsimd_h2v1_upsample_mmx)
+definev(jsimd_h2v1_fancy_upsample_mmx)
+definev(jsimd_h2v2_fancy_upsample_mmx)
+definev(jsimd_h2v1_merged_upsample_mmx)
+definev(jsimd_h2v2_merged_upsample_mmx)
+definev(jsimd_h2v2_upsample_sse2)
+definev(jsimd_h2v1_upsample_sse2)
+definev(jconst_fancy_upsample_sse2)
+definev(jsimd_h2v1_fancy_upsample_sse2)
+definev(jsimd_h2v2_fancy_upsample_sse2)
+definev(jconst_merged_upsample_sse2)
+definev(jsimd_h2v1_merged_upsample_sse2)
+definev(jsimd_h2v2_merged_upsample_sse2)
+definev(jsimd_convsamp_mmx)
+definev(jsimd_convsamp_sse2)
+definev(jsimd_convsamp_float_3dnow)
+definev(jsimd_convsamp_float_sse)
+definev(jsimd_convsamp_float_sse2)
+definev(jsimd_fdct_islow_mmx)
+definev(jsimd_fdct_ifast_mmx)
+definev(jconst_fdct_islow_sse2)
+definev(jsimd_fdct_islow_sse2)
+definev(jconst_fdct_ifast_sse2)
+definev(jsimd_fdct_ifast_sse2)
+definev(jsimd_fdct_float_3dnow)
+definev(jconst_fdct_float_sse)
+definev(jsimd_fdct_float_sse)
+definev(jsimd_quantize_mmx)
+definev(jsimd_quantize_sse2)
+definev(jsimd_quantize_float_3dnow)
+definev(jsimd_quantize_float_sse)
+definev(jsimd_quantize_float_sse2)
+definev(jsimd_idct_2x2_mmx)
+definev(jsimd_idct_4x4_mmx)
+definev(jconst_idct_red_sse2)
+definev(jsimd_idct_2x2_sse2)
+definev(jsimd_idct_4x4_sse2)
+definev(jsimd_idct_islow_mmx)
+definev(jsimd_idct_ifast_mmx)
+definev(jconst_idct_islow_sse2)
+definev(jsimd_idct_islow_sse2)
+definev(jconst_idct_ifast_sse2)
+definev(jsimd_idct_ifast_sse2)
+definev(jsimd_idct_float_3dnow)
+definev(jconst_idct_float_sse)
+definev(jsimd_idct_float_sse)
+definev(jconst_idct_float_sse2)
+definev(jsimd_idct_float_sse2)
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+

+ 102 - 0
camerakit/src/main/cpp/libjpeg/include/jsimddct.h

@@ -0,0 +1,102 @@
+/*
+ * jsimddct.h
+ *
+ * Copyright 2009 Pierre Ossman <ossman@cendio.se> for Cendio AB
+ * 
+ * Based on the x86 SIMD extension for IJG JPEG library,
+ * Copyright (C) 1999-2006, MIYASAKA Masaru.
+ * For conditions of distribution and use, see copyright notice in jsimdext.inc
+ *
+ */
+
+/* Short forms of external names for systems with brain-damaged linkers. */
+
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+#define jsimd_can_convsamp                jSCanConv
+#define jsimd_can_convsamp_float          jSCanConvF
+#define jsimd_convsamp                    jSConv
+#define jsimd_convsamp_float              jSConvF
+#define jsimd_can_fdct_islow              jSCanFDCTIS
+#define jsimd_can_fdct_ifast              jSCanFDCTIF
+#define jsimd_can_fdct_float              jSCanFDCTFl
+#define jsimd_fdct_islow                  jSFDCTIS
+#define jsimd_fdct_ifast                  jSFDCTIF
+#define jsimd_fdct_float                  jSFDCTFl
+#define jsimd_can_quantize                jSCanQuant
+#define jsimd_can_quantize_float          jSCanQuantF
+#define jsimd_quantize                    jSQuant
+#define jsimd_quantize_float              jSQuantF
+#define jsimd_can_idct_2x2                jSCanIDCT22
+#define jsimd_can_idct_4x4                jSCanIDCT44
+#define jsimd_idct_2x2                    jSIDCT22
+#define jsimd_idct_4x4                    jSIDCT44
+#define jsimd_can_idct_islow              jSCanIDCTIS
+#define jsimd_can_idct_ifast              jSCanIDCTIF
+#define jsimd_can_idct_float              jSCanIDCTFl
+#define jsimd_idct_islow                  jSIDCTIS
+#define jsimd_idct_ifast                  jSIDCTIF
+#define jsimd_idct_float                  jSIDCTFl
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+
+EXTERN(int) jsimd_can_convsamp JPP((void));
+EXTERN(int) jsimd_can_convsamp_float JPP((void));
+
+EXTERN(void) jsimd_convsamp JPP((JSAMPARRAY sample_data,
+                                 JDIMENSION start_col,
+                                 DCTELEM * workspace));
+EXTERN(void) jsimd_convsamp_float JPP((JSAMPARRAY sample_data,
+                                       JDIMENSION start_col,
+                                       FAST_FLOAT * workspace));
+
+EXTERN(int) jsimd_can_fdct_islow JPP((void));
+EXTERN(int) jsimd_can_fdct_ifast JPP((void));
+EXTERN(int) jsimd_can_fdct_float JPP((void));
+
+EXTERN(void) jsimd_fdct_islow JPP((DCTELEM * data));
+EXTERN(void) jsimd_fdct_ifast JPP((DCTELEM * data));
+EXTERN(void) jsimd_fdct_float JPP((FAST_FLOAT * data));
+
+EXTERN(int) jsimd_can_quantize JPP((void));
+EXTERN(int) jsimd_can_quantize_float JPP((void));
+
+EXTERN(void) jsimd_quantize JPP((JCOEFPTR coef_block,
+                                 DCTELEM * divisors,
+                                 DCTELEM * workspace));
+EXTERN(void) jsimd_quantize_float JPP((JCOEFPTR coef_block,
+                                       FAST_FLOAT * divisors,
+                                       FAST_FLOAT * workspace));
+
+EXTERN(int) jsimd_can_idct_2x2 JPP((void));
+EXTERN(int) jsimd_can_idct_4x4 JPP((void));
+
+EXTERN(void) jsimd_idct_2x2 JPP((j_decompress_ptr cinfo,
+                                 jpeg_component_info * compptr,
+                                 JCOEFPTR coef_block,
+                                 JSAMPARRAY output_buf,
+                                 JDIMENSION output_col));
+EXTERN(void) jsimd_idct_4x4 JPP((j_decompress_ptr cinfo,
+                                 jpeg_component_info * compptr,
+                                 JCOEFPTR coef_block,
+                                 JSAMPARRAY output_buf,
+                                 JDIMENSION output_col));
+
+EXTERN(int) jsimd_can_idct_islow JPP((void));
+EXTERN(int) jsimd_can_idct_ifast JPP((void));
+EXTERN(int) jsimd_can_idct_float JPP((void));
+
+EXTERN(void) jsimd_idct_islow JPP((j_decompress_ptr cinfo,
+                                   jpeg_component_info * compptr,
+                                   JCOEFPTR coef_block,
+                                   JSAMPARRAY output_buf,
+                                   JDIMENSION output_col));
+EXTERN(void) jsimd_idct_ifast JPP((j_decompress_ptr cinfo,
+                                   jpeg_component_info * compptr,
+                                   JCOEFPTR coef_block,
+                                   JSAMPARRAY output_buf,
+                                   JDIMENSION output_col));
+EXTERN(void) jsimd_idct_float JPP((j_decompress_ptr cinfo,
+                                   jpeg_component_info * compptr,
+                                   JCOEFPTR coef_block,
+                                   JSAMPARRAY output_buf,
+                                   JDIMENSION output_col));
+

+ 36 - 0
camerakit/src/main/cpp/libjpeg/include/jversion.h

@@ -0,0 +1,36 @@
+/*
+ * jversion.h
+ *
+ * Copyright (C) 1991-2010, Thomas G. Lane, Guido Vollbeding.
+ * Copyright (C) 2010, D. R. Commander.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file contains software version identification.
+ */
+
+
+#if JPEG_LIB_VERSION >= 80
+
+#define JVERSION	"8b  16-May-2010"
+
+#define JCOPYRIGHT	"Copyright (C) 2010, Thomas G. Lane, Guido Vollbeding"
+
+#elif JPEG_LIB_VERSION >= 70
+
+#define JVERSION        "7  27-Jun-2009"
+
+#define JCOPYRIGHT      "Copyright (C) 2009, Thomas G. Lane, Guido Vollbeding"
+
+#else
+
+#define JVERSION	"6b  27-Mar-1998"
+
+#define JCOPYRIGHT	"Copyright (C) 1998, Thomas G. Lane"
+
+#endif
+
+#define LJTCOPYRIGHT	"Copyright (C) 1999-2006 MIYASAKA Masaru\n" \
+			"Copyright (C) 2009 Pierre Ossman for Cendio AB\n" \
+			"Copyright (C) 2009-2011 D. R. Commander\n" \
+			"Copyright (C) 2009-2011 Nokia Corporation and/or its subsidiary(-ies)"

+ 47 - 0
camerakit/src/main/cpp/libjpeg/include/tjutil.h

@@ -0,0 +1,47 @@
+/*
+ * Copyright (C)2011 D. R. Commander.  All Rights Reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright notice,
+ *   this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright notice,
+ *   this list of conditions and the following disclaimer in the documentation
+ *   and/or other materials provided with the distribution.
+ * - Neither the name of the libjpeg-turbo Project nor the names of its
+ *   contributors may be used to endorse or promote products derived from this
+ *   software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS",
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifdef _WIN32
+	#ifndef __MINGW32__
+		#include <stdio.h>
+		#define snprintf(str, n, format, ...)  \
+			_snprintf_s(str, n, _TRUNCATE, format, __VA_ARGS__)
+	#endif
+	#define strcasecmp stricmp
+	#define strncasecmp strnicmp
+#endif
+
+#ifndef min
+ #define min(a,b) ((a)<(b)?(a):(b))
+#endif
+
+#ifndef max
+ #define max(a,b) ((a)>(b)?(a):(b))
+#endif
+
+extern double gettime(void);

+ 217 - 0
camerakit/src/main/cpp/libjpeg/include/transupp.h

@@ -0,0 +1,217 @@
+/*
+ * transupp.h
+ *
+ * Copyright (C) 1997-2009, Thomas G. Lane, Guido Vollbeding.
+ * This file is part of the Independent JPEG Group's software.
+ * For conditions of distribution and use, see the accompanying README file.
+ *
+ * This file contains declarations for image transformation routines and
+ * other utility code used by the jpegtran sample application.  These are
+ * NOT part of the core JPEG library.  But we keep these routines separate
+ * from jpegtran.c to ease the task of maintaining jpegtran-like programs
+ * that have other user interfaces.
+ *
+ * NOTE: all the routines declared here have very specific requirements
+ * about when they are to be executed during the reading and writing of the
+ * source and destination files.  See the comments in transupp.c, or see
+ * jpegtran.c for an example of correct usage.
+ */
+
+/* If you happen not to want the image transform support, disable it here */
+#ifndef TRANSFORMS_SUPPORTED
+#define TRANSFORMS_SUPPORTED 1		/* 0 disables transform code */
+#endif
+
+/*
+ * Although rotating and flipping data expressed as DCT coefficients is not
+ * hard, there is an asymmetry in the JPEG format specification for images
+ * whose dimensions aren't multiples of the iMCU size.  The right and bottom
+ * image edges are padded out to the next iMCU boundary with junk data; but
+ * no padding is possible at the top and left edges.  If we were to flip
+ * the whole image including the pad data, then pad garbage would become
+ * visible at the top and/or left, and real pixels would disappear into the
+ * pad margins --- perhaps permanently, since encoders & decoders may not
+ * bother to preserve DCT blocks that appear to be completely outside the
+ * nominal image area.  So, we have to exclude any partial iMCUs from the
+ * basic transformation.
+ *
+ * Transpose is the only transformation that can handle partial iMCUs at the
+ * right and bottom edges completely cleanly.  flip_h can flip partial iMCUs
+ * at the bottom, but leaves any partial iMCUs at the right edge untouched.
+ * Similarly flip_v leaves any partial iMCUs at the bottom edge untouched.
+ * The other transforms are defined as combinations of these basic transforms
+ * and process edge blocks in a way that preserves the equivalence.
+ *
+ * The "trim" option causes untransformable partial iMCUs to be dropped;
+ * this is not strictly lossless, but it usually gives the best-looking
+ * result for odd-size images.  Note that when this option is active,
+ * the expected mathematical equivalences between the transforms may not hold.
+ * (For example, -rot 270 -trim trims only the bottom edge, but -rot 90 -trim
+ * followed by -rot 180 -trim trims both edges.)
+ *
+ * We also offer a lossless-crop option, which discards data outside a given
+ * image region but losslessly preserves what is inside.  Like the rotate and
+ * flip transforms, lossless crop is restricted by the JPEG format: the upper
+ * left corner of the selected region must fall on an iMCU boundary.  If this
+ * does not hold for the given crop parameters, we silently move the upper left
+ * corner up and/or left to make it so, simultaneously increasing the region
+ * dimensions to keep the lower right crop corner unchanged.  (Thus, the
+ * output image covers at least the requested region, but may cover more.)
+ *
+ * We also provide a lossless-resize option, which is kind of a lossless-crop
+ * operation in the DCT coefficient block domain - it discards higher-order
+ * coefficients and losslessly preserves lower-order coefficients of a
+ * sub-block.
+ *
+ * Rotate/flip transform, resize, and crop can be requested together in a
+ * single invocation.  The crop is applied last --- that is, the crop region
+ * is specified in terms of the destination image after transform/resize.
+ *
+ * We also offer a "force to grayscale" option, which simply discards the
+ * chrominance channels of a YCbCr image.  This is lossless in the sense that
+ * the luminance channel is preserved exactly.  It's not the same kind of
+ * thing as the rotate/flip transformations, but it's convenient to handle it
+ * as part of this package, mainly because the transformation routines have to
+ * be aware of the option to know how many components to work on.
+ */
+
+
+/* Short forms of external names for systems with brain-damaged linkers. */
+
+#ifdef NEED_SHORT_EXTERNAL_NAMES
+#define jtransform_parse_crop_spec	jTrParCrop
+#define jtransform_request_workspace	jTrRequest
+#define jtransform_adjust_parameters	jTrAdjust
+#define jtransform_execute_transform	jTrExec
+#define jtransform_perfect_transform	jTrPerfect
+#define jcopy_markers_setup		jCMrkSetup
+#define jcopy_markers_execute		jCMrkExec
+#endif /* NEED_SHORT_EXTERNAL_NAMES */
+
+
+/*
+ * Codes for supported types of image transformations.
+ */
+
+typedef enum {
+	JXFORM_NONE,		/* no transformation */
+	JXFORM_FLIP_H,		/* horizontal flip */
+	JXFORM_FLIP_V,		/* vertical flip */
+	JXFORM_TRANSPOSE,	/* transpose across UL-to-LR axis */
+	JXFORM_TRANSVERSE,	/* transpose across UR-to-LL axis */
+	JXFORM_ROT_90,		/* 90-degree clockwise rotation */
+	JXFORM_ROT_180,		/* 180-degree rotation */
+	JXFORM_ROT_270		/* 270-degree clockwise (or 90 ccw) */
+} JXFORM_CODE;
+
+/*
+ * Codes for crop parameters, which can individually be unspecified,
+ * positive, or negative.  (Negative width or height makes no sense, though.)
+ */
+
+typedef enum {
+	JCROP_UNSET,
+	JCROP_POS,
+	JCROP_NEG
+} JCROP_CODE;
+
+/*
+ * Transform parameters struct.
+ * NB: application must not change any elements of this struct after
+ * calling jtransform_request_workspace.
+ */
+
+typedef struct {
+  /* Options: set by caller */
+  JXFORM_CODE transform;	/* image transform operator */
+  boolean perfect;		/* if TRUE, fail if partial MCUs are requested */
+  boolean trim;			/* if TRUE, trim partial MCUs as needed */
+  boolean force_grayscale;	/* if TRUE, convert color image to grayscale */
+  boolean crop;			/* if TRUE, crop source image */
+  boolean slow_hflip;  /* For best performance, the JXFORM_FLIP_H transform
+                          normally modifies the source coefficients in place.
+                          Setting this to TRUE will instead use a slower,
+                          double-buffered algorithm, which leaves the source
+                          coefficients intact (necessary if other transformed
+                          images must be generated from the same set of
+                          coefficients.) */
+
+  /* Crop parameters: application need not set these unless crop is TRUE.
+   * These can be filled in by jtransform_parse_crop_spec().
+   */
+  JDIMENSION crop_width;	/* Width of selected region */
+  JCROP_CODE crop_width_set;
+  JDIMENSION crop_height;	/* Height of selected region */
+  JCROP_CODE crop_height_set;
+  JDIMENSION crop_xoffset;	/* X offset of selected region */
+  JCROP_CODE crop_xoffset_set;	/* (negative measures from right edge) */
+  JDIMENSION crop_yoffset;	/* Y offset of selected region */
+  JCROP_CODE crop_yoffset_set;	/* (negative measures from bottom edge) */
+
+  /* Internal workspace: caller should not touch these */
+  int num_components;		/* # of components in workspace */
+  jvirt_barray_ptr * workspace_coef_arrays; /* workspace for transformations */
+  JDIMENSION output_width;	/* cropped destination dimensions */
+  JDIMENSION output_height;
+  JDIMENSION x_crop_offset;	/* destination crop offsets measured in iMCUs */
+  JDIMENSION y_crop_offset;
+  int iMCU_sample_width;	/* destination iMCU size */
+  int iMCU_sample_height;
+} jpeg_transform_info;
+
+
+#if TRANSFORMS_SUPPORTED
+
+/* Parse a crop specification (written in X11 geometry style) */
+EXTERN(boolean) jtransform_parse_crop_spec
+	JPP((jpeg_transform_info *info, const char *spec));
+/* Request any required workspace */
+EXTERN(boolean) jtransform_request_workspace
+	JPP((j_decompress_ptr srcinfo, jpeg_transform_info *info));
+/* Adjust output image parameters */
+EXTERN(jvirt_barray_ptr *) jtransform_adjust_parameters
+	JPP((j_decompress_ptr srcinfo, j_compress_ptr dstinfo,
+	     jvirt_barray_ptr *src_coef_arrays,
+	     jpeg_transform_info *info));
+/* Execute the actual transformation, if any */
+EXTERN(void) jtransform_execute_transform
+	JPP((j_decompress_ptr srcinfo, j_compress_ptr dstinfo,
+	     jvirt_barray_ptr *src_coef_arrays,
+	     jpeg_transform_info *info));
+/* Determine whether lossless transformation is perfectly
+ * possible for a specified image and transformation.
+ */
+EXTERN(boolean) jtransform_perfect_transform
+	JPP((JDIMENSION image_width, JDIMENSION image_height,
+	     int MCU_width, int MCU_height,
+	     JXFORM_CODE transform));
+
+/* jtransform_execute_transform used to be called
+ * jtransform_execute_transformation, but some compilers complain about
+ * routine names that long.  This macro is here to avoid breaking any
+ * old source code that uses the original name...
+ */
+#define jtransform_execute_transformation	jtransform_execute_transform
+
+#endif /* TRANSFORMS_SUPPORTED */
+
+
+/*
+ * Support for copying optional markers from source to destination file.
+ */
+
+typedef enum {
+	JCOPYOPT_NONE,		/* copy no optional markers */
+	JCOPYOPT_COMMENTS,	/* copy only comment (COM) markers */
+	JCOPYOPT_ALL		/* copy all optional markers */
+} JCOPY_OPTION;
+
+#define JCOPYOPT_DEFAULT  JCOPYOPT_COMMENTS	/* recommended default */
+
+/* Setup decompression object to save desired markers in memory */
+EXTERN(void) jcopy_markers_setup
+	JPP((j_decompress_ptr srcinfo, JCOPY_OPTION option));
+/* Copy markers saved in the given source object to the destination object */
+EXTERN(void) jcopy_markers_execute
+	JPP((j_decompress_ptr srcinfo, j_compress_ptr dstinfo,
+	     JCOPY_OPTION option));

+ 897 - 0
camerakit/src/main/cpp/libjpeg/include/turbojpeg.h

@@ -0,0 +1,897 @@
+/*
+ * Copyright (C)2009-2011 D. R. Commander.  All Rights Reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright notice,
+ *   this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright notice,
+ *   this list of conditions and the following disclaimer in the documentation
+ *   and/or other materials provided with the distribution.
+ * - Neither the name of the libjpeg-turbo Project nor the names of its
+ *   contributors may be used to endorse or promote products derived from this
+ *   software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS",
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __TURBOJPEG_H__
+#define __TURBOJPEG_H__
+
+#if defined(_WIN32) && defined(DLLDEFINE)
+#define DLLEXPORT __declspec(dllexport)
+#else
+#define DLLEXPORT
+#endif
+#define DLLCALL
+
+
+/**
+ * @addtogroup TurboJPEG
+ * TurboJPEG API.  This API provides an interface for generating, decoding, and
+ * transforming planar YUV and JPEG images in memory.
+ *
+ * @{
+ */
+
+
+/**
+ * The number of chrominance subsampling options
+ */
+#define TJ_NUMSAMP 5
+
+/**
+ * Chrominance subsampling options.
+ * When an image is converted from the RGB to the YCbCr colorspace as part of
+ * the JPEG compression process, some of the Cb and Cr (chrominance) components
+ * can be discarded or averaged together to produce a smaller image with little
+ * perceptible loss of image clarity (the human eye is more sensitive to small
+ * changes in brightness than small changes in color.)  This is called
+ * "chrominance subsampling".
+ */
+enum TJSAMP
+{
+  /**
+   * 4:4:4 chrominance subsampling (no chrominance subsampling).  The JPEG or
+   * YUV image will contain one chrominance component for every pixel in the
+   * source image.
+   */
+  TJSAMP_444=0,
+  /**
+   * 4:2:2 chrominance subsampling.  The JPEG or YUV image will contain one
+   * chrominance component for every 2x1 block of pixels in the source image.
+   */
+  TJSAMP_422,
+  /**
+   * 4:2:0 chrominance subsampling.  The JPEG or YUV image will contain one
+   * chrominance component for every 2x2 block of pixels in the source image.
+   */
+  TJSAMP_420,
+  /**
+   * Grayscale.  The JPEG or YUV image will contain no chrominance components.
+   */
+  TJSAMP_GRAY,
+  /**
+   * 4:4:0 chrominance subsampling.  The JPEG or YUV image will contain one
+   * chrominance component for every 1x2 block of pixels in the source image.
+   */
+  TJSAMP_440
+};
+
+/**
+ * MCU block width (in pixels) for a given level of chrominance subsampling.
+ * MCU block sizes:
+ * - 8x8 for no subsampling or grayscale
+ * - 16x8 for 4:2:2
+ * - 8x16 for 4:4:0
+ * - 16x16 for 4:2:0 
+ */
+static const int tjMCUWidth[TJ_NUMSAMP]  = {8, 16, 16, 8, 8};
+
+/**
+ * MCU block height (in pixels) for a given level of chrominance subsampling.
+ * MCU block sizes:
+ * - 8x8 for no subsampling or grayscale
+ * - 16x8 for 4:2:2
+ * - 8x16 for 4:4:0
+ * - 16x16 for 4:2:0 
+ */
+static const int tjMCUHeight[TJ_NUMSAMP] = {8, 8, 16, 8, 16};
+
+
+/**
+ * The number of pixel formats
+ */
+#define TJ_NUMPF 11
+
+/**
+ * Pixel formats
+ */
+enum TJPF
+{
+  /**
+   * RGB pixel format.  The red, green, and blue components in the image are
+   * stored in 3-byte pixels in the order R, G, B from lowest to highest byte
+   * address within each pixel.
+   */
+  TJPF_RGB=0,
+  /**
+   * BGR pixel format.  The red, green, and blue components in the image are
+   * stored in 3-byte pixels in the order B, G, R from lowest to highest byte
+   * address within each pixel.
+   */
+  TJPF_BGR,
+  /**
+   * RGBX pixel format.  The red, green, and blue components in the image are
+   * stored in 4-byte pixels in the order R, G, B from lowest to highest byte
+   * address within each pixel.  The X component is ignored when compressing
+   * and undefined when decompressing.
+   */
+  TJPF_RGBX,
+  /**
+   * BGRX pixel format.  The red, green, and blue components in the image are
+   * stored in 4-byte pixels in the order B, G, R from lowest to highest byte
+   * address within each pixel.  The X component is ignored when compressing
+   * and undefined when decompressing.
+   */
+  TJPF_BGRX,
+  /**
+   * XBGR pixel format.  The red, green, and blue components in the image are
+   * stored in 4-byte pixels in the order R, G, B from highest to lowest byte
+   * address within each pixel.  The X component is ignored when compressing
+   * and undefined when decompressing.
+   */
+  TJPF_XBGR,
+  /**
+   * XRGB pixel format.  The red, green, and blue components in the image are
+   * stored in 4-byte pixels in the order B, G, R from highest to lowest byte
+   * address within each pixel.  The X component is ignored when compressing
+   * and undefined when decompressing.
+   */
+  TJPF_XRGB,
+  /**
+   * Grayscale pixel format.  Each 1-byte pixel represents a luminance
+   * (brightness) level from 0 to 255.
+   */
+  TJPF_GRAY,
+  /**
+   * RGBA pixel format.  This is the same as @ref TJPF_RGBX, except that when
+   * decompressing, the X component is guaranteed to be 0xFF, which can be
+   * interpreted as an opaque alpha channel.
+   */
+  TJPF_RGBA,
+  /**
+   * BGRA pixel format.  This is the same as @ref TJPF_BGRX, except that when
+   * decompressing, the X component is guaranteed to be 0xFF, which can be
+   * interpreted as an opaque alpha channel.
+   */
+  TJPF_BGRA,
+  /**
+   * ABGR pixel format.  This is the same as @ref TJPF_XBGR, except that when
+   * decompressing, the X component is guaranteed to be 0xFF, which can be
+   * interpreted as an opaque alpha channel.
+   */
+  TJPF_ABGR,
+  /**
+   * ARGB pixel format.  This is the same as @ref TJPF_XRGB, except that when
+   * decompressing, the X component is guaranteed to be 0xFF, which can be
+   * interpreted as an opaque alpha channel.
+   */
+  TJPF_ARGB
+};
+
+/**
+ * Red offset (in bytes) for a given pixel format.  This specifies the number
+ * of bytes that the red component is offset from the start of the pixel.  For
+ * instance, if a pixel of format TJ_BGRX is stored in <tt>char pixel[]</tt>,
+ * then the red component will be <tt>pixel[tjRedOffset[TJ_BGRX]]</tt>.
+ */
+static const int tjRedOffset[TJ_NUMPF] = {0, 2, 0, 2, 3, 1, 0, 0, 2, 3, 1};
+/**
+ * Green offset (in bytes) for a given pixel format.  This specifies the number
+ * of bytes that the green component is offset from the start of the pixel.
+ * For instance, if a pixel of format TJ_BGRX is stored in
+ * <tt>char pixel[]</tt>, then the green component will be
+ * <tt>pixel[tjGreenOffset[TJ_BGRX]]</tt>.
+ */
+static const int tjGreenOffset[TJ_NUMPF] = {1, 1, 1, 1, 2, 2, 0, 1, 1, 2, 2};
+/**
+ * Blue offset (in bytes) for a given pixel format.  This specifies the number
+ * of bytes that the Blue component is offset from the start of the pixel.  For
+ * instance, if a pixel of format TJ_BGRX is stored in <tt>char pixel[]</tt>,
+ * then the blue component will be <tt>pixel[tjBlueOffset[TJ_BGRX]]</tt>.
+ */
+static const int tjBlueOffset[TJ_NUMPF] = {2, 0, 2, 0, 1, 3, 0, 2, 0, 1, 3};
+
+/**
+ * Pixel size (in bytes) for a given pixel format.
+ */
+static const int tjPixelSize[TJ_NUMPF] = {3, 3, 4, 4, 4, 4, 1, 4, 4, 4, 4};
+
+
+/**
+ * The uncompressed source/destination image is stored in bottom-up (Windows,
+ * OpenGL) order, not top-down (X11) order.
+ */
+#define TJFLAG_BOTTOMUP        2
+/**
+ * Turn off CPU auto-detection and force TurboJPEG to use MMX code (IPP and
+ * 32-bit libjpeg-turbo versions only.)
+ */
+#define TJFLAG_FORCEMMX        8
+/**
+ * Turn off CPU auto-detection and force TurboJPEG to use SSE code (32-bit IPP
+ * and 32-bit libjpeg-turbo versions only)
+ */
+#define TJFLAG_FORCESSE       16
+/**
+ * Turn off CPU auto-detection and force TurboJPEG to use SSE2 code (32-bit IPP
+ * and 32-bit libjpeg-turbo versions only)
+ */
+#define TJFLAG_FORCESSE2      32
+/**
+ * Turn off CPU auto-detection and force TurboJPEG to use SSE3 code (64-bit IPP
+ * version only)
+ */
+#define TJFLAG_FORCESSE3     128
+/**
+ * Use fast, inaccurate chrominance upsampling routines in the JPEG
+ * decompressor (libjpeg and libjpeg-turbo versions only)
+ */
+#define TJFLAG_FASTUPSAMPLE  256
+/**
+ * Disable buffer (re)allocation.  If passed to #tjCompress2() or
+ * #tjTransform(), this flag will cause those functions to generate an error if
+ * the JPEG image buffer is invalid or too small rather than attempting to
+ * allocate or reallocate that buffer.  This reproduces the behavior of earlier
+ * versions of TurboJPEG.
+ */
+#define TJFLAG_NOREALLOC     1024
+
+
+/**
+ * Number of transform operations
+ */
+#define TJ_NUMXOP 8
+
+/**
+ * Transform operations for #tjTransform()
+ */
+enum TJXOP
+{
+  /**
+   * Do not transform the position of the image pixels
+   */
+  TJXOP_NONE=0,
+  /**
+   * Flip (mirror) image horizontally.  This transform is imperfect if there
+   * are any partial MCU blocks on the right edge (see #TJXOPT_PERFECT.)
+   */
+  TJXOP_HFLIP,
+  /**
+   * Flip (mirror) image vertically.  This transform is imperfect if there are
+   * any partial MCU blocks on the bottom edge (see #TJXOPT_PERFECT.)
+   */
+  TJXOP_VFLIP,
+  /**
+   * Transpose image (flip/mirror along upper left to lower right axis.)  This
+   * transform is always perfect.
+   */
+  TJXOP_TRANSPOSE,
+  /**
+   * Transverse transpose image (flip/mirror along upper right to lower left
+   * axis.)  This transform is imperfect if there are any partial MCU blocks in
+   * the image (see #TJXOPT_PERFECT.)
+   */
+  TJXOP_TRANSVERSE,
+  /**
+   * Rotate image clockwise by 90 degrees.  This transform is imperfect if
+   * there are any partial MCU blocks on the bottom edge (see
+   * #TJXOPT_PERFECT.)
+   */
+  TJXOP_ROT90,
+  /**
+   * Rotate image 180 degrees.  This transform is imperfect if there are any
+   * partial MCU blocks in the image (see #TJXOPT_PERFECT.)
+   */
+  TJXOP_ROT180,
+  /**
+   * Rotate image counter-clockwise by 90 degrees.  This transform is imperfect
+   * if there are any partial MCU blocks on the right edge (see
+   * #TJXOPT_PERFECT.)
+   */
+  TJXOP_ROT270
+};
+
+
+/**
+ * This option will cause #tjTransform() to return an error if the transform is
+ * not perfect.  Lossless transforms operate on MCU blocks, whose size depends
+ * on the level of chrominance subsampling used (see #tjMCUWidth
+ * and #tjMCUHeight.)  If the image's width or height is not evenly divisible
+ * by the MCU block size, then there will be partial MCU blocks on the right
+ * and/or bottom edges.  It is not possible to move these partial MCU blocks to
+ * the top or left of the image, so any transform that would require that is
+ * "imperfect."  If this option is not specified, then any partial MCU blocks
+ * that cannot be transformed will be left in place, which will create
+ * odd-looking strips on the right or bottom edge of the image.
+ */
+#define TJXOPT_PERFECT  1
+/**
+ * This option will cause #tjTransform() to discard any partial MCU blocks that
+ * cannot be transformed.
+ */
+#define TJXOPT_TRIM     2
+/**
+ * This option will enable lossless cropping.  See #tjTransform() for more
+ * information.
+ */
+#define TJXOPT_CROP     4
+/**
+ * This option will discard the color data in the input image and produce
+ * a grayscale output image.
+ */
+#define TJXOPT_GRAY     8
+/**
+ * This option will prevent #tjTransform() from outputting a JPEG image for
+ * this particular transform (this can be used in conjunction with a custom
+ * filter to capture the transformed DCT coefficients without transcoding
+ * them.)
+ */
+#define TJXOPT_NOOUTPUT 16
+
+
+/**
+ * Scaling factor
+ */
+typedef struct
+{
+  /**
+   * Numerator
+   */
+  int num;
+  /**
+   * Denominator
+   */
+  int denom;
+} tjscalingfactor;
+
+/**
+ * Cropping region
+ */
+typedef struct
+{
+  /**
+   * The left boundary of the cropping region.  This must be evenly divisible
+   * by the MCU block width (see #tjMCUWidth.)
+   */
+  int x;
+  /**
+   * The upper boundary of the cropping region.  This must be evenly divisible
+   * by the MCU block height (see #tjMCUHeight.)
+   */
+  int y;
+  /**
+   * The width of the cropping region. Setting this to 0 is the equivalent of
+   * setting it to the width of the source JPEG image - x.
+   */
+  int w;
+  /**
+   * The height of the cropping region. Setting this to 0 is the equivalent of
+   * setting it to the height of the source JPEG image - y.
+   */
+  int h;
+} tjregion;
+
+/**
+ * Lossless transform
+ */
+typedef struct tjtransform
+{
+  /**
+   * Cropping region
+   */
+  tjregion r;
+  /**
+   * One of the @ref TJXOP "transform operations"
+   */
+  int op;
+  /**
+   * The bitwise OR of one or more of the @ref TJXOPT_CROP "transform options"
+   */
+  int options;
+  /**
+   * Arbitrary data that can be accessed within the body of the callback
+   * function
+   */
+  void *data;
+  /**
+   * A callback function that can be used to modify the DCT coefficients
+   * after they are losslessly transformed but before they are transcoded to a
+   * new JPEG file.  This allows for custom filters or other transformations to
+   * be applied in the frequency domain.
+   *
+   * @param coeffs pointer to an array of transformed DCT coefficients.  (NOTE:
+   *        this pointer is not guaranteed to be valid once the callback
+   *        returns, so applications wishing to hand off the DCT coefficients
+   *        to another function or library should make a copy of them within
+   *        the body of the callback.)
+   * @param arrayRegion #tjregion structure containing the width and height of
+   *        the array pointed to by <tt>coeffs</tt> as well as its offset
+   *        relative to the component plane.  TurboJPEG implementations may
+   *        choose to split each component plane into multiple DCT coefficient
+   *        arrays and call the callback function once for each array.
+   * @param planeRegion #tjregion structure containing the width and height of
+   *        the component plane to which <tt>coeffs</tt> belongs
+   * @param componentID ID number of the component plane to which
+   *        <tt>coeffs</tt> belongs (Y, Cb, and Cr have, respectively, ID's of
+   *        0, 1, and 2 in typical JPEG images.)
+   * @param transformID ID number of the transformed image to which
+   *        <tt>coeffs</tt> belongs.  This is the same as the index of the
+   *        transform in the transforms array that was passed to
+   *        #tjTransform().
+   * @param transform a pointer to a #tjtransform structure that specifies the
+   *        parameters and/or cropping region for this transform
+   *
+   * @return 0 if the callback was successful, or -1 if an error occurred.
+   */
+  int (*customFilter)(short *coeffs, tjregion arrayRegion,
+    tjregion planeRegion, int componentIndex, int transformIndex,
+    struct tjtransform *transform);
+} tjtransform;
+
+/**
+ * TurboJPEG instance handle
+ */
+typedef void* tjhandle;
+
+
+/**
+ * Pad the given width to the nearest 32-bit boundary
+ */
+#define TJPAD(width) (((width)+3)&(~3))
+
+/**
+ * Compute the scaled value of <tt>dimension</tt> using the given scaling
+ * factor.  This macro performs the integer equivalent of <tt>ceil(dimension *
+ * scalingFactor)</tt>. 
+ */
+#define TJSCALED(dimension, scalingFactor) ((dimension * scalingFactor.num \
+  + scalingFactor.denom - 1) / scalingFactor.denom)
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/**
+ * Create a TurboJPEG compressor instance.
+ *
+ * @return a handle to the newly-created instance, or NULL if an error
+ * occurred (see #tjGetErrorStr().)
+ */
+DLLEXPORT tjhandle DLLCALL tjInitCompress(void);
+
+
+/**
+ * Compress an RGB or grayscale image into a JPEG image.
+ *
+ * @param handle a handle to a TurboJPEG compressor or transformer instance
+ * @param srcBuf pointer to an image buffer containing RGB or grayscale pixels
+ *        to be compressed
+ * @param width width (in pixels) of the source image
+ * @param pitch bytes per line of the source image.  Normally, this should be
+ *        <tt>width * #tjPixelSize[pixelFormat]</tt> if the image is unpadded,
+ *        or <tt>#TJPAD(width * #tjPixelSize[pixelFormat])</tt> if each line of
+ *        the image is padded to the nearest 32-bit boundary, as is the case
+ *        for Windows bitmaps.  You can also be clever and use this parameter
+ *        to skip lines, etc.  Setting this parameter to 0 is the equivalent of
+ *        setting it to <tt>width * #tjPixelSize[pixelFormat]</tt>.
+ * @param height height (in pixels) of the source image
+ * @param pixelFormat pixel format of the source image (see @ref TJPF
+ *        "Pixel formats".)
+ * @param jpegBuf address of a pointer to an image buffer that will receive the
+ *        JPEG image.  TurboJPEG has the ability to reallocate the JPEG buffer
+ *        to accommodate the size of the JPEG image.  Thus, you can choose to:
+ *        -# pre-allocate the JPEG buffer with an arbitrary size using
+ *        #tjAlloc() and let TurboJPEG grow the buffer as needed,
+ *        -# set <tt>*jpegBuf</tt> to NULL to tell TurboJPEG to allocate the
+ *        buffer for you, or
+ *        -# pre-allocate the buffer to a "worst case" size determined by
+ *        calling #tjBufSize().  This should ensure that the buffer never has
+ *        to be re-allocated (setting #TJFLAG_NOREALLOC guarantees this.)
+ *        .
+ *        If you choose option 1, <tt>*jpegSize</tt> should be set to the
+ *        size of your pre-allocated buffer.  In any case, unless you have
+ *        set #TJFLAG_NOREALLOC, you should always check <tt>*jpegBuf</tt> upon
+ *        return from this function, as it may have changed.
+ * @param jpegSize pointer to an unsigned long variable that holds the size of
+ *        the JPEG image buffer.  If <tt>*jpegBuf</tt> points to a
+ *        pre-allocated buffer, then <tt>*jpegSize</tt> should be set to the
+ *        size of the buffer.  Upon return, <tt>*jpegSize</tt> will contain the
+ *        size of the JPEG image (in bytes.)
+ * @param jpegSubsamp the level of chrominance subsampling to be used when
+ *        generating the JPEG image (see @ref TJSAMP
+ *        "Chrominance subsampling options".)
+ * @param jpegQual the image quality of the generated JPEG image (1 = worst,
+          100 = best)
+ * @param flags the bitwise OR of one or more of the @ref TJFLAG_BOTTOMUP
+ *        "flags".
+ *
+ * @return 0 if successful, or -1 if an error occurred (see #tjGetErrorStr().)
+*/
+DLLEXPORT int DLLCALL tjCompress2(tjhandle handle, unsigned char *srcBuf,
+  int width, int pitch, int height, int pixelFormat, unsigned char **jpegBuf,
+  unsigned long *jpegSize, int jpegSubsamp, int jpegQual, int flags);
+
+
+/**
+ * The maximum size of the buffer (in bytes) required to hold a JPEG image with
+ * the given parameters.  The number of bytes returned by this function is
+ * larger than the size of the uncompressed source image.  The reason for this
+ * is that the JPEG format uses 16-bit coefficients, and it is thus possible
+ * for a very high-quality JPEG image with very high frequency content to
+ * expand rather than compress when converted to the JPEG format.  Such images
+ * represent a very rare corner case, but since there is no way to predict the
+ * size of a JPEG image prior to compression, the corner case has to be
+ * handled.
+ *
+ * @param width width of the image (in pixels)
+ * @param height height of the image (in pixels)
+ * @param jpegSubsamp the level of chrominance subsampling to be used when
+ *        generating the JPEG image (see @ref TJSAMP
+ *        "Chrominance subsampling options".)
+ *
+ * @return the maximum size of the buffer (in bytes) required to hold the
+ * image, or -1 if the arguments are out of bounds.
+ */
+DLLEXPORT unsigned long DLLCALL tjBufSize(int width, int height,
+  int jpegSubsamp);
+
+
+/**
+ * The size of the buffer (in bytes) required to hold a YUV planar image with
+ * the given parameters.
+ *
+ * @param width width of the image (in pixels)
+ * @param height height of the image (in pixels)
+ * @param subsamp level of chrominance subsampling in the image (see
+ *        @ref TJSAMP "Chrominance subsampling options".)
+ *
+ * @return the size of the buffer (in bytes) required to hold the image, or
+ * -1 if the arguments are out of bounds.
+ */
+DLLEXPORT unsigned long DLLCALL tjBufSizeYUV(int width, int height,
+  int subsamp);
+
+
+/**
+ * Encode an RGB or grayscale image into a YUV planar image.  This function
+ * uses the accelerated color conversion routines in TurboJPEG's underlying
+ * codec to produce a planar YUV image that is suitable for X Video.
+ * Specifically, if the chrominance components are subsampled along the
+ * horizontal dimension, then the width of the luminance plane is padded to 2
+ * in the output image (same goes for the height of the luminance plane, if the
+ * chrominance components are subsampled along the vertical dimension.)  Also,
+ * each line of each plane in the output image is padded to 4 bytes.  Although
+ * this will work with any subsampling option, it is really only useful in
+ * combination with TJ_420, which produces an image compatible with the I420
+ * (AKA "YUV420P") format.
+ *
+ * @param handle a handle to a TurboJPEG compressor or transformer instance
+ * @param srcBuf pointer to an image buffer containing RGB or grayscale pixels
+ *        to be encoded
+ * @param width width (in pixels) of the source image
+ * @param pitch bytes per line of the source image.  Normally, this should be
+ *        <tt>width * #tjPixelSize[pixelFormat]</tt> if the image is unpadded,
+ *        or <tt>#TJPAD(width * #tjPixelSize[pixelFormat])</tt> if each line of
+ *        the image is padded to the nearest 32-bit boundary, as is the case
+ *        for Windows bitmaps.  You can also be clever and use this parameter
+ *        to skip lines, etc.  Setting this parameter to 0 is the equivalent of
+ *        setting it to <tt>width * #tjPixelSize[pixelFormat]</tt>.
+ * @param height height (in pixels) of the source image
+ * @param pixelFormat pixel format of the source image (see @ref TJPF
+ *        "Pixel formats".)
+ * @param dstBuf pointer to an image buffer that will receive the YUV image.
+ *        Use #tjBufSizeYUV() to determine the appropriate size for this buffer
+ *        based on the image width, height, and level of chrominance
+ *        subsampling.
+ * @param subsamp the level of chrominance subsampling to be used when
+ *        generating the YUV image (see @ref TJSAMP
+ *        "Chrominance subsampling options".)
+ * @param flags the bitwise OR of one or more of the @ref TJFLAG_BOTTOMUP
+ *        "flags".
+ *
+ * @return 0 if successful, or -1 if an error occurred (see #tjGetErrorStr().)
+*/
+DLLEXPORT int DLLCALL tjEncodeYUV2(tjhandle handle,
+  unsigned char *srcBuf, int width, int pitch, int height, int pixelFormat,
+  unsigned char *dstBuf, int subsamp, int flags);
+
+
+/**
+ * Create a TurboJPEG decompressor instance.
+ *
+ * @return a handle to the newly-created instance, or NULL if an error
+ * occurred (see #tjGetErrorStr().)
+*/
+DLLEXPORT tjhandle DLLCALL tjInitDecompress(void);
+
+
+/**
+ * Retrieve information about a JPEG image without decompressing it.
+ *
+ * @param handle a handle to a TurboJPEG decompressor or transformer instance
+ * @param jpegBuf pointer to a buffer containing a JPEG image
+ * @param jpegSize size of the JPEG image (in bytes)
+ * @param width pointer to an integer variable that will receive the width (in
+ *        pixels) of the JPEG image
+ * @param height pointer to an integer variable that will receive the height
+ *        (in pixels) of the JPEG image
+ * @param jpegSubsamp pointer to an integer variable that will receive the
+ *        level of chrominance subsampling used when compressing the JPEG image
+ *        (see @ref TJSAMP "Chrominance subsampling options".)
+ *
+ * @return 0 if successful, or -1 if an error occurred (see #tjGetErrorStr().)
+*/
+DLLEXPORT int DLLCALL tjDecompressHeader2(tjhandle handle,
+  unsigned char *jpegBuf, unsigned long jpegSize, int *width, int *height,
+  int *jpegSubsamp);
+
+
+/**
+ * Returns a list of fractional scaling factors that the JPEG decompressor in
+ * this implementation of TurboJPEG supports.
+ *
+ * @param numscalingfactors pointer to an integer variable that will receive
+ *        the number of elements in the list
+ *
+ * @return a pointer to a list of fractional scaling factors, or NULL if an
+ * error is encountered (see #tjGetErrorStr().)
+*/
+DLLEXPORT tjscalingfactor* DLLCALL tjGetScalingFactors(int *numscalingfactors);
+
+
+/**
+ * Decompress a JPEG image to an RGB or grayscale image.
+ *
+ * @param handle a handle to a TurboJPEG decompressor or transformer instance
+ * @param jpegBuf pointer to a buffer containing the JPEG image to decompress
+ * @param jpegSize size of the JPEG image (in bytes)
+ * @param dstBuf pointer to an image buffer that will receive the decompressed
+ *        image.  This buffer should normally be <tt>pitch * scaledHeight</tt>
+ *        bytes in size, where <tt>scaledHeight</tt> can be determined by
+ *        calling #TJSCALED() with the JPEG image height and one of the scaling
+ *        factors returned by #tjGetScalingFactors().  The dstBuf pointer may
+ *        also be used to decompress into a specific region of a larger buffer.
+ * @param width desired width (in pixels) of the destination image.  If this is
+ *        smaller than the width of the JPEG image being decompressed, then
+ *        TurboJPEG will use scaling in the JPEG decompressor to generate the
+ *        largest possible image that will fit within the desired width.  If
+ *        width is set to 0, then only the height will be considered when
+ *        determining the scaled image size.
+ * @param pitch bytes per line of the destination image.  Normally, this is
+ *        <tt>scaledWidth * #tjPixelSize[pixelFormat]</tt> if the decompressed
+ *        image is unpadded, else <tt>#TJPAD(scaledWidth *
+ *        #tjPixelSize[pixelFormat])</tt> if each line of the decompressed
+ *        image is padded to the nearest 32-bit boundary, as is the case for
+ *        Windows bitmaps.  (NOTE: <tt>scaledWidth</tt> can be determined by
+ *        calling #TJSCALED() with the JPEG image width and one of the scaling
+ *        factors returned by #tjGetScalingFactors().)  You can also be clever
+ *        and use the pitch parameter to skip lines, etc.  Setting this
+ *        parameter to 0 is the equivalent of setting it to <tt>scaledWidth
+ *        * #tjPixelSize[pixelFormat]</tt>.
+ * @param height desired height (in pixels) of the destination image.  If this
+ *        is smaller than the height of the JPEG image being decompressed, then
+ *        TurboJPEG will use scaling in the JPEG decompressor to generate the
+ *        largest possible image that will fit within the desired height.  If
+ *        height is set to 0, then only the width will be considered when
+ *        determining the scaled image size.
+ * @param pixelFormat pixel format of the destination image (see @ref
+ *        TJPF "Pixel formats".)
+ * @param flags the bitwise OR of one or more of the @ref TJFLAG_BOTTOMUP
+ *        "flags".
+ *
+ * @return 0 if successful, or -1 if an error occurred (see #tjGetErrorStr().)
+ */
+DLLEXPORT int DLLCALL tjDecompress2(tjhandle handle,
+  unsigned char *jpegBuf, unsigned long jpegSize, unsigned char *dstBuf,
+  int width, int pitch, int height, int pixelFormat, int flags);
+
+
+/**
+ * Decompress a JPEG image to a YUV planar image.  This function performs JPEG
+ * decompression but leaves out the color conversion step, so a planar YUV
+ * image is generated instead of an RGB image.  The padding of the planes in
+ * this image is the same as the images generated by #tjEncodeYUV2().  Note
+ * that, if the width or height of the image is not an even multiple of the MCU
+ * block size (see #tjMCUWidth and #tjMCUHeight), then an intermediate buffer
+ * copy will be performed within TurboJPEG.
+ *
+ * @param handle a handle to a TurboJPEG decompressor or transformer instance
+ * @param jpegBuf pointer to a buffer containing the JPEG image to decompress
+ * @param jpegSize size of the JPEG image (in bytes)
+ * @param dstBuf pointer to an image buffer that will receive the YUV image.
+ *        Use #tjBufSizeYUV to determine the appropriate size for this buffer
+ *        based on the image width, height, and level of subsampling.
+ * @param flags the bitwise OR of one or more of the @ref TJFLAG_BOTTOMUP
+ *        "flags".
+ *
+ * @return 0 if successful, or -1 if an error occurred (see #tjGetErrorStr().)
+ */
+DLLEXPORT int DLLCALL tjDecompressToYUV(tjhandle handle,
+  unsigned char *jpegBuf, unsigned long jpegSize, unsigned char *dstBuf,
+  int flags);
+
+
+/**
+ * Create a new TurboJPEG transformer instance.
+ *
+ * @return a handle to the newly-created instance, or NULL if an error
+ * occurred (see #tjGetErrorStr().)
+ */
+DLLEXPORT tjhandle DLLCALL tjInitTransform(void);
+
+
+/**
+ * Losslessly transform a JPEG image into another JPEG image.  Lossless
+ * transforms work by moving the raw coefficients from one JPEG image structure
+ * to another without altering the values of the coefficients.  While this is
+ * typically faster than decompressing the image, transforming it, and
+ * re-compressing it, lossless transforms are not free.  Each lossless
+ * transform requires reading and Huffman decoding all of the coefficients in
+ * the source image, regardless of the size of the destination image.  Thus,
+ * this function provides a means of generating multiple transformed images
+ * from the same source or of applying multiple transformations simultaneously,
+ * in order to eliminate the need to read the source coefficients multiple
+ * times.
+ *
+ * @param handle a handle to a TurboJPEG transformer instance
+ * @param jpegBuf pointer to a buffer containing the JPEG image to transform
+ * @param jpegSize size of the JPEG image (in bytes)
+ * @param n the number of transformed JPEG images to generate
+ * @param dstBufs pointer to an array of n image buffers.  <tt>dstBufs[i]</tt>
+ *        will receive a JPEG image that has been transformed using the
+ *        parameters in <tt>transforms[i]</tt>.  TurboJPEG has the ability to
+ *        reallocate the JPEG buffer to accommodate the size of the JPEG image.
+ *        Thus, you can choose to:
+ *        -# pre-allocate the JPEG buffer with an arbitrary size using
+ *        #tjAlloc() and let TurboJPEG grow the buffer as needed,
+ *        -# set <tt>dstBufs[i]</tt> to NULL to tell TurboJPEG to allocate the
+ *        buffer for you, or
+ *        -# pre-allocate the buffer to a "worst case" size determined by
+ *        calling #tjBufSize() with the cropped width and height.  This should
+ *        ensure that the buffer never has to be re-allocated (setting
+ *        #TJFLAG_NOREALLOC guarantees this.)
+ *        .
+ *        If you choose option 1, <tt>dstSizes[i]</tt> should be set to
+ *        the size of your pre-allocated buffer.  In any case, unless you have
+ *        set #TJFLAG_NOREALLOC, you should always check <tt>dstBufs[i]</tt>
+ *        upon return from this function, as it may have changed.
+ * @param dstSizes pointer to an array of n unsigned long variables that will
+ *        receive the actual sizes (in bytes) of each transformed JPEG image.
+ *        If <tt>dstBufs[i]</tt> points to a pre-allocated buffer, then
+ *        <tt>dstSizes[i]</tt> should be set to the size of the buffer.  Upon
+ *        return, <tt>dstSizes[i]</tt> will contain the size of the JPEG image
+ *        (in bytes.)
+ * @param transforms pointer to an array of n tjtransform structures, each of
+ *        which specifies the transform parameters and/or cropping region for
+ *        the corresponding transformed output image.
+ * @param flags the bitwise OR of one or more of the @ref TJFLAG_BOTTOMUP
+ *        "flags".
+ *
+ * @return 0 if successful, or -1 if an error occurred (see #tjGetErrorStr().)
+ */
+DLLEXPORT int DLLCALL tjTransform(tjhandle handle, unsigned char *jpegBuf,
+  unsigned long jpegSize, int n, unsigned char **dstBufs,
+  unsigned long *dstSizes, tjtransform *transforms, int flags);
+
+
+/**
+ * Destroy a TurboJPEG compressor, decompressor, or transformer instance.
+ *
+ * @param handle a handle to a TurboJPEG compressor, decompressor or
+ *        transformer instance
+ *
+ * @return 0 if successful, or -1 if an error occurred (see #tjGetErrorStr().)
+ */
+DLLEXPORT int DLLCALL tjDestroy(tjhandle handle);
+
+
+/**
+ * Allocate an image buffer for use with TurboJPEG.  You should always use
+ * this function to allocate the JPEG destination buffer(s) for #tjCompress2()
+ * and #tjTransform() unless you are disabling automatic buffer
+ * (re)allocation (by setting #TJFLAG_NOREALLOC.)
+ *
+ * @param bytes the number of bytes to allocate
+ * 
+ * @return a pointer to a newly-allocated buffer with the specified number of
+ *         bytes
+ *
+ * @sa tjFree()
+ */
+DLLEXPORT unsigned char* DLLCALL tjAlloc(int bytes);
+
+
+/**
+ * Free an image buffer previously allocated by TurboJPEG.  You should always
+ * use this function to free JPEG destination buffer(s) that were automatically
+ * (re)allocated by #tjCompress2() or #tjTransform() or that were manually
+ * allocated using #tjAlloc().
+ *
+ * @param buffer address of the buffer to free
+ *
+ * @sa tjAlloc()
+ */
+DLLEXPORT void DLLCALL tjFree(unsigned char *buffer);
+
+
+/**
+ * Returns a descriptive error message explaining why the last command failed.
+ *
+ * @return a descriptive error message explaining why the last command failed.
+ */
+DLLEXPORT char* DLLCALL tjGetErrorStr(void);
+
+
+/* Backward compatibility functions and macros (nothing to see here) */
+#define NUMSUBOPT TJ_NUMSAMP
+#define TJ_444 TJSAMP_444
+#define TJ_422 TJSAMP_422
+#define TJ_420 TJSAMP_420
+#define TJ_411 TJSAMP_420
+#define TJ_GRAYSCALE TJSAMP_GRAY
+
+#define TJ_BGR 1
+#define TJ_BOTTOMUP TJFLAG_BOTTOMUP
+#define TJ_FORCEMMX TJFLAG_FORCEMMX
+#define TJ_FORCESSE TJFLAG_FORCESSE
+#define TJ_FORCESSE2 TJFLAG_FORCESSE2
+#define TJ_ALPHAFIRST 64
+#define TJ_FORCESSE3 TJFLAG_FORCESSE3
+#define TJ_FASTUPSAMPLE TJFLAG_FASTUPSAMPLE
+#define TJ_YUV 512
+
+DLLEXPORT unsigned long DLLCALL TJBUFSIZE(int width, int height);
+
+DLLEXPORT unsigned long DLLCALL TJBUFSIZEYUV(int width, int height,
+  int jpegSubsamp);
+
+DLLEXPORT int DLLCALL tjCompress(tjhandle handle, unsigned char *srcBuf,
+  int width, int pitch, int height, int pixelSize, unsigned char *dstBuf,
+  unsigned long *compressedSize, int jpegSubsamp, int jpegQual, int flags);
+
+DLLEXPORT int DLLCALL tjEncodeYUV(tjhandle handle,
+  unsigned char *srcBuf, int width, int pitch, int height, int pixelSize,
+  unsigned char *dstBuf, int subsamp, int flags);
+
+DLLEXPORT int DLLCALL tjDecompressHeader(tjhandle handle,
+  unsigned char *jpegBuf, unsigned long jpegSize, int *width, int *height);
+
+DLLEXPORT int DLLCALL tjDecompress(tjhandle handle,
+  unsigned char *jpegBuf, unsigned long jpegSize, unsigned char *dstBuf,
+  int width, int pitch, int height, int pixelSize, int flags);
+
+
+/**
+ * @}
+ */
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif

+ 25 - 0
camerakit/src/main/cpp/main.cpp

@@ -0,0 +1,25 @@
/**
 * JNI entry point for the camerakit native library.
 */

#include <jni.h>

namespace camerakit {
  namespace jni {
    // Implemented elsewhere in the native sources; each registers the JNI
    // methods for one Java peer class and returns an int status.
    extern int register_CameraSurfaceView(JNIEnv* env);
    extern int register_CameraSurfaceTexture(JNIEnv* env);
  }
}

using namespace camerakit;

/**
 * Called by the JVM when this library is loaded via System.loadLibrary().
 * Obtains a JNIEnv for JNI 1.6, registers the native methods of the two
 * surface classes, and reports the required JNI version.
 *
 * NOTE(review): the return values of the register_* calls are ignored, so a
 * failed registration is not reported here — it would surface later as an
 * UnsatisfiedLinkError; consider returning -1 on failure.
 */
JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved) {
    JNIEnv* env;
    if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
        // Environment for JNI 1.6 unavailable: abort the load.
        return -1;
    }

    jni::register_CameraSurfaceView(env);
    jni::register_CameraSurfaceTexture(env);
    return JNI_VERSION_1_6;
}

+ 4 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/CameraConfig.java

@@ -0,0 +1,4 @@
+package com.wonderkiln.camerakit;
+
// NOTE(review): empty class — presumably reserved for future camera
// configuration options; confirm intent before removing.
public class CameraConfig {
}

+ 65 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/CameraKit.java

@@ -0,0 +1,65 @@
+package com.wonderkiln.camerakit;
+
+import android.content.res.Resources;
+
+public class CameraKit {
+
+    public static class Internal {
+        public static final int screenWidth  = Resources.getSystem().getDisplayMetrics().widthPixels;
+        public static final int screenHeight = Resources.getSystem().getDisplayMetrics().heightPixels;
+    }
+
+    public static class Constants {
+
+        public static final int PERMISSION_REQUEST_CAMERA = 16;
+
+        public static final int FACING_BACK  = 0;
+        public static final int FACING_FRONT = 1;
+
+        public static final int FLASH_OFF   = 0;
+        public static final int FLASH_ON    = 1;
+        public static final int FLASH_AUTO  = 2;
+        public static final int FLASH_TORCH = 3;
+
+        public static final int FOCUS_OFF             = 0;
+        public static final int FOCUS_CONTINUOUS      = 1;
+        public static final int FOCUS_TAP             = 2;
+        public static final int FOCUS_TAP_WITH_MARKER = 3;
+
+        public static final int METHOD_STANDARD = 0;        public static final int METHOD_STILL    = 1;
+
+        public static final int PERMISSIONS_STRICT  = 0;
+        public static final int PERMISSIONS_LAZY    = 1;
+        public static final int PERMISSIONS_PICTURE = 2;
+
+        public static final int VIDEO_QUALITY_480P    = 0;
+        public static final int VIDEO_QUALITY_720P    = 1;
+        public static final int VIDEO_QUALITY_1080P   = 2;
+        public static final int VIDEO_QUALITY_2160P   = 3;
+        public static final int VIDEO_QUALITY_HIGHEST = 4;
+        public static final int VIDEO_QUALITY_LOWEST  = 5;
+        public static final int VIDEO_QUALITY_QVGA    = 6;
+
+    }
+
+    public static class Defaults {
+
+        static final int     DEFAULT_FACING        = Constants.FACING_BACK;
+        static final int     DEFAULT_FLASH         = Constants.FLASH_OFF;
+        static final int     DEFAULT_FOCUS         = Constants.FOCUS_CONTINUOUS;
+        static final boolean DEFAULT_PINCH_TO_ZOOM = true;
+        static final float   DEFAULT_ZOOM          = 1.f;
+        static final int     DEFAULT_METHOD        = Constants.METHOD_STANDARD;
+        static final int     DEFAULT_PERMISSIONS   = Constants.PERMISSIONS_STRICT;
+        static final int     DEFAULT_VIDEO_QUALITY = Constants.VIDEO_QUALITY_480P;
+
+        static final int     DEFAULT_JPEG_QUALITY                = 100;
+        static final int     DEFAULT_VIDEO_BIT_RATE              = 0;
+        static final int     DEFAULT_DEVICE_ORIENTATION          = 0;
+        static final boolean DEFAULT_CROP_OUTPUT                 = false;
+        static final boolean DEFAULT_DOUBLE_TAP_TO_TOGGLE_FACING = false;
+        static final boolean DEFAULT_ADJUST_VIEW_BOUNDS          = false;
+
+    }
+
+}

+ 4 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/CameraKitController.java

@@ -0,0 +1,4 @@
+package com.wonderkiln.camerakit;
+
// NOTE(review): empty class — CameraView.addController(CameraKitController)
// accepts it but does nothing; presumably a stub for a future controller API.
public class CameraKitController {
}

+ 5 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/CameraKitHandler.java

@@ -0,0 +1,5 @@
+package com.wonderkiln.camerakit;
+
+
// NOTE(review): empty class — presumably a stub for future event/callback
// handling; confirm before use.
public class CameraKitHandler {
}

+ 11 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/CameraProperties.java

@@ -0,0 +1,11 @@
+package com.wonderkiln.camerakit;
+
/**
 * Immutable value holder for a camera's viewing angles.
 */
public class CameraProperties {

    /** Vertical viewing angle reported by the camera. */
    public final float verticalViewingAngle;

    /** Horizontal viewing angle reported by the camera. */
    public final float horizontalViewingAngle;

    /**
     * @param vertical   vertical viewing angle
     * @param horizontal horizontal viewing angle
     */
    public CameraProperties(float vertical, float horizontal) {
        this.verticalViewingAngle = vertical;
        this.horizontalViewingAngle = horizontal;
    }
}

+ 577 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/CameraView.java

@@ -0,0 +1,577 @@
+package com.wonderkiln.camerakit;
+
+import android.Manifest;
+import android.app.Activity;
+import android.content.Context;
+import android.content.ContextWrapper;
+import android.content.pm.PackageManager;
+import android.content.res.TypedArray;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.AttributeSet;
+import android.view.Display;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import com.camerakit.R;
+import com.google.android.gms.common.ConnectionResult;
+import com.google.android.gms.common.GoogleApiAvailability;
+import com.google.android.gms.vision.text.TextRecognizer;
+import com.wonderkiln.camerakit.api16.Camera1;
+import com.wonderkiln.camerakit.base.CameraImpl;
+import com.wonderkiln.camerakit.base.CameraViewLayout;
+import com.wonderkiln.camerakit.base.PreviewImpl;
+import com.wonderkiln.camerakit.base.SurfaceViewPreview;
+import com.wonderkiln.camerakit.events.CameraKitEventCallback;
+import com.wonderkiln.camerakit.events.CameraKitEventListener;
+import com.wonderkiln.camerakit.events.CameraKitImage;
+import com.wonderkiln.camerakit.events.CameraKitVideo;
+import com.wonderkiln.camerakit.events.EventDispatcher;
+import com.wonderkiln.camerakit.types.CaptureMethod;
+import com.wonderkiln.camerakit.types.Facing;
+import com.wonderkiln.camerakit.types.Flash;
+import com.wonderkiln.camerakit.types.Focus;
+import com.wonderkiln.camerakit.types.Permissions;
+import com.wonderkiln.camerakit.types.VideoQuality;
+import com.wonderkiln.camerakit.utils.AspectRatio;
+import com.wonderkiln.camerakit.utils.DisplayOrientationDetector;
+import com.wonderkiln.camerakit.utils.PostProcessor;
+import com.wonderkiln.camerakit.utils.Size;
+import com.wonderkiln.camerakit.vision.CameraKitTextDetect;
+import com.wonderkiln.camerakit.vision.GooglePlayServicesUnavailableException;
+import com.wonderkiln.camerakit.vision.TextProcessor;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.core.app.ActivityCompat;
+import androidx.core.content.ContextCompat;
+import androidx.core.hardware.display.DisplayManagerCompat;
+import androidx.core.view.ViewCompat;
+
+import static com.wonderkiln.camerakit.CameraKit.Constants.FACING_BACK;
+import static com.wonderkiln.camerakit.CameraKit.Constants.FACING_FRONT;
+import static com.wonderkiln.camerakit.CameraKit.Constants.FLASH_AUTO;
+import static com.wonderkiln.camerakit.CameraKit.Constants.FLASH_OFF;
+import static com.wonderkiln.camerakit.CameraKit.Constants.FLASH_ON;
+import static com.wonderkiln.camerakit.CameraKit.Constants.FLASH_TORCH;
+import static com.wonderkiln.camerakit.CameraKit.Constants.PERMISSIONS_LAZY;
+import static com.wonderkiln.camerakit.CameraKit.Constants.PERMISSIONS_PICTURE;
+import static com.wonderkiln.camerakit.CameraKit.Constants.PERMISSIONS_STRICT;
+
+public class CameraView extends CameraViewLayout {
+
    // Handler bound to a shared background thread for camera work.
    private static Handler sWorkerHandler;

    static {
        // Initialize a single worker thread. This can be static since only a single camera
        // reference can exist at a time.
        HandlerThread workerThread = new HandlerThread("CameraViewWorker");
        workerThread.setDaemon(true);
        workerThread.start();
        sWorkerHandler = new Handler(workerThread.getLooper());
    }
+
    // Lens facing (see CameraKit.Constants.FACING_*).
    @Facing
    private int mFacing;

    // Flash mode (see CameraKit.Constants.FLASH_*).
    @Flash
    private int mFlash;

    // Focus mode (see CameraKit.Constants.FOCUS_*).
    @Focus
    private int mFocus;

    // Capture method (see CameraKit.Constants.METHOD_*).
    @CaptureMethod
    private int mMethod;

    // Whether pinch gestures modify zoom (consumed in onZoom()).
    private boolean mPinchToZoom;

    // Current zoom level; DEFAULT_ZOOM is 1.0.
    private float mZoom;

    // Permission-handling strategy (see CameraKit.Constants.PERMISSIONS_*),
    // consulted in start().
    @Permissions
    private int mPermissions;

    // Capture/encoding configuration, read from XML attributes in the constructor.
    @VideoQuality
    private int     mVideoQuality;
    private int     mJpegQuality;
    private int     mVideoBitRate;
    private int     mDeviceOrientation;
    private boolean mLockVideoAspectRatio;
    private boolean mCropOutput;
    private boolean mDoubleTapToToggleFacing;

    // Mirrors android:adjustViewBounds; drives the aspect-ratio fitting in onMeasure().
    private boolean mAdjustViewBounds;

    // Display/device rotation listener; only created outside the layout editor.
    private DisplayOrientationDetector mDisplayOrientationDetector;
    // Camera backend (a Camera1 instance).
    private CameraImpl                 mCameraImpl;

    // Preview surface backing this view (a SurfaceViewPreview instance).
    private PreviewImpl mPreviewImpl;

    // True between start() and stop() (set even while permissions are pending).
    private boolean mIsStarted;

    // Dispatcher for CameraKit events; handed to the camera implementation.
    private EventDispatcher mEventDispatcher;

    // Overlay view for the tap-to-focus marker; only added outside the layout editor.
    private FocusMarkerLayout focusMarkerLayout;
+
    /** Creates a CameraView programmatically with default attributes. */
    public CameraView(@NonNull Context context) {
        this(context, null);
    }

    /** Creates a CameraView from XML, using the default style. */
    public CameraView(@NonNull Context context, @Nullable AttributeSet attrs) {
        this(context, attrs, 0);
    }
+
    /**
     * Full constructor: reads all ck* XML attributes (falling back to
     * CameraKit.Defaults), builds the preview surface and Camera1 backend,
     * applies the configuration to the backend, and wires up orientation
     * tracking plus the tap-to-focus overlay.
     */
    public CameraView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        if (attrs != null) {
            // Read every styleable attribute once; recycle the TypedArray even on failure.
            TypedArray a = context.getTheme().obtainStyledAttributes(
                    attrs,
                    R.styleable.CameraView,
                    0, 0);

            try {
                mFacing = a.getInteger(R.styleable.CameraView_ckFacing, CameraKit.Defaults.DEFAULT_FACING);
                mFlash = a.getInteger(R.styleable.CameraView_ckFlash, CameraKit.Defaults.DEFAULT_FLASH);
                mFocus = a.getInteger(R.styleable.CameraView_ckFocus, CameraKit.Defaults.DEFAULT_FOCUS);
                mMethod = a.getInteger(R.styleable.CameraView_ckMethod, CameraKit.Defaults.DEFAULT_METHOD);
                mPinchToZoom = a.getBoolean(R.styleable.CameraView_ckPinchToZoom, CameraKit.Defaults.DEFAULT_PINCH_TO_ZOOM);
                mZoom = a.getFloat(R.styleable.CameraView_ckZoom, CameraKit.Defaults.DEFAULT_ZOOM);
                mPermissions = a.getInteger(R.styleable.CameraView_ckPermissions, CameraKit.Defaults.DEFAULT_PERMISSIONS);
                mVideoQuality = a.getInteger(R.styleable.CameraView_ckVideoQuality, CameraKit.Defaults.DEFAULT_VIDEO_QUALITY);
                mJpegQuality = a.getInteger(R.styleable.CameraView_ckJpegQuality, CameraKit.Defaults.DEFAULT_JPEG_QUALITY);
                mCropOutput = a.getBoolean(R.styleable.CameraView_ckCropOutput, CameraKit.Defaults.DEFAULT_CROP_OUTPUT);
                mVideoBitRate = a.getInteger(R.styleable.CameraView_ckVideoBitRate, CameraKit.Defaults.DEFAULT_VIDEO_BIT_RATE);
                mDeviceOrientation = a.getInteger(R.styleable.CameraView_deviceOrientation, CameraKit.Defaults.DEFAULT_DEVICE_ORIENTATION);
                mDoubleTapToToggleFacing = a.getBoolean(R.styleable.CameraView_ckDoubleTapToToggleFacing, CameraKit.Defaults.DEFAULT_DOUBLE_TAP_TO_TOGGLE_FACING);
                // NOTE(review): aspect-ratio lock defaults to true here, not via CameraKit.Defaults.
                mLockVideoAspectRatio = a.getBoolean(R.styleable.CameraView_ckLockVideoAspectRatio, true);
                mAdjustViewBounds = a.getBoolean(R.styleable.CameraView_android_adjustViewBounds, CameraKit.Defaults.DEFAULT_ADJUST_VIEW_BOUNDS);
            } finally {
                a.recycle();
            }
        }

        mEventDispatcher = new EventDispatcher();

        // Preview must exist before the camera backend, which renders into it.
        mPreviewImpl = new SurfaceViewPreview(context, this);
        mCameraImpl = new Camera1(mEventDispatcher, mPreviewImpl);

        mIsStarted = false;

        // Handle situations where there's only 1 camera & it's front facing OR it's a chromebook in laptop mode
        WindowManager windowService = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        boolean isChromebookInLaptopMode = (context.getPackageManager().hasSystemFeature("org.chromium.arc.device_management") && windowService.getDefaultDisplay().getRotation() == Surface.ROTATION_0);
        if (mCameraImpl.frontCameraOnly() || isChromebookInLaptopMode) {
            mFacing = FACING_FRONT;
        }
        // Push the parsed configuration down to the backend via the public setters.
        setDeviceOrientation(mDeviceOrientation);
        setFacing(mFacing);
        setFlash(mFlash);
        setFocus(mFocus);
        setMethod(mMethod);
        setPinchToZoom(mPinchToZoom);
        setZoom(mZoom);
        setPermissions(mPermissions);
        setVideoQuality(mVideoQuality);
        setVideoBitRate(mVideoBitRate);
        setLockVideoAspectRatio(mLockVideoAspectRatio);
        if (!isInEditMode()) {
            // Orientation tracking and the focus overlay are skipped in the layout editor.
            mDisplayOrientationDetector = new DisplayOrientationDetector(context) {
                @Override
                public void onDisplayOrDeviceOrientationChanged(int displayOrientation, int deviceOrientation) {
                    mCameraImpl.setDisplayAndDeviceOrientation(displayOrientation, deviceOrientation);
                    mPreviewImpl.setDisplayOrientation(displayOrientation);
                }
            };

            focusMarkerLayout = new FocusMarkerLayout(getContext());
            addView(focusMarkerLayout);
        }
    }
+
+    private void setDeviceOrientation(int mDeviceOrientation) {
+        if (mDeviceOrientation == 0) {
+            mCameraImpl.setDeviceOrientation(90);
+        } else {
+            mCameraImpl.setDeviceOrientation(180);
+        }
+    }
+
    /** Begins display-orientation tracking once the view is attached to a window. */
    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        if (!isInEditMode()) {
            // Enable the detector with the default display when actually attached;
            // pass null otherwise.
            mDisplayOrientationDetector.enable(
                    ViewCompat.isAttachedToWindow(this)
                            ? DisplayManagerCompat.getInstance(getContext().getApplicationContext())
                            .getDisplay(Display.DEFAULT_DISPLAY)
                            : null
            );
        }
    }
+
    /** Stops display-orientation tracking before the view leaves its window. */
    @Override
    protected void onDetachedFromWindow() {
        if (!isInEditMode()) {
            // The detector is only created outside the layout editor.
            mDisplayOrientationDetector.disable();
        }
        super.onDetachedFromWindow();
    }
+
    /**
     * When adjustViewBounds is set and exactly one dimension is WRAP_CONTENT,
     * sizes that dimension to preserve the camera preview's aspect ratio;
     * otherwise defers to the default measurement.
     */
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        if (mAdjustViewBounds) {
            Size previewSize = getPreviewSize();
            if (previewSize != null) {
                if (getLayoutParams().width == LayoutParams.WRAP_CONTENT) {
                    // Fixed height: derive width from the preview aspect ratio.
                    int height = MeasureSpec.getSize(heightMeasureSpec);
                    float ratio = (float) height / (float) previewSize.getHeight();
                    int width = (int) (previewSize.getWidth() * ratio);
                    super.onMeasure(
                            MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
                            heightMeasureSpec
                    );
                    return;
                } else if (getLayoutParams().height == LayoutParams.WRAP_CONTENT) {
                    // Fixed width: derive height from the preview aspect ratio.
                    int width = MeasureSpec.getSize(widthMeasureSpec);
                    float ratio = (float) width / (float) previewSize.getWidth();
                    int height = (int) (previewSize.getHeight() * ratio);
                    super.onMeasure(
                            widthMeasureSpec,
                            MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY)
                    );
                    return;
                }
            } else {
                // No preview size available yet: fall back to default measurement.
                super.onMeasure(widthMeasureSpec, heightMeasureSpec);
                return;
            }
        }

        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    }
+
    /**
     * @return true once start() has been called and stop() has not yet been
     *         called (set even while a permission request is still pending)
     */
    public boolean isStarted() {
        return mIsStarted;
    }
+
+    public void addController(CameraKitController controller) {
+
+    }
+
+    /**
+     * Starts the camera preview, first verifying the runtime permissions required by
+     * the configured permission policy. If permissions are missing they are requested
+     * and the camera is NOT started; callers should invoke start() again once granted.
+     */
+    public void start() {
+        if (mIsStarted || !isEnabled()) {
+            // Already started, do nothing.
+            return;
+        }
+
+        int cameraCheck = ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA);
+        int audioCheck = ContextCompat.checkSelfPermission(getContext(), Manifest.permission.RECORD_AUDIO);
+
+        // BUGFIX: mIsStarted used to be set before these checks, so a start() attempt
+        // that bailed out to request permissions left the view marked "started" and a
+        // later start() (after the user granted the permission) became a no-op.
+        switch (mPermissions) {
+            case PERMISSIONS_STRICT:
+                // Both camera and microphone are mandatory.
+                if (cameraCheck != PackageManager.PERMISSION_GRANTED || audioCheck != PackageManager.PERMISSION_GRANTED) {
+                    requestPermissions(true, true);
+                    return;
+                }
+                break;
+
+            case PERMISSIONS_LAZY:
+                // Camera is mandatory; audio is requested alongside but not enforced.
+                if (cameraCheck != PackageManager.PERMISSION_GRANTED) {
+                    requestPermissions(true, true);
+                    return;
+                }
+                break;
+
+            case PERMISSIONS_PICTURE:
+                // Stills only: just the camera permission is needed.
+                if (cameraCheck != PackageManager.PERMISSION_GRANTED) {
+                    requestPermissions(true, false);
+                    return;
+                }
+                break;
+        }
+
+        // Mark started only once we are actually going to open the camera.
+        mIsStarted = true;
+        mCameraImpl.start();
+    }
+
+    // Stops the camera preview; a no-op when the view is not started.
+    public void stop() {
+        if (!mIsStarted) {
+            // Already stopped, do nothing.
+            return;
+        }
+        mIsStarted = false;
+        mCameraImpl.stop();
+    }
+
+
+    // Backing camera implementation for the base-class plumbing.
+    @Override
+    protected CameraImpl getCameraImpl() {
+        return mCameraImpl;
+    }
+
+    // Backing preview surface for the base-class plumbing.
+    @Override
+    protected PreviewImpl getPreviewImpl() {
+        return mPreviewImpl;
+    }
+
+    // Pinch gesture callback; the modifier is damped to 80% to soften zoom jumps.
+    @Override
+    protected void onZoom(float modifier, boolean start) {
+        if (mPinchToZoom) {
+            mCameraImpl.modifyZoom((modifier - 1) * 0.8f + 1);
+        }
+    }
+
+    // Tap gesture callback: shows the focus marker and converts the tap point to
+    // preview-relative [0..1] coordinates for the focus-area request.
+    @Override
+    protected void onTapToFocus(float x, float y) {
+        if (mFocus == CameraKit.Constants.FOCUS_TAP || mFocus == CameraKit.Constants.FOCUS_TAP_WITH_MARKER) {
+            focusMarkerLayout.focus(x, y);
+
+            float px = x - getPreviewImpl().getX();
+            float py = y - getPreviewImpl().getY();
+            mCameraImpl.setFocusArea(px / (float) getPreviewImpl().getWidth(), py / (float) getPreviewImpl().getHeight());
+        }
+    }
+
+    // Double-tap gesture callback: flips between front and back cameras when enabled.
+    @Override
+    protected void onToggleFacing() {
+        if (mDoubleTapToToggleFacing) {
+            toggleFacing();
+        }
+    }
+
+    @Nullable
+    public CameraProperties getCameraProperties() {
+        return mCameraImpl.getCameraProperties();
+    }
+
+    @Facing
+    public int getFacing() {
+        return mFacing;
+    }
+
+    public boolean isFacingFront() {
+        return mFacing == CameraKit.Constants.FACING_FRONT;
+    }
+
+    public boolean isFacingBack() {
+        return mFacing == CameraKit.Constants.FACING_BACK;
+    }
+
+    // Switches the camera on the shared background worker thread since opening a
+    // camera is slow; the field is updated immediately for synchronous getters.
+    public void setFacing(@Facing final int facing) {
+        this.mFacing = facing;
+        sWorkerHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                mCameraImpl.setFacing(facing);
+            }
+        });
+    }
+
+    public void setFlash(@Flash int flash) {
+        this.mFlash = flash;
+        mCameraImpl.setFlash(flash);
+    }
+
+    @Flash
+    public int getFlash() {
+        return mFlash;
+    }
+
+    // FOCUS_TAP_WITH_MARKER only differs from FOCUS_TAP in the UI overlay, so the
+    // implementation layer is always given plain FOCUS_TAP in that case.
+    public void setFocus(@Focus int focus) {
+        this.mFocus = focus;
+        if (this.mFocus == CameraKit.Constants.FOCUS_TAP_WITH_MARKER) {
+            mCameraImpl.setFocus(CameraKit.Constants.FOCUS_TAP);
+            return;
+        }
+
+        mCameraImpl.setFocus(mFocus);
+    }
+
+    public void setMethod(@CaptureMethod int method) {
+        this.mMethod = method;
+        mCameraImpl.setMethod(mMethod);
+    }
+
+    public void setPinchToZoom(boolean zoom) {
+        this.mPinchToZoom = zoom;
+    }
+
+    public void setZoom(float zoom) {
+        this.mZoom = zoom;
+        mCameraImpl.setZoom(zoom);
+    }
+
+    public void setPermissions(@Permissions int permissions) {
+        this.mPermissions = permissions;
+    }
+
+    public void setVideoQuality(@VideoQuality int videoQuality) {
+        this.mVideoQuality = videoQuality;
+        mCameraImpl.setVideoQuality(mVideoQuality);
+    }
+
+    public void setVideoBitRate(int videoBirRate) {
+        this.mVideoBitRate = videoBirRate;
+        mCameraImpl.setVideoBitRate(mVideoBitRate);
+    }
+
+    public void setLockVideoAspectRatio(boolean lockVideoAspectRatio) {
+        this.mLockVideoAspectRatio = lockVideoAspectRatio;
+        mCameraImpl.setLockVideoAspectRatio(lockVideoAspectRatio);
+    }
+
+    // JPEG quality is applied at post-processing time, not forwarded to the camera.
+    public void setJpegQuality(int jpegQuality) {
+        this.mJpegQuality = jpegQuality;
+    }
+
+    public void setCropOutput(boolean cropOutput) {
+        this.mCropOutput = cropOutput;
+    }
+
+    // Flips between front and back cameras; returns the facing now in effect.
+    @Facing
+    public int toggleFacing() {
+        switch (mFacing) {
+            case FACING_BACK:
+                setFacing(FACING_FRONT);
+                break;
+
+            case FACING_FRONT:
+                setFacing(FACING_BACK);
+                break;
+        }
+
+        return mFacing;
+    }
+
+    // Cycles OFF -> ON -> AUTO -> OFF (TORCH also resets to OFF); returns the new mode.
+    @Flash
+    public int toggleFlash() {
+        switch (mFlash) {
+            case FLASH_OFF:
+                setFlash(FLASH_ON);
+                break;
+
+            case FLASH_ON:
+                setFlash(FLASH_AUTO);
+                break;
+
+            case FLASH_AUTO:
+            case FLASH_TORCH:
+                setFlash(FLASH_OFF);
+                break;
+        }
+
+        return mFlash;
+    }
+
+    // Convenience overload: capture with no per-call callback (listeners still fire).
+    public void captureImage() {
+        captureImage(null);
+    }
+
+    /**
+     * Attaches a Mobile Vision text detector that forwards detections to the given
+     * callback and to registered event listeners.
+     *
+     * @return true when the recognizer is operational and installed; false when its
+     *         native dependencies are not yet downloaded.
+     * @throws GooglePlayServicesUnavailableException when Play Services is unavailable.
+     */
+    public boolean setTextDetectionListener(final CameraKitEventCallback<CameraKitTextDetect> callback) throws GooglePlayServicesUnavailableException {
+        // Fail fast BEFORE building the recognizer: previously the TextRecognizer was
+        // constructed and wired first, leaking it when this exception was thrown.
+        int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(getContext().getApplicationContext());
+        if (code != ConnectionResult.SUCCESS) {
+            throw new GooglePlayServicesUnavailableException();
+        }
+
+        TextRecognizer textRecognizer = new TextRecognizer.Builder(getContext()).build();
+        textRecognizer.setProcessor(new TextProcessor(mEventDispatcher, callback));
+
+        if (textRecognizer.isOperational()) {
+            mCameraImpl.setTextDetector(textRecognizer);
+            return true;
+        }
+
+        // Dependencies not yet available on this device: release the recognizer's
+        // native resources instead of leaking them, then report failure.
+        textRecognizer.release();
+        return false;
+    }
+
+    // Captures a still image, post-processes the JPEG (quality, facing mirror,
+    // optional crop to the view's aspect ratio) and dispatches it to the optional
+    // per-call callback plus all registered event listeners.
+    public void captureImage(final CameraKitEventCallback<CameraKitImage> callback) {
+        mCameraImpl.captureImage(new CameraImpl.ImageCapturedCallback() {
+            @Override
+            public void imageCaptured(byte[] jpeg) {
+                PostProcessor postProcessor = new PostProcessor(jpeg);
+                postProcessor.setJpegQuality(mJpegQuality);
+                postProcessor.setFacing(mFacing);
+                if (mCropOutput)
+                    postProcessor.setCropOutput(AspectRatio.of(getWidth(), getHeight()));
+
+                CameraKitImage image = new CameraKitImage(postProcessor.getJpeg());
+                if (callback != null)
+                    callback.callback(image);
+                mEventDispatcher.dispatch(image);
+            }
+        });
+    }
+
+    // Convenience overloads funnel into captureVideo(File, int, callback) below.
+    public void captureVideo() {
+        captureVideo(null, null);
+    }
+
+    public void captureVideo(File videoFile) {
+        captureVideo(videoFile, null);
+    }
+
+    public void captureVideo(CameraKitEventCallback<CameraKitVideo> callback) {
+        captureVideo(null, callback);
+    }
+
+    public void captureVideo(File videoFile, CameraKitEventCallback<CameraKitVideo> callback) {
+        captureVideo(videoFile, 0, callback);
+    }
+
+    // Records video to videoFile (implementation-chosen location when null) with an
+    // optional max duration (0 = unlimited); the result is delivered to the callback
+    // and dispatched to event listeners when recording finishes.
+    public void captureVideo(File videoFile, int maxDuration, final CameraKitEventCallback<CameraKitVideo> callback) {
+        mCameraImpl.captureVideo(videoFile, maxDuration, new CameraImpl.VideoCapturedCallback() {
+            @Override
+            public void videoCaptured(File file) {
+                CameraKitVideo video = new CameraKitVideo(file);
+                if (callback != null)
+                    callback.callback(video);
+                mEventDispatcher.dispatch(video);
+            }
+        });
+    }
+
+    public void stopVideo() {
+        mCameraImpl.stopVideo();
+    }
+
+    // Null until the camera has been started and sizes negotiated.
+    public Size getPreviewSize() {
+        return mCameraImpl != null ? mCameraImpl.getPreviewResolution() : null;
+    }
+
+    // Null until the camera has been started and sizes negotiated.
+    public Size getCaptureSize() {
+        return mCameraImpl != null ? mCameraImpl.getCaptureResolution() : null;
+    }
+
+    // Requests the camera and/or audio runtime permissions. The Context may be a
+    // ContextWrapper chain (e.g. themed contexts), so walk up until an Activity is
+    // found; without one no request can be made.
+    private void requestPermissions(boolean requestCamera, boolean requestAudio) {
+        Activity activity = null;
+        Context context = getContext();
+        while (context instanceof ContextWrapper) {
+            if (context instanceof Activity) {
+                activity = (Activity) context;
+            }
+            context = ((ContextWrapper) context).getBaseContext();
+        }
+
+        List<String> permissions = new ArrayList<>();
+        if (requestCamera)
+            permissions.add(Manifest.permission.CAMERA);
+        if (requestAudio)
+            permissions.add(Manifest.permission.RECORD_AUDIO);
+
+        if (activity != null) {
+            ActivityCompat.requestPermissions(
+                    activity,
+                    permissions.toArray(new String[permissions.size()]),
+                    CameraKit.Constants.PERMISSION_REQUEST_CAMERA);
+        }
+    }
+
+    // Registers an explicit event listener for camera events.
+    public void addCameraKitListener(CameraKitEventListener CameraKitEventListener) {
+        mEventDispatcher.addListener(CameraKitEventListener);
+    }
+
+    // Registers an annotated object whose methods are bound as event handlers.
+    public void bindCameraKitListener(Object object) {
+        mEventDispatcher.addBinding(object);
+    }
+
+}

+ 76 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/FocusMarkerLayout.java

@@ -0,0 +1,76 @@
+package com.wonderkiln.camerakit;
+
+import android.animation.Animator;
+import android.animation.AnimatorListenerAdapter;
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.LayoutInflater;
+import android.widget.FrameLayout;
+import android.widget.ImageView;
+
+import com.camerakit.R;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+/**
+ * Overlay that briefly shows an animated focus marker at the position of a
+ * tap-to-focus gesture. The marker scales down into place while an inner fill
+ * pulses, then both fade out.
+ */
+public class FocusMarkerLayout extends FrameLayout {
+
+    private FrameLayout mFocusMarkerContainer;
+    private ImageView mFill;
+
+    public FocusMarkerLayout(@NonNull Context context) {
+        this(context, null);
+    }
+
+    public FocusMarkerLayout(@NonNull Context context, @Nullable AttributeSet attrs) {
+        super(context, attrs);
+        LayoutInflater.from(getContext()).inflate(R.layout.layout_focus_marker, this);
+
+        mFocusMarkerContainer = findViewById(R.id.focusMarkerContainer);
+        mFill = findViewById(R.id.fill);
+
+        // Invisible until focus() is called.
+        mFocusMarkerContainer.setAlpha(0);
+    }
+
+    /**
+     * Plays the focus animation at the given position.
+     *
+     * @param mx horizontal position as a fraction of this layout's width (0..1)
+     * @param my vertical position as a fraction of this layout's height (0..1)
+     */
+    public void focus(float mx, float my) {
+        mx *= getWidth();
+        my *= getHeight();
+        // Center the marker on the tap point.
+        // BUGFIX: the y offset previously used getWidth()/2, which mis-centers the
+        // marker vertically whenever the marker container is not square.
+        int x = (int) (mx - mFocusMarkerContainer.getWidth() / 2);
+        int y = (int) (my - mFocusMarkerContainer.getHeight() / 2);
+
+        mFocusMarkerContainer.setTranslationX(x);
+        mFocusMarkerContainer.setTranslationY(y);
+
+        // Cancel any in-flight animations (and their end-listeners) before restarting.
+        mFocusMarkerContainer.animate().setListener(null).cancel();
+        mFill.animate().setListener(null).cancel();
+
+        mFill.setScaleX(0);
+        mFill.setScaleY(0);
+        mFill.setAlpha(1f);
+
+        mFocusMarkerContainer.setScaleX(1.36f);
+        mFocusMarkerContainer.setScaleY(1.36f);
+        mFocusMarkerContainer.setAlpha(1f);
+
+        // Marker shrinks into place, then fades out after a short hold.
+        mFocusMarkerContainer.animate().scaleX(1).scaleY(1).setStartDelay(0).setDuration(330)
+                .setListener(new AnimatorListenerAdapter() {
+                    @Override
+                    public void onAnimationEnd(Animator animation) {
+                        super.onAnimationEnd(animation);
+                        mFocusMarkerContainer.animate().alpha(0).setStartDelay(750).setDuration(800).setListener(null).start();
+                    }
+                }).start();
+
+        // Inner fill pulses up, then fades immediately.
+        mFill.animate().scaleX(1).scaleY(1).setDuration(330)
+                .setListener(new AnimatorListenerAdapter() {
+                    @Override
+                    public void onAnimationEnd(Animator animation) {
+                        super.onAnimationEnd(animation);
+                        mFill.animate().alpha(0).setDuration(800).setListener(null).start();
+                    }
+                }).start();
+
+    }
+
+
+}

+ 1147 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/api16/Camera1.java

@@ -0,0 +1,1147 @@
+package com.wonderkiln.camerakit.api16;
+
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.hardware.Camera;
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder;
+import android.os.Build;
+import android.os.Environment;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.Log;
+
+import com.google.android.gms.vision.Detector;
+import com.google.android.gms.vision.text.TextBlock;
+import com.wonderkiln.camerakit.CameraKit;
+import com.wonderkiln.camerakit.CameraProperties;
+import com.wonderkiln.camerakit.base.CameraImpl;
+import com.wonderkiln.camerakit.base.PreviewImpl;
+import com.wonderkiln.camerakit.events.CameraKitError;
+import com.wonderkiln.camerakit.events.CameraKitEvent;
+import com.wonderkiln.camerakit.events.EventDispatcher;
+import com.wonderkiln.camerakit.types.CaptureMethod;
+import com.wonderkiln.camerakit.types.Facing;
+import com.wonderkiln.camerakit.types.Flash;
+import com.wonderkiln.camerakit.types.Focus;
+import com.wonderkiln.camerakit.types.VideoQuality;
+import com.wonderkiln.camerakit.utils.AspectRatio;
+import com.wonderkiln.camerakit.utils.Size;
+import com.wonderkiln.camerakit.utils.YuvOperator;
+import com.wonderkiln.camerakit.vision.FrameProcessingRunnable;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+import static com.wonderkiln.camerakit.CameraKit.Constants.FLASH_OFF;
+import static com.wonderkiln.camerakit.CameraKit.Constants.FOCUS_CONTINUOUS;
+import static com.wonderkiln.camerakit.CameraKit.Constants.FOCUS_OFF;
+import static com.wonderkiln.camerakit.CameraKit.Constants.FOCUS_TAP;
+import static com.wonderkiln.camerakit.CameraKit.Constants.METHOD_STANDARD;
+import static com.wonderkiln.camerakit.CameraKit.Constants.METHOD_STILL;
+
+// Camera implementation backed by the legacy android.hardware.Camera (API 16+) API.
+@SuppressWarnings("deprecation")
+public class Camera1 extends CameraImpl {
+
+    private static final String TAG = Camera1.class.getSimpleName();
+
+    private static final int FOCUS_AREA_SIZE_DEFAULT             = 300;
+    private static final int FOCUS_METERING_AREA_WEIGHT_DEFAULT  = 1000;
+    private static final int DELAY_MILLIS_BEFORE_RESETTING_FOCUS = 3000;
+
+    private int                      mCameraId;
+    private Camera                   mCamera;
+    private Camera.Parameters        mCameraParameters;
+    private CameraProperties         mCameraProperties;
+    private Camera.CameraInfo        mCameraInfo;
+    private Size                     mCaptureSize;
+    private Size                     mVideoSize;
+    private Size                     mPreviewSize;
+    private MediaRecorder            mMediaRecorder;
+    private Camera.AutoFocusCallback mAutofocusCallback;
+    private boolean                  capturingImage = false;
+
+    private boolean mShowingPreview;
+    private boolean mRecording;
+    private int     mDisplayOrientation;
+    private int     mDeviceOrientation;
+
+    @Facing
+    private int mFacing;
+
+    @Flash
+    private int mFlash;
+
+    @Focus
+    private int mFocus;
+
+    @CaptureMethod
+    private int mMethod;
+
+    @VideoQuality
+    private int mVideoQuality;
+
+    private Detector<TextBlock> mTextDetector;
+
+    private int mVideoBitRate;
+
+    private boolean mLockVideoAspectRatio;
+
+    private Handler                 mainHandler = new Handler(Looper.getMainLooper());
+    private Handler                 mHandler    = new Handler();
+    private FrameProcessingRunnable mFrameProcessor;
+
+    // Zoom factor, 1.0 = no zoom.
+    private float mZoom = 1.f;
+
+    private VideoCapturedCallback mVideoCallback;
+
+    // Guards all access to mCamera / mCameraParameters across threads.
+    private final Object mCameraLock = new Object();
+
+    private File mMediaRecorderOutputFile;
+    private int  orientation;
+
+    public Camera1(EventDispatcher eventDispatcher, PreviewImpl preview) {
+        super(eventDispatcher, preview);
+
+        // Restart the preview whenever the backing surface changes size/rotation.
+        preview.setCallback(new PreviewImpl.Callback() {
+            @Override
+            public void onSurfaceChanged() {
+                if (mCamera != null) {
+                    if (mShowingPreview) {
+                        mCamera.stopPreview();
+                        mShowingPreview = false;
+                    }
+
+                    setDisplayAndDeviceOrientation();
+                    setupPreview();
+
+                    if (!mShowingPreview) {
+                        mCamera.startPreview();
+                        mShowingPreview = true;
+                    }
+                }
+            }
+        });
+
+        mCameraInfo = new Camera.CameraInfo();
+    }
+
+    // CameraImpl:
+
+    @Override
+    public void start() {
+        // Re-resolve the camera id for the current facing, open the device, and begin
+        // the preview if the surface is already available (otherwise the surface
+        // callback in the constructor starts it later).
+        setFacing(mFacing);
+        openCamera();
+        if (mPreview.isReady()) {
+            setDisplayAndDeviceOrientation();
+            setupPreview();
+            mCamera.startPreview();
+            mShowingPreview = true;
+        }
+    }
+
+    @Override
+    public void stop() {
+        // Cancel any pending focus-reset callbacks before tearing down.
+        mHandler.removeCallbacksAndMessages(null);
+        if (mCamera != null) {
+            try {
+                mCamera.stopPreview();
+            } catch (Exception e) {
+                // stopPreview can throw if the camera was already released; report it.
+                notifyErrorListener(e);
+            }
+        }
+        mShowingPreview = false;
+
+        releaseMediaRecorder();
+        releaseCamera();
+        if (mFrameProcessor != null) {
+            mFrameProcessor.cleanup();
+        }
+    }
+
+    // Re-applies the currently stored orientations.
+    public void setDisplayAndDeviceOrientation() {
+        setDisplayAndDeviceOrientation(this.mDisplayOrientation, this.mDeviceOrientation);
+    }
+
+    @Override
+    public void setDisplayAndDeviceOrientation(int displayOrientation, int deviceOrientation) {
+        this.mDisplayOrientation = displayOrientation;
+        this.mDeviceOrientation = deviceOrientation;
+
+        synchronized (mCameraLock) {
+            if (isCameraOpened()) {
+                try {
+                    mCamera.setDisplayOrientation(calculatePreviewRotation());
+                } catch (RuntimeException e) {
+                    // Camera is released. Ignore. Orientations are still valid in local member fields
+                    // so next time camera starts it will have correct configuration.
+                }
+            }
+        }
+    }
+
+    @Override
+    public void setFacing(@Facing int facing) {
+        synchronized (mCameraLock) {
+            int internalFacing = new ConstantMapper.Facing(facing).map();
+            if (internalFacing == -1) {
+                // Unknown facing constant: leave the current camera untouched.
+                return;
+            }
+
+            // Find the first camera whose hardware facing matches the request.
+            for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
+                Camera.getCameraInfo(i, mCameraInfo);
+                if (mCameraInfo.facing == internalFacing) {
+                    mCameraId = i;
+                    mFacing = facing;
+                    break;
+                }
+            }
+
+            // If a matching camera was found and one is already open, restart so the
+            // new camera id takes effect.
+            if (mFacing == facing && isCameraOpened()) {
+                stop();
+                start();
+            }
+        }
+    }
+
+    @Override
+    public void setFlash(@Flash int flash) {
+        synchronized (mCameraLock) {
+            if (mCameraParameters != null) {
+                List<String> flashes = mCameraParameters.getSupportedFlashModes();
+                String internalFlash = new ConstantMapper.Flash(flash).map();
+                if (flashes != null && flashes.contains(internalFlash)) {
+                    mCameraParameters.setFlashMode(internalFlash);
+                    mFlash = flash;
+                } else {
+                    // Requested mode unsupported: keep the current mode if the device
+                    // supports it, otherwise fall back to flash off.
+                    String currentFlash = new ConstantMapper.Flash(mFlash).map();
+                    if (flashes == null || !flashes.contains(currentFlash)) {
+                        mCameraParameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
+                        mFlash = FLASH_OFF;
+                    }
+                }
+
+                mCamera.setParameters(mCameraParameters);
+            } else {
+                // Camera not open yet: remember the choice for when it opens.
+                mFlash = flash;
+            }
+        }
+    }
+
+    @Override
+    public void setFocus(@Focus int focus) {
+        // Maps the CameraKit focus constant onto the closest supported Camera.Parameters
+        // focus mode. NOTE(review): the chosen mode is set on mCameraParameters but
+        // mCamera.setParameters is not called here - presumably applied elsewhere; confirm.
+        synchronized (mCameraLock) {
+            this.mFocus = focus;
+            switch (focus) {
+                case FOCUS_CONTINUOUS:
+                    if (mCameraParameters != null) {
+                        final List<String> modes = mCameraParameters.getSupportedFocusModes();
+                        if (modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
+                            mCameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
+                        } else {
+                            setFocus(FOCUS_OFF);
+                        }
+                    }
+                    break;
+
+                case FOCUS_TAP:
+                    if (mCameraParameters != null) {
+                        final List<String> modes = mCameraParameters.getSupportedFocusModes();
+                        if (modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
+                            mCameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
+                        }
+                    }
+                    break;
+
+                case FOCUS_OFF:
+                    if (mCameraParameters != null) {
+                        // Prefer fixed, then infinity, then auto as the "off" approximation.
+                        final List<String> modes = mCameraParameters.getSupportedFocusModes();
+                        if (modes.contains(Camera.Parameters.FOCUS_MODE_FIXED)) {
+                            mCameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
+                        } else if (modes.contains(Camera.Parameters.FOCUS_MODE_INFINITY)) {
+                            mCameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
+                        } else {
+                            mCameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
+                        }
+                    }
+                    break;
+            }
+        }
+    }
+
+    @Override
+    public void setMethod(@CaptureMethod int method) {
+        this.mMethod = method;
+    }
+
+    @Override
+    public void setTextDetector(Detector<TextBlock> detector) {
+        this.mTextDetector = detector;
+    }
+
+    @Override
+    public void setVideoQuality(int videoQuality) {
+        this.mVideoQuality = videoQuality;
+    }
+
+    @Override
+    public void setVideoBitRate(int videoBitRate) {
+        this.mVideoBitRate = videoBitRate;
+    }
+
+    @Override
+    public void setZoom(float zoomFactor) {
+        synchronized (mCameraLock) {
+            try {
+                // NOTE(review): this assignment is immediately overwritten by the
+                // clamp below; it is redundant but harmless.
+                this.mZoom = zoomFactor;
+                // Clamp to a minimum of 1x (no zoom-out below native field of view).
+                if (zoomFactor <= 1) {
+                    mZoom = 1;
+                } else {
+                    mZoom = zoomFactor;
+                }
+                if (mCameraParameters != null && mCameraParameters.isZoomSupported()) {
+                    // Camera.Parameters expresses zoom ratios in percent (100 = 1x).
+                    int zoomPercent = (int) (mZoom * 100);
+                    mCameraParameters.setZoom(getZoomForPercent(zoomPercent));
+                    mCamera.setParameters(mCameraParameters);
+                    // Keep mZoom consistent with the hardware's maximum ratio.
+                    float maxZoom = mCameraParameters.getZoomRatios().get(mCameraParameters.getZoomRatios().size() - 1) / 100f;
+                    if (mZoom > maxZoom)
+                        mZoom = maxZoom;
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+    // Multiplies the current zoom by the pinch modifier.
+    @Override
+    public void modifyZoom(float modifier) {
+        synchronized (mCameraLock) {
+            setZoom(this.mZoom * modifier);
+        }
+    }
+
+    /**
+     * Maps a zoom percentage (100 = 1x) onto an index into the device's supported
+     * zoom-ratio table (Camera.Parameters.getZoomRatios(), sorted ascending).
+     * Picks the nearest supported step, clamping to the table bounds.
+     */
+    private int getZoomForPercent(int zoomPercent) {
+        List<Integer> zoomRatios = mCameraParameters.getZoomRatios();
+        int lowerIndex = -1;
+        int upperIndex = -1;
+
+        for (int i = 0; i < zoomRatios.size(); i++) {
+            int ratio = zoomRatios.get(i);
+            // BUGFIX: an exact match previously satisfied neither the "<" nor the ">"
+            // branch, so the scan skipped it and the method returned the next-higher
+            // index instead of the requested ratio.
+            if (ratio == zoomPercent) {
+                return i;
+            }
+            if (ratio < zoomPercent) {
+                lowerIndex = i;
+            } else {
+                upperIndex = i;
+                break;
+            }
+        }
+
+        if (lowerIndex < 0) {
+            // Requested percent is below the smallest supported ratio.
+            return 0;
+        }
+
+        if (lowerIndex + 1 == upperIndex) {
+            // Between two adjacent steps: round down.
+            return lowerIndex;
+        }
+
+        if (upperIndex >= 0) {
+            return upperIndex;
+        }
+
+        // Requested percent exceeds the largest supported ratio.
+        return zoomRatios.size() - 1;
+    }
+
+    // Requests autofocus/metering on the tap point, where (x, y) are fractions of the
+    // preview in [0..1]. Falls back through three strategies depending on what the
+    // device supports: focus areas, metering areas only, or plain autofocus.
+    @Override
+    public void setFocusArea(float x, float y) {
+        synchronized (mCameraLock) {
+            if (mCamera != null) {
+                Camera.Parameters parameters = getCameraParameters();
+                if (parameters == null)
+                    return;
+
+                String focusMode = parameters.getFocusMode();
+                Rect rect = calculateFocusArea(x, y);
+
+                List<Camera.Area> meteringAreas = new ArrayList<>();
+                meteringAreas.add(new Camera.Area(rect, getFocusMeteringAreaWeight()));
+                // Preferred path: device supports focus areas and is in a mode that honors them.
+                if (parameters.getMaxNumFocusAreas() != 0 && focusMode != null &&
+                        (focusMode.equals(Camera.Parameters.FOCUS_MODE_AUTO) ||
+                                focusMode.equals(Camera.Parameters.FOCUS_MODE_MACRO) ||
+                                focusMode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE) ||
+                                focusMode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
+                ) {
+                    parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
+                    parameters.setFocusAreas(meteringAreas);
+                    if (parameters.getMaxNumMeteringAreas() > 0) {
+                        parameters.setMeteringAreas(meteringAreas);
+                    }
+                    if (!parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
+                        return; //cannot autoFocus
+                    }
+                    mCamera.setParameters(parameters);
+                    mCamera.autoFocus(new Camera.AutoFocusCallback() {
+                        @Override
+                        public void onAutoFocus(boolean success, Camera camera) {
+                            resetFocus(success, camera);
+                        }
+                    });
+                } else if (parameters.getMaxNumMeteringAreas() > 0) {
+                    // No focus areas, but metering areas are available.
+                    if (!parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
+                        return; //cannot autoFocus
+                    }
+                    parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
+                    parameters.setFocusAreas(meteringAreas);
+                    parameters.setMeteringAreas(meteringAreas);
+
+                    mCamera.setParameters(parameters);
+                    mCamera.autoFocus(new Camera.AutoFocusCallback() {
+                        @Override
+                        public void onAutoFocus(boolean success, Camera camera) {
+                            resetFocus(success, camera);
+                        }
+                    });
+                } else {
+                    // Last resort: trigger a plain autofocus and forward the result.
+                    mCamera.autoFocus(new Camera.AutoFocusCallback() {
+                        @Override
+                        public void onAutoFocus(boolean success, Camera camera) {
+                            if (mAutofocusCallback != null) {
+                                mAutofocusCallback.onAutoFocus(success, camera);
+                            }
+                        }
+                    });
+                }
+            }
+        }
+    }
+
+    @Override
+    public void setLockVideoAspectRatio(boolean lockVideoAspectRatio) {
+        this.mLockVideoAspectRatio = lockVideoAspectRatio;
+    }
+
+    // Captures a still image using the configured method: METHOD_STANDARD takes a
+    // full-resolution picture via Camera.takePicture; METHOD_STILL grabs one preview
+    // frame and JPEG-encodes it (faster, preview resolution).
+    @Override
+    public void captureImage(final ImageCapturedCallback callback) {
+        switch (mMethod) {
+            case METHOD_STANDARD:
+                synchronized (mCameraLock) {
+                    // Null check required for camera here as is briefly null when View is detached
+                    if (!capturingImage && mCamera != null) {
+
+                        // Set boolean to wait for image callback
+                        capturingImage = true;
+
+                        // Set the captureRotation right before taking a picture so it's accurate
+                        int captureRotation = calculateCaptureRotation();
+                        mCameraParameters.setRotation(captureRotation);
+                        mCamera.setParameters(mCameraParameters);
+
+                        mCamera.takePicture(null, null, null,
+                                new Camera.PictureCallback() {
+                                    @Override
+                                    public void onPictureTaken(byte[] data, Camera camera) {
+                                        callback.imageCaptured(data);
+
+                                        // Reset capturing state to allow photos to be taken
+                                        capturingImage = false;
+
+                                        // takePicture halts the preview; restart the camera.
+                                        synchronized (mCameraLock) {
+                                            if (isCameraOpened()) {
+                                                try {
+                                                    stop();
+                                                    start();
+                                                } catch (Exception e) {
+                                                    notifyErrorListener(e);
+                                                }
+                                            }
+                                        }
+                                    }
+                                });
+                    } else {
+                        Log.w(TAG, "Unable, waiting for picture to be taken");
+                    }
+                    break;
+                }
+
+            case METHOD_STILL:
+                // NOTE(review): unlike METHOD_STANDARD, mCamera is dereferenced here
+                // without a null check - confirm it cannot be null on this path.
+                synchronized (mCameraLock) {
+                    mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
+                        @Override
+                        public void onPreviewFrame(byte[] data, Camera camera) {
+                            Camera.Parameters parameters = camera.getParameters();
+                            int width = parameters.getPreviewSize().width;
+                            int height = parameters.getPreviewSize().height;
+                            int rotation = calculateCaptureRotation();
+
+                            // Rotate the raw YUV frame to match the capture orientation.
+                            YuvOperator yuvOperator = new YuvOperator(data, width, height);
+                            yuvOperator.rotate(rotation);
+                            data = yuvOperator.getYuvData();
+
+                            // A 90/270 degree rotation swaps the output dimensions.
+                            int yuvOutputWidth = width;
+                            int yuvOutputHeight = height;
+                            if (rotation == 90 || rotation == 270) {
+                                yuvOutputWidth = height;
+                                yuvOutputHeight = width;
+                            }
+
+                            YuvImage yuvImage = new YuvImage(data, parameters.getPreviewFormat(), yuvOutputWidth, yuvOutputHeight, null);
+                            ByteArrayOutputStream out = new ByteArrayOutputStream();
+                            yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 100, out);
+                            callback.imageCaptured(out.toByteArray());
+                        }
+                    });
+                    break;
+                }
+        }
+    }
+
+    @Override
+    public void captureVideo(File videoFile, int maxDuration, VideoCapturedCallback callback) {
+        synchronized (mCameraLock) {
+            try {
+                if (prepareMediaRecorder(videoFile, maxDuration)) {
+                    mMediaRecorder.start();
+                    mRecording = true;
+                    this.mVideoCallback = callback;
+                } else {
+                    releaseMediaRecorder();
+                }
+            } catch (IOException e) {
+                releaseMediaRecorder();
+            } catch (RuntimeException e) {
+                releaseMediaRecorder();
+            }
+        }
+    }
+
    /**
     * Stops an in-progress recording, delivers the output file to the pending
     * {@code mVideoCallback}, and restarts the preview.
     */
    @Override
    public void stopVideo() {
        synchronized (mCameraLock) {
            if (mRecording) {
                try {
                    mMediaRecorder.stop();
                    if (this.mVideoCallback != null) {
                        mVideoCallback.videoCaptured(mMediaRecorderOutputFile);
                        mVideoCallback = null;
                    }
                } catch (RuntimeException e) {
                    // MediaRecorder.stop() throws if no valid data was recorded;
                    // the partial file is useless, so delete it.
                    // NOTE(review): mVideoCallback is not cleared on this path — a stale
                    // callback may fire on a later recording. Confirm intended.
                    mMediaRecorderOutputFile.delete();
                }

                releaseMediaRecorder();
                mRecording = false;
            }

            // Restart the camera so the preview resumes after recording.
            stop();
            start();
        }
    }
+
+    @Override
+    public Size getCaptureResolution() {
+        if (mCaptureSize == null && mCameraParameters != null) {
+            TreeSet<Size> sizes = new TreeSet<>();
+            for (Camera.Size size : mCameraParameters.getSupportedPictureSizes()) {
+                sizes.add(new Size(size.width, size.height));
+            }
+
+            TreeSet<AspectRatio> aspectRatios = findCommonAspectRatios(
+                    mCameraParameters.getSupportedPreviewSizes(),
+                    mCameraParameters.getSupportedPictureSizes()
+            );
+            AspectRatio targetRatio = aspectRatios.size() > 0 ? aspectRatios.last() : null;
+
+            Iterator<Size> descendingSizes = sizes.descendingIterator();
+            Size size;
+            while (descendingSizes.hasNext() && mCaptureSize == null) {
+                size = descendingSizes.next();
+                if (targetRatio == null || targetRatio.matches(size)) {
+                    mCaptureSize = size;
+                    break;
+                }
+            }
+        }
+
+        return mCaptureSize;
+    }
+
+    @Override
+    public Size getVideoResolution() {
+        if (mVideoSize == null && mCameraParameters != null) {
+            if (mCameraParameters.getSupportedVideoSizes() == null) {
+                mVideoSize = getCaptureResolution();
+                return mVideoSize;
+            }
+
+            TreeSet<Size> sizes = new TreeSet<>();
+            for (Camera.Size size : mCameraParameters.getSupportedVideoSizes()) {
+                sizes.add(new Size(size.width, size.height));
+            }
+
+            TreeSet<AspectRatio> aspectRatios = findCommonAspectRatios(
+                    mCameraParameters.getSupportedPreviewSizes(),
+                    mCameraParameters.getSupportedVideoSizes()
+            );
+            AspectRatio targetRatio = aspectRatios.size() > 0 ? aspectRatios.last() : null;
+
+            Iterator<Size> descendingSizes = sizes.descendingIterator();
+            Size size;
+            while (descendingSizes.hasNext() && mVideoSize == null) {
+                size = descendingSizes.next();
+                if (targetRatio == null || targetRatio.matches(size)) {
+                    mVideoSize = size;
+                    break;
+                }
+            }
+        }
+
+        return mVideoSize;
+    }
+
    /**
     * Stores the current device orientation in degrees; consumed by
     * {@link #getPreviewResolution()} to decide width/height swapping.
     */
    @Override
    public void setDeviceOrientation(int orientation) {
        this.orientation = orientation;
    }
+
+    @Override
+    public Size getPreviewResolution() {
+        Size cameraPreviewResolution = getCameraPreviewResolution();
+        if (cameraPreviewResolution == null) {
+            return new Size(480, 640);
+        }
+        int mWidth = cameraPreviewResolution.getHeight();
+        int mHeight = cameraPreviewResolution.getWidth();
+        if (orientation == 180) {
+            mWidth = cameraPreviewResolution.getWidth();
+            mHeight = cameraPreviewResolution.getHeight();
+        }
+        return new Size(mWidth, mHeight);
+    }
+
    /**
     * Lazily picks the raw camera preview size: the largest supported preview
     * size whose aspect ratio matches the preferred common ratio.
     */
    public Size getCameraPreviewResolution() {
        if (mPreviewSize == null && mCameraParameters != null) {
            TreeSet<Size> sizes = new TreeSet<>();
            for (Camera.Size size : mCameraParameters.getSupportedPreviewSizes()) {
                sizes.add(new Size(size.width, size.height));
            }

            TreeSet<AspectRatio> aspectRatios = findCommonAspectRatios(
                    mCameraParameters.getSupportedPreviewSizes(),
                    mCameraParameters.getSupportedPictureSizes()
            );

            AspectRatio targetRatio = null;

            if (mLockVideoAspectRatio) {
                // NOTE(review): this "video" ratio set is built from
                // getSupportedPictureSizes(), identical to the set above — looks
                // like a copy-paste; getSupportedVideoSizes() was probably
                // intended (with a null guard). TODO confirm before changing.
                TreeSet<AspectRatio> videoAspectRatios = findCommonAspectRatios(
                        mCameraParameters.getSupportedPreviewSizes(),
                        mCameraParameters.getSupportedPictureSizes()
                );

                // Prefer the largest ratio present in both sets.
                Iterator<AspectRatio> descendingIterator = aspectRatios.descendingIterator();
                while (targetRatio == null && descendingIterator.hasNext()) {
                    AspectRatio ratio = descendingIterator.next();
                    if (videoAspectRatios.contains(ratio)) {
                        targetRatio = ratio;
                    }
                }
            }

            if (targetRatio == null) {
                targetRatio = aspectRatios.size() > 0 ? aspectRatios.last() : null;
            }

            // Largest size first; take the first ratio match.
            Iterator<Size> descendingSizes = sizes.descendingIterator();
            Size size;
            while (descendingSizes.hasNext() && mPreviewSize == null) {
                size = descendingSizes.next();
                if (targetRatio == null || targetRatio.matches(size)) {
                    mPreviewSize = size;
                    break;
                }
            }
        }
        return mPreviewSize;
    }
+
+    @Override
+    public boolean isCameraOpened() {
+        return mCamera != null;
+    }
+
+    @Override
+    public boolean frontCameraOnly() {
+        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
+        Camera.getCameraInfo(0, cameraInfo);
+        boolean isFrontCameraOnly = (Camera.getNumberOfCameras() == 1 && cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
+        return isFrontCameraOnly;
+    }
+
    /**
     * @return view-angle properties collected when the camera was opened, or
     * null if the camera has not been opened yet.
     */
    @Nullable
    @Override
    public CameraProperties getCameraProperties() {
        return mCameraProperties;
    }
+
+    // Internal:
+
    /**
     * Opens the camera selected by {@code mCameraId}, configures its parameters,
     * and dispatches {@link CameraKitEvent#TYPE_CAMERA_OPEN}. Any previously held
     * camera is released first.
     */
    private void openCamera() {
        synchronized (mCameraLock) {
            if (mCamera != null) {
                releaseCamera();
            }

            mCamera = Camera.open(mCameraId);
            mCameraParameters = mCamera.getParameters();

            collectCameraProperties();
            adjustCameraParameters();

            // Autofocus-move callback exists only on API 16+.
            if (Build.VERSION.SDK_INT >= 16) {
                mCamera.setAutoFocusMoveCallback(new Camera.AutoFocusMoveCallback() {
                    @Override
                    public void onAutoFocusMoving(boolean b, Camera camera) {
                        // Forward focus start/stop to listeners as a FOCUS_MOVED event.
                        CameraKitEvent event = new CameraKitEvent(CameraKitEvent.TYPE_FOCUS_MOVED);
                        event.getData().putBoolean("started", b);
                        mEventDispatcher.dispatch(event);
                    }
                });
            }

            mEventDispatcher.dispatch(new CameraKitEvent(CameraKitEvent.TYPE_CAMERA_OPEN));

            // Start feeding preview frames to the text detector, if one was set.
            if (mTextDetector != null) {
                mFrameProcessor = new FrameProcessingRunnable(mTextDetector, mPreviewSize, mCamera);
                mFrameProcessor.start();
            }
        }
    }
+
    /**
     * Reattaches the camera to the preview surface. Wraps the checked
     * IOException from setPreviewDisplay — there is no recovery at this layer.
     */
    private void setupPreview() {
        synchronized (mCameraLock) {
            if (mCamera != null) {
                try {
                    // reconnect() reclaims the camera after MediaRecorder used it.
                    mCamera.reconnect();
                    mCamera.setPreviewDisplay(mPreview.getSurfaceHolder());
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        }
    }
+
    /**
     * Releases the camera and clears all cached sizes/parameters, then notifies
     * listeners with {@link CameraKitEvent#TYPE_CAMERA_CLOSE}.
     */
    private void releaseCamera() {
        synchronized (mCameraLock) {
            if (mCamera != null) {
                mCamera.lock();
                mCamera.release();
                mCamera = null;
                mCameraParameters = null;
                // Cached resolutions are per-camera; drop them so the next open
                // re-evaluates against the new camera's capabilities.
                mPreviewSize = null;
                mCaptureSize = null;
                mVideoSize = null;

                mEventDispatcher.dispatch(new CameraKitEvent(CameraKitEvent.TYPE_CAMERA_CLOSE));
                // NOTE(review): mFrameProcessor is released but never nulled here;
                // confirm release() is safe to call twice.
                if (mFrameProcessor != null) {
                    mFrameProcessor.release();
                }
            }
        }
    }
+
+    private int calculatePreviewRotation() {
+        if (mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+            return (360 - ((mCameraInfo.orientation + mDisplayOrientation) % 360)) % 360;
+        } else {
+            return (mCameraInfo.orientation - mDisplayOrientation + 360) % 360;
+        }
+    }
+
+    private int calculateCaptureRotation() {
+        int captureRotation = 0;
+        if (mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+            captureRotation = (mCameraInfo.orientation + mDisplayOrientation) % 360;
+        } else {  // back-facing camera
+            captureRotation = (mCameraInfo.orientation - mDisplayOrientation + 360) % 360;
+        }
+
+        // Accommodate for any extra device rotation relative to fixed screen orientations
+        // (e.g. activity fixed in portrait, but user took photo/video in landscape)
+        if (mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+            captureRotation = ((captureRotation - (mDisplayOrientation - mDeviceOrientation)) + 360) % 360;
+        } else {  // back-facing camera
+            captureRotation = (captureRotation + (mDisplayOrientation - mDeviceOrientation) + 360) % 360;
+        }
+
+        return captureRotation;
+    }
+
+    private void notifyErrorListener(@NonNull final String details) {
+        CameraKitError error = new CameraKitError();
+        error.setMessage(details);
+        mEventDispatcher.dispatch(error);
+    }
+
+    private void notifyErrorListener(@NonNull final Exception e) {
+        CameraKitError error = new CameraKitError(e);
+        mEventDispatcher.dispatch(error);
+    }
+
+    private Camera.Parameters getCameraParameters() {
+        if (mCamera != null) {
+            try {
+                return mCamera.getParameters();
+            } catch (Exception e) {
+                return null;
+            }
+        }
+
+        return null;
+    }
+
    /**
     * Applies camera parameters, pausing and resuming the preview around the
     * change because some parameters cannot be set while previewing.
     */
    private void adjustCameraParameters() {
        synchronized (mCameraLock) {
            if (mShowingPreview) {
                mCamera.stopPreview();
            }

            // Delegates to the retrying overload, starting at attempt 0.
            adjustCameraParameters(0);

            if (mShowingPreview) {
                mCamera.startPreview();
            }
        }
    }
+
    /**
     * Applies preview size, picture size, rotation, focus, flash and zoom to the
     * camera, falling back to the last known-good parameter set when the device
     * rejects a value, and retrying (up to 100 times, 1ms apart) while the
     * preferred resolutions are not yet resolvable.
     *
     * @param currentTry retry attempt counter, 0-based
     */
    private void adjustCameraParameters(int currentTry) {
        boolean haveToReadjust = false;
        // Snapshot of parameters known to be accepted by this device; used as a
        // fallback whenever setParameters() rejects our chosen values.
        Camera.Parameters resolutionLess = mCamera.getParameters();

        if (getPreviewResolution() != null) {
            mPreview.setPreviewParameters(
                    getPreviewResolution().getWidth(),
                    getPreviewResolution().getHeight(),
                    mCameraParameters.getPreviewFormat()
            );

            mCameraParameters.setPreviewSize(
                    getCameraPreviewResolution().getWidth(),
                    getCameraPreviewResolution().getHeight()
            );

            try {
                mCamera.setParameters(mCameraParameters);
                resolutionLess = mCameraParameters;
            } catch (Exception e) {
                notifyErrorListener(e);
                // Some phones can't set parameters that camerakit has chosen, so fallback to defaults
                mCameraParameters = resolutionLess;
            }
        } else {
            haveToReadjust = true;
        }

        if (getCaptureResolution() != null) {
            mCameraParameters.setPictureSize(
                    getCaptureResolution().getWidth(),
                    getCaptureResolution().getHeight()
            );

            try {
                mCamera.setParameters(mCameraParameters);
                resolutionLess = mCameraParameters;
            } catch (Exception e) {
                notifyErrorListener(e);
                //Some phones can't set parameters that camerakit has chosen, so fallback to defaults
                mCameraParameters = resolutionLess;
            }
        } else {
            haveToReadjust = true;
        }

        // JPEG rotation so captured stills come out upright.
        int rotation = calculateCaptureRotation();
        mCameraParameters.setRotation(rotation);

        setFocus(mFocus);

        try {
            setFlash(mFlash);
        } catch (Exception e) {
            notifyErrorListener(e);
        }

        if (mCameraParameters.isZoomSupported()) {
            setZoom(mZoom);
        }

        mCamera.setParameters(mCameraParameters);

        // Resolutions may become available slightly after open; retry briefly.
        if (haveToReadjust && currentTry < 100) {
            try {
                Thread.sleep(1);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }

            notifyErrorListener(String.format("retryAdjustParam Failed, attempt #: %d", currentTry));
            adjustCameraParameters(currentTry + 1);
        }
    }
+
+    private void collectCameraProperties() {
+        mCameraProperties = new CameraProperties(mCameraParameters.getVerticalViewAngle(),
+                mCameraParameters.getHorizontalViewAngle());
+    }
+
+    private TreeSet<AspectRatio> findCommonAspectRatios(List<Camera.Size> previewSizes, List<Camera.Size> pictureSizes) {
+        Set<AspectRatio> previewAspectRatios = new HashSet<>();
+        for (Camera.Size size : previewSizes) {
+            CamcorderProfile camcorderProfile = getCamcorderProfile(mVideoQuality);
+            AspectRatio deviceRatio;
+            if (camcorderProfile == null) {
+                deviceRatio = AspectRatio.of(CameraKit.Internal.screenHeight, CameraKit.Internal.screenWidth); //全屏预览和视频大小不一样
+            } else {
+                deviceRatio = AspectRatio.of(camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight);//控制预览和视频大小一样
+            }
+
+            AspectRatio previewRatio = AspectRatio.of(size.width, size.height);
+            if (deviceRatio.equals(previewRatio)) {
+                previewAspectRatios.add(previewRatio);
+            }
+        }
+
+        Set<AspectRatio> captureAspectRatios = new HashSet<>();
+        for (Camera.Size size : pictureSizes) {
+            captureAspectRatios.add(AspectRatio.of(size.width, size.height));
+        }
+
+        TreeSet<AspectRatio> output = new TreeSet<>();
+        if (previewAspectRatios.size() == 0) {
+            // if no common aspect ratios
+            Camera.Size maxSize = previewSizes.get(0);
+            AspectRatio maxPreviewAspectRatio = AspectRatio.of(maxSize.width, maxSize.height);
+            for (AspectRatio aspectRatio : captureAspectRatios) {
+                if (aspectRatio.equals(maxPreviewAspectRatio)) {
+                    output.add(aspectRatio);
+                }
+            }
+        } else {
+            // if common aspect ratios exist
+            for (AspectRatio aspectRatio : previewAspectRatios) {
+                if (captureAspectRatios.contains(aspectRatio)) {
+                    output.add(aspectRatio);
+                }
+            }
+        }
+        return output;
+    }
+
+    private boolean prepareMediaRecorder(File videoFile, int maxDuration) throws IOException {
+        synchronized (mCameraLock) {
+            mCamera.unlock();
+
+            mMediaRecorder = new MediaRecorder();
+            mMediaRecorder.setCamera(mCamera);
+            mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+            mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
+            mMediaRecorder.setVideoEncodingBitRate(900 * 1024);
+            CamcorderProfile profile = getCamcorderProfile(mVideoQuality);
+            profile.videoBitRate = 900 * 1024;
+            mMediaRecorder.setProfile(profile);
+            if (videoFile == null)
+                videoFile = getVideoFile();
+            if (videoFile == null) {
+                return false;
+            }
+
+            mMediaRecorderOutputFile = videoFile;
+            mMediaRecorder.setOutputFile(videoFile.getPath());
+            mMediaRecorder.setPreviewDisplay(mPreview.getSurface());
+            mMediaRecorder.setOrientationHint(calculateCaptureRotation());
+
+            if (maxDuration > 0) {
+                mMediaRecorder.setMaxDuration(maxDuration);
+                mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
+                    @Override
+                    public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
+                        if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
+                            stopVideo();
+                        }
+                    }
+                });
+            }
+
+            try {
+                mMediaRecorder.prepare();
+            } catch (IllegalStateException e) {
+                releaseMediaRecorder();
+                return false;
+            } catch (IOException e) {
+                releaseMediaRecorder();
+                return false;
+            }
+
+            return true;
+        }
+    }
+
+    private void releaseMediaRecorder() {
+        synchronized (mCameraLock) {
+            if (mMediaRecorder != null) {
+                mMediaRecorder.reset();
+                mMediaRecorder.release();
+                mMediaRecorder = null;
+                mCamera.lock();
+            }
+        }
+    }
+
+    private File getVideoFile() {
+        if (!Environment.getExternalStorageState().equalsIgnoreCase(Environment.MEDIA_MOUNTED)) {
+            return null;
+        }
+
+        File mediaStorageDir = new File(Environment.getExternalStorageDirectory(), "Camera");
+
+        if (!mediaStorageDir.exists()) {
+            if (!mediaStorageDir.mkdirs()) {
+                return null;
+            }
+        }
+
+        return new File(mediaStorageDir.getPath() + File.separator + "video.mp4");
+    }
+
    /**
     * Resolves a CamcorderProfile for the requested quality, degrading one step
     * at a time when the device lacks a profile (2160P→HIGHEST, 1080P→720P→480P
     * →QVGA→LOWEST). A non-zero {@code mVideoBitRate} overrides the profile's
     * default video bit rate.
     *
     * @return the resolved profile, or null for an unrecognized quality constant
     */
    private CamcorderProfile getCamcorderProfile(@VideoQuality int videoQuality) {
        CamcorderProfile camcorderProfile = null;
        switch (videoQuality) {
            case CameraKit.Constants.VIDEO_QUALITY_QVGA:
                if (CamcorderProfile.hasProfile(mCameraId, CamcorderProfile.QUALITY_QVGA)) {
                    camcorderProfile = CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_QVGA);
                } else {
                    camcorderProfile = getCamcorderProfile(CameraKit.Constants.VIDEO_QUALITY_LOWEST);
                }
                break;

            case CameraKit.Constants.VIDEO_QUALITY_480P:
                if (CamcorderProfile.hasProfile(mCameraId, CamcorderProfile.QUALITY_480P)) {
                    camcorderProfile = CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_480P);
                } else {
                    camcorderProfile = getCamcorderProfile(CameraKit.Constants.VIDEO_QUALITY_QVGA);
                }
                break;

            case CameraKit.Constants.VIDEO_QUALITY_720P:
                if (CamcorderProfile.hasProfile(mCameraId, CamcorderProfile.QUALITY_720P)) {
                    camcorderProfile = CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_720P);
                } else {
                    camcorderProfile = getCamcorderProfile(CameraKit.Constants.VIDEO_QUALITY_480P);
                }
                break;

            case CameraKit.Constants.VIDEO_QUALITY_1080P:
                if (CamcorderProfile.hasProfile(mCameraId, CamcorderProfile.QUALITY_1080P)) {
                    camcorderProfile = CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_1080P);
                } else {
                    camcorderProfile = getCamcorderProfile(CameraKit.Constants.VIDEO_QUALITY_720P);
                }
                break;

            case CameraKit.Constants.VIDEO_QUALITY_2160P:
                // QUALITY_2160P may not exist / may throw on older devices.
                try {
                    camcorderProfile = CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_2160P);
                } catch (Exception e) {
                    camcorderProfile = getCamcorderProfile(CameraKit.Constants.VIDEO_QUALITY_HIGHEST);
                }
                break;

            case CameraKit.Constants.VIDEO_QUALITY_HIGHEST:
                camcorderProfile = CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_HIGH);
                break;

            case CameraKit.Constants.VIDEO_QUALITY_LOWEST:
                camcorderProfile = CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_LOW);
                break;
        }

        // Caller-configured bit rate takes precedence over the profile default.
        if (camcorderProfile != null && mVideoBitRate != 0) {
            camcorderProfile.videoBitRate = mVideoBitRate;
        }

        return camcorderProfile;
    }
+
+    void setTapToAutofocusListener(Camera.AutoFocusCallback callback) {
+        if (this.mFocus != FOCUS_TAP) {
+            throw new IllegalArgumentException("Please set the camera to FOCUS_TAP.");
+        }
+
+        this.mAutofocusCallback = callback;
+    }
+
    /** Side length of the tap-to-focus area, in the camera's 2000-unit grid. */
    private int getFocusAreaSize() {
        return FOCUS_AREA_SIZE_DEFAULT;
    }
+
    /** Metering weight assigned to the tap-to-focus area. */
    private int getFocusMeteringAreaWeight() {
        return FOCUS_METERING_AREA_WEIGHT_DEFAULT;
    }
+
+    private void resetFocus(final boolean success, final Camera camera) {
+        mHandler.removeCallbacksAndMessages(null);
+        mHandler.postDelayed(new Runnable() {
+            @Override
+            public void run() {
+                synchronized (mCameraLock) {
+                    if (mCamera != null) {
+                        mCamera.cancelAutoFocus();
+                        Camera.Parameters parameters = getCameraParameters();
+                        if (parameters == null)
+                            return;
+
+                        if (parameters.getFocusMode() != Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE) {
+                            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
+                            parameters.setFocusAreas(null);
+                            parameters.setMeteringAreas(null);
+                            mCamera.setParameters(parameters);
+                        }
+
+                        if (mAutofocusCallback != null) {
+                            mAutofocusCallback.onAutoFocus(success, mCamera);
+                        }
+                    }
+                }
+            }
+        }, DELAY_MILLIS_BEFORE_RESETTING_FOCUS);
+    }
+
+    private Rect calculateFocusArea(float x, float y) {
+        int padding = getFocusAreaSize() / 2;
+        int centerX = (int) (x * 2000);
+        int centerY = (int) (y * 2000);
+
+        int left = centerX - padding;
+        int top = centerY - padding;
+        int right = centerX + padding;
+        int bottom = centerY + padding;
+
+        if (left < 0)
+            left = 0;
+        if (right > 2000)
+            right = 2000;
+        if (top < 0)
+            top = 0;
+        if (bottom > 2000)
+            bottom = 2000;
+
+        return new Rect(left - 1000, top - 1000, right - 1000, bottom - 1000);
+    }
+
+}

+ 104 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/api16/ConstantMapper.java

@@ -0,0 +1,104 @@
+package com.wonderkiln.camerakit.api16;
+
+import android.annotation.TargetApi;
+import android.hardware.Camera;
+import android.hardware.camera2.CameraCharacteristics;
+import android.util.SparseIntArray;
+
+import com.wonderkiln.camerakit.CameraKit;
+
+import androidx.collection.SparseArrayCompat;
+
/**
 * Translates CameraKit integer constants into the values expected by the
 * legacy {@code android.hardware.Camera} API (Flash/Facing) and the camera2
 * API (Flash2/Facing2).
 */
public class ConstantMapper {

    /**
     * Base for one-shot constant translators: construct with a CameraKit
     * constant, call {@link #map()} for the platform value.
     */
    private abstract static class BaseMapper<T> {

        protected int mCameraKitConstant;

        protected BaseMapper(int cameraKitConstant) {
            this.mCameraKitConstant = cameraKitConstant;
        }

        abstract T map();

    }

    /** CameraKit flash constant → Camera1 flash-mode String (default: FLASH_OFF). */
    static class Flash extends BaseMapper<String> {

        private static final SparseArrayCompat<String> FLASH_MODES = new SparseArrayCompat<>();

        static {
            FLASH_MODES.put(CameraKit.Constants.FLASH_OFF, Camera.Parameters.FLASH_MODE_OFF);
            FLASH_MODES.put(CameraKit.Constants.FLASH_ON, Camera.Parameters.FLASH_MODE_ON);
            FLASH_MODES.put(CameraKit.Constants.FLASH_AUTO, Camera.Parameters.FLASH_MODE_AUTO);
            FLASH_MODES.put(CameraKit.Constants.FLASH_TORCH, Camera.Parameters.FLASH_MODE_TORCH);
        }

        protected Flash(int cameraKitConstant) {
            super(cameraKitConstant);
        }

        @Override
        String map() {
            return FLASH_MODES.get(mCameraKitConstant, FLASH_MODES.get(CameraKit.Constants.FLASH_OFF));
        }

    }

    /**
     * Camera2 flash mapper — UNIMPLEMENTED STUB: map() always returns null.
     * Callers must handle the null until this is filled in.
     */
    @TargetApi(21)
    static class Flash2 extends BaseMapper<String> {

        protected Flash2(int cameraKitConstant) {
            super(cameraKitConstant);
        }

        @Override
        String map() {
            return null;
        }

    }

    /** CameraKit facing constant → Camera1 CameraInfo facing int (default: back). */
    static class Facing extends BaseMapper<Integer> {

        private static final SparseArrayCompat<Integer> FACING_MODES = new SparseArrayCompat<>();

        static {
            FACING_MODES.put(CameraKit.Constants.FACING_BACK, Camera.CameraInfo.CAMERA_FACING_BACK);
            FACING_MODES.put(CameraKit.Constants.FACING_FRONT, Camera.CameraInfo.CAMERA_FACING_FRONT);
        }

        protected Facing(int cameraKitConstant) {
            super(cameraKitConstant);
        }

        @Override
        Integer map() {
            return FACING_MODES.get(mCameraKitConstant, FACING_MODES.get(CameraKit.Constants.FACING_BACK));
        }

    }

    /** CameraKit facing constant → camera2 LENS_FACING int (default: back). */
    @TargetApi(21)
    static class Facing2 extends BaseMapper<Integer> {

        private static final SparseIntArray FACING_MODES = new SparseIntArray();

        static {
            FACING_MODES.put(CameraKit.Constants.FACING_BACK, CameraCharacteristics.LENS_FACING_BACK);
            FACING_MODES.put(CameraKit.Constants.FACING_FRONT, CameraCharacteristics.LENS_FACING_FRONT);
        }

        protected Facing2(int cameraKitConstant) {
            super(cameraKitConstant);
        }

        @Override
        Integer map() {
            return FACING_MODES.get(mCameraKitConstant, FACING_MODES.get(CameraKit.Constants.FACING_BACK));
        }

    }


}

+ 56 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/api16/ProcessStillTask.java

@@ -0,0 +1,56 @@
+package com.wonderkiln.camerakit.api16;
+
+import android.graphics.YuvImage;
+import android.hardware.Camera;
+
+import com.wonderkiln.camerakit.utils.Rotation;
+
+class ProcessStillTask implements Runnable {
+
+    private byte[] data;
+    private Camera camera;
+    private int rotation;
+    private OnStillProcessedListener onStillProcessedListener;
+
+    public ProcessStillTask(byte[] data, Camera camera, int rotation, OnStillProcessedListener onStillProcessedListener) {
+        this.data = data;
+        this.camera = camera;
+        this.rotation = rotation;
+        this.onStillProcessedListener = onStillProcessedListener;
+    }
+
+    @Override
+    public void run() {
+        Camera.Parameters parameters = camera.getParameters();
+        int width = parameters.getPreviewSize().width;
+        int height = parameters.getPreviewSize().height;
+        byte[] rotatedData = new Rotation(data, width, height, rotation).getYuv();
+
+        int postWidth;
+        int postHeight;
+
+        switch (rotation) {
+            case 90:
+            case 270:
+                postWidth = height;
+                postHeight = width;
+                break;
+
+            case 0:
+            case 180:
+            default:
+                postWidth = width;
+                postHeight = height;
+                break;
+        }
+
+        YuvImage yuv = new YuvImage(rotatedData, parameters.getPreviewFormat(), postWidth, postHeight, null);
+
+        onStillProcessedListener.onStillProcessed(yuv);
+    }
+
+    interface OnStillProcessedListener {
+        void onStillProcessed(YuvImage yuv);
+    }
+
+}

+ 9 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/api21/Camera2.java

@@ -0,0 +1,9 @@
+package com.wonderkiln.camerakit.api21;
+
+import android.annotation.TargetApi;
+
+@TargetApi(21)
/**
 * Placeholder for the camera2-based implementation — currently an empty stub
 * with a package-private constructor; no camera2 code exists yet.
 */
public class Camera2 {
    Camera2() {
    }
}

+ 90 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/base/CameraImpl.java

@@ -0,0 +1,90 @@
+package com.wonderkiln.camerakit.base;
+
+
+import com.google.android.gms.vision.Detector;
+import com.google.android.gms.vision.text.TextBlock;
+import com.wonderkiln.camerakit.CameraProperties;
+import com.wonderkiln.camerakit.events.EventDispatcher;
+import com.wonderkiln.camerakit.types.CaptureMethod;
+import com.wonderkiln.camerakit.types.Facing;
+import com.wonderkiln.camerakit.types.Flash;
+import com.wonderkiln.camerakit.types.Focus;
+import com.wonderkiln.camerakit.types.VideoQuality;
+import com.wonderkiln.camerakit.utils.Size;
+
+import java.io.File;
+
+import androidx.annotation.Nullable;
+
/**
 * Contract for a camera backend (Camera1/Camera2): lifecycle, configuration,
 * and still/video capture. Concrete implementations report results through
 * the shared {@code EventDispatcher} and render into the given preview.
 */
public abstract class CameraImpl {

    // Shared collaborators injected at construction.
    protected final EventDispatcher mEventDispatcher;
    protected final PreviewImpl     mPreview;

    public  CameraImpl(EventDispatcher eventDispatcher, PreviewImpl preview) {
        mEventDispatcher = eventDispatcher;
        mPreview = preview;
    }

    /** Opens the camera and begins the preview. */
    public abstract void start();

    /** Stops the preview and releases the camera. */
    public abstract void stop();

    public abstract void setDisplayAndDeviceOrientation(int displayOrientation, int deviceOrientation);

    public abstract void setFacing(@Facing int facing);

    public abstract void setFlash(@Flash int flash);

    public abstract void setFocus(@Focus int focus);

    public abstract void setMethod(@CaptureMethod int method);

    public abstract void setTextDetector(Detector<TextBlock> detector);

    public abstract void setVideoQuality(@VideoQuality int videoQuality);

    public abstract void setVideoBitRate(int videoBitRate);

    public abstract void setLockVideoAspectRatio(boolean lockVideoAspectRatio);

    public abstract void setZoom(float zoomFactor);

    public abstract void modifyZoom(float modifier);

    public abstract void setDeviceOrientation(int orientation);

    public abstract void setFocusArea(float x, float y);

    /** Captures a still image and delivers the JPEG bytes to {@code callback}. */
    public abstract void captureImage(ImageCapturedCallback callback);

    public interface ImageCapturedCallback {
        void imageCaptured(byte[] jpeg);
    }

    /** Convenience overload: record with no duration limit (maxDuration = 0). */
    public void captureVideo(File videoFile, VideoCapturedCallback callback) {
        captureVideo(videoFile, 0, callback);
    }

    public abstract void captureVideo(File videoFile, int maxDuration, VideoCapturedCallback callback);

    public interface VideoCapturedCallback {
        void videoCaptured(File file);
    }

    public abstract void stopVideo();

    public abstract Size getCaptureResolution();

    public abstract Size getVideoResolution();

    public abstract Size getPreviewResolution();

    public abstract boolean isCameraOpened();

    public abstract boolean frontCameraOnly();

    /** @return lens properties, or null before the camera has been opened. */
    @Nullable
    public abstract CameraProperties getCameraProperties();

}

+ 89 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/base/CameraViewLayout.java

@@ -0,0 +1,89 @@
+package com.wonderkiln.camerakit.base;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.GestureDetector;
+import android.view.MotionEvent;
+import android.view.ScaleGestureDetector;
+import android.widget.FrameLayout;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+/**
+ * Base layout for the camera view. Translates raw touch input into camera
+ * gestures: single tap -> tap-to-focus, double tap -> toggle facing,
+ * pinch -> zoom. Subclasses supply the camera/preview implementations and
+ * react to the abstract gesture callbacks.
+ */
+public abstract class CameraViewLayout extends FrameLayout {
+
+    private ScaleGestureDetector scaleGestureDetector;
+    private GestureDetector gestureDetector;
+
+    public CameraViewLayout(@NonNull Context context) {
+        this(context, null);
+    }
+
+    public CameraViewLayout(@NonNull Context context, @Nullable AttributeSet attrs) {
+        this(context, attrs, 0);
+    }
+
+    public CameraViewLayout(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
+        super(context, attrs, defStyleAttr);
+        gestureDetector = new GestureDetector(context, onGestureListener);
+        scaleGestureDetector = new ScaleGestureDetector(context, onScaleGestureListener);
+    }
+
+    // Feed every touch event to both detectors; always consume so the
+    // gesture streams stay uninterrupted.
+    @Override
+    public boolean onTouchEvent(MotionEvent event) {
+        gestureDetector.onTouchEvent(event);
+        scaleGestureDetector.onTouchEvent(event);
+        return true;
+    }
+
+    // Intercept all touches so children never steal camera gestures.
+    @Override
+    public boolean onInterceptTouchEvent(MotionEvent ev) {
+        return true;
+    }
+
+    protected abstract CameraImpl getCameraImpl();
+
+    protected abstract PreviewImpl getPreviewImpl();
+
+    /** Called on pinch; {@code start} is true for the first event of a gesture. */
+    protected abstract void onZoom(float zoom, boolean start);
+
+    /** Called on single tap; x/y are normalized to [0, 1] of the view size. */
+    protected abstract void onTapToFocus(float x, float y);
+
+    /** Called on double tap to switch between front/back camera. */
+    protected abstract void onToggleFacing();
+
+    private GestureDetector.SimpleOnGestureListener onGestureListener = new GestureDetector.SimpleOnGestureListener() {
+
+        @Override
+        public boolean onDoubleTap(MotionEvent e) {
+            onToggleFacing();
+            return super.onDoubleTap(e);
+        }
+
+        @Override
+        public boolean onSingleTapConfirmed(MotionEvent e) {
+            // Normalize tap position to fractions of the view dimensions.
+            onTapToFocus(e.getX() / (float) getWidth(), e.getY() / (float) getHeight());
+            return super.onSingleTapConfirmed(e);
+        }
+    };
+
+    private ScaleGestureDetector.OnScaleGestureListener onScaleGestureListener = new ScaleGestureDetector.OnScaleGestureListener() {
+
+        @Override
+        public boolean onScale(ScaleGestureDetector scaleGestureDetector) {
+            onZoom(scaleGestureDetector.getScaleFactor(), false);
+            return true;
+        }
+
+        @Override
+        public boolean onScaleBegin(ScaleGestureDetector scaleGestureDetector) {
+            onZoom(scaleGestureDetector.getScaleFactor(), true);
+            return true;
+        }
+
+        @Override
+        public void onScaleEnd(ScaleGestureDetector scaleGestureDetector) {
+        }
+
+    };
+
+}

+ 83 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/base/PreviewImpl.java

@@ -0,0 +1,83 @@
+package com.wonderkiln.camerakit.base;
+
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.View;
+
+/**
+ * Base class for camera preview surfaces (SurfaceView/TextureView backed).
+ * Tracks the on-screen view size plus the camera preview frame size/format,
+ * and notifies a single {@link Callback} when the backing surface changes.
+ */
+public abstract class PreviewImpl {
+
+    /** Notified when the underlying surface has been created or resized. */
+    public interface Callback {
+        void onSurfaceChanged();
+    }
+
+    private Callback mCallback;
+
+    // Size of the preview view itself, in pixels.
+    private int mWidth;
+    private int mHeight;
+
+    // Size/format of the frames the camera delivers to this surface.
+    protected int mPreviewWidth;
+    protected int mPreviewHeight;
+    protected int mPreviewFormat;
+
+    public void setCallback(Callback callback) {
+        mCallback = callback;
+    }
+
+    public abstract Surface getSurface();
+
+    public abstract View getView();
+
+    /** Output class the camera API should target (e.g. SurfaceHolder or SurfaceTexture). */
+    public abstract Class getOutputClass();
+
+    public abstract void setDisplayOrientation(int displayOrientation);
+
+    public abstract boolean isReady();
+
+    protected void dispatchSurfaceChanged() {
+        // Fix: the surface can change before a callback has been attached;
+        // guard to avoid a NullPointerException in that window.
+        if (mCallback != null) {
+            mCallback.onSurfaceChanged();
+        }
+    }
+
+    /** @return the SurfaceHolder, or null for texture-backed previews. */
+    public SurfaceHolder getSurfaceHolder() {
+        return null;
+    }
+
+    /** @return the SurfaceTexture, or null for holder-backed previews. */
+    public SurfaceTexture getSurfaceTexture() {
+        return null;
+    }
+
+    public void setSize(int width, int height) {
+        mWidth = width;
+        mHeight = height;
+    }
+
+    public int getWidth() {
+        return mWidth;
+    }
+
+    public int getHeight() {
+        return mHeight;
+    }
+
+    public abstract float getX();
+
+    public abstract float getY();
+
+    public void setPreviewParameters(final int width, final int height, final int format) {
+        this.mPreviewWidth = width;
+        this.mPreviewHeight = height;
+        this.mPreviewFormat = format;
+    }
+
+    public int getPreviewWidth() {
+        return mPreviewWidth;
+    }
+
+    public int getPreviewHeight() {
+        return mPreviewHeight;
+    }
+
+    public int getPreviewFormat() {
+        return mPreviewFormat;
+    }
+
+}

+ 92 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/base/SurfaceViewContainer.java

@@ -0,0 +1,92 @@
+package com.wonderkiln.camerakit.base;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.View;
+import android.widget.FrameLayout;
+
+import com.wonderkiln.camerakit.utils.Size;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+/**
+ * FrameLayout that hosts the preview SurfaceView and lays it out center-crop:
+ * the child is scaled to fill the container while keeping the camera preview
+ * aspect ratio, cropping the overflow dimension.
+ */
+public class SurfaceViewContainer extends FrameLayout {
+
+    private Size mPreviewSize;
+    private int  mDisplayOrientation;
+
+    public SurfaceViewContainer(@NonNull Context context) {
+        super(context);
+    }
+
+    public SurfaceViewContainer(@NonNull Context context, @Nullable AttributeSet attrs) {
+        super(context, attrs);
+    }
+
+    public SurfaceViewContainer(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
+        super(context, attrs, defStyleAttr);
+    }
+
+    // Measure only the container; the child is positioned manually in layoutChild().
+    @Override
+    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+        final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
+        final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
+        setMeasuredDimension(width, height);
+    }
+
+    @Override
+    protected void onLayout(boolean changed, int l, int t, int r, int b) {
+        if (changed && getChildCount() > 0) {
+            layoutChild(r - l, b - t);
+        }
+    }
+
+    // Center-crop the first child to the container using the preview aspect
+    // ratio (falls back to the container size when no preview size is known).
+    private void layoutChild(int width, int height) {
+        final View child = getChildAt(0);
+
+        int previewWidth = width;
+        int previewHeight = height;
+        if (mPreviewSize != null) {
+            previewWidth = mPreviewSize.getWidth();
+            previewHeight = mPreviewSize.getHeight();
+        }
+
+        // Cross-multiplied aspect comparison avoids float division.
+        if (width * previewHeight > height * previewWidth) {
+            final int scaledChildHeight = previewHeight * width / previewWidth;
+            child.layout(0, (height - scaledChildHeight) / 2, width, (height + scaledChildHeight) / 2);
+        } else {
+            final int scaledChildWidth = previewWidth * height / previewHeight;
+            child.layout((width - scaledChildWidth) / 2, 0, (width + scaledChildWidth) / 2, height);
+        }
+    }
+
+    public void setPreviewSize(Size previewSize) {
+        setPreviewSize(previewSize, mDisplayOrientation);
+    }
+
+    public void setPreviewSize(Size previewSize, int displayOrientation) {
+        // NOTE(review): when mDisplayOrientation is already 90/270, neither
+        // branch below stores the new size — confirm this is intentional.
+        if (mDisplayOrientation == 0 || mDisplayOrientation == 180) {
+            this.mPreviewSize = previewSize;
+        } else if ((displayOrientation == 90 || displayOrientation == 270) && (mDisplayOrientation != 90 && mDisplayOrientation != 270)) {
+            // Swap dimensions once when transitioning into a rotated orientation.
+            this.mPreviewSize = new Size(previewSize.getHeight(), previewSize.getWidth());
+        }
+
+        if (getChildCount() > 0) {
+            // Re-layout on the next UI pass with the current container size.
+            post(new Runnable() {
+                @Override
+                public void run() {
+                    layoutChild(getWidth(), getHeight());
+                }
+            });
+        }
+    }
+
+    public void setDisplayOrientation(int displayOrientation) {
+        // NOTE(review): when a preview size exists, mDisplayOrientation itself
+        // is never updated here — verify against callers that this is desired.
+        if (mPreviewSize != null) {
+            setPreviewSize(mPreviewSize, displayOrientation);
+        } else {
+            this.mDisplayOrientation = displayOrientation;
+        }
+    }
+
+}

+ 112 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/base/SurfaceViewPreview.java

@@ -0,0 +1,112 @@
+package com.wonderkiln.camerakit.base;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.ViewGroup;
+
+import com.camerakit.R;
+import com.wonderkiln.camerakit.utils.Size;
+
+
+/**
+ * SurfaceView-backed {@link PreviewImpl}. Inflates the surface_view layout
+ * into the given parent and forwards surface lifecycle changes to the
+ * callback once preview parameters are known.
+ */
+public class SurfaceViewPreview extends PreviewImpl {
+
+    private Context mContext;
+    private ViewGroup mParent;
+    private SurfaceViewContainer mContainer;
+    private SurfaceView mSurfaceView;
+
+    private int mDisplayOrientation;
+
+    public  SurfaceViewPreview(final Context context, ViewGroup parent) {
+        this.mContext = context;
+        this.mParent = parent;
+
+        // Inflate the container + SurfaceView directly into the parent view.
+        final View view = View.inflate(context, R.layout.surface_view, parent);
+        mContainer = view.findViewById(R.id.surface_view_container);
+        // Keep the tracked size in sync with the container's layout.
+        mContainer.addOnLayoutChangeListener(new View.OnLayoutChangeListener() {
+            @Override
+            public void onLayoutChange(View view, int i, int i1, int i2, int i3, int i4, int i5, int i6, int i7) {
+                setSize(mContainer.getWidth(), mContainer.getHeight());
+            }
+        });
+
+
+        mSurfaceView = mContainer.findViewById(R.id.surface_view);
+
+        final SurfaceHolder holder = mSurfaceView.getHolder();
+
+        holder.addCallback(new SurfaceHolder.Callback() {
+            @Override
+            public void surfaceCreated(SurfaceHolder holder) {
+            }
+
+            @Override
+            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+                // Only dispatch once preview parameters (width/height) are set.
+                if (isReady()) dispatchSurfaceChanged();
+            }
+
+            @Override
+            public void surfaceDestroyed(SurfaceHolder holder) {
+            }
+        });
+    }
+
+    @Override
+    public SurfaceHolder getSurfaceHolder() {
+        return mSurfaceView.getHolder();
+    }
+
+    @Override
+    public  Surface getSurface() {
+        return getSurfaceHolder().getSurface();
+    }
+
+    @Override
+    public  View getView() {
+        return mContainer;
+    }
+
+    @Override
+    public   Class getOutputClass() {
+        return SurfaceHolder.class;
+    }
+
+    @Override
+    public  void setDisplayOrientation(int displayOrientation) {
+        mDisplayOrientation = displayOrientation;
+        mContainer.setDisplayOrientation(displayOrientation);
+    }
+
+    // Ready once the camera has reported a non-zero preview size.
+    @Override
+    public boolean isReady() {
+        return getPreviewWidth() != 0 && getPreviewHeight() != 0;
+    }
+
+    @Override
+    public  float getX() {
+        return mContainer.getChildAt(0).getX();
+    }
+
+    @Override
+    public  float getY() {
+        return mContainer.getChildAt(0).getY();
+    }
+
+    @TargetApi(15)
+    @Override
+    public void setPreviewParameters(final int width, final int height, final int format) {
+        super.setPreviewParameters(width, height, format);
+        mContainer.setPreviewSize(new Size(width, height));
+        // setFixedSize must run after the container has applied the new size.
+        mContainer.post(new Runnable() {
+            @Override
+            public void run() {
+                getSurfaceHolder().setFixedSize(getPreviewWidth(), getPreviewHeight());
+            }
+        });
+    }
+
+}

+ 26 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitError.java

@@ -0,0 +1,26 @@
+package com.wonderkiln.camerakit.events;
+
+
+import androidx.annotation.Nullable;
+
+/**
+ * Event dispatched when the camera pipeline fails; optionally carries the
+ * underlying exception.
+ */
+public class CameraKitError extends CameraKitEvent {
+
+    // Fix: removed dead private `type`/`message` fields that uselessly
+    // shadowed the superclass's own (private) storage.
+    private Exception exception;
+
+    public CameraKitError() {
+        super(TYPE_ERROR);
+    }
+
+    public CameraKitError(Exception exception) {
+        super(TYPE_ERROR);
+        this.exception = exception;
+    }
+
+    /** @return the causing exception, or null when none was provided. */
+    @Nullable
+    public Exception getException() {
+        return exception;
+    }
+
+}

+ 69 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitEvent.java

@@ -0,0 +1,69 @@
+package com.wonderkiln.camerakit.events;
+
+import android.os.Bundle;
+
+import androidx.annotation.NonNull;
+
+/**
+ * Base event for the CameraKit event bus. Every event carries a type string
+ * (one of the TYPE_* constants), an optional human-readable message, and an
+ * extras Bundle.
+ */
+public class CameraKitEvent {
+
+    public static final String TYPE_ERROR = "CameraKitError";
+
+    public static final String TYPE_CAMERA_OPEN = "CKCameraOpenedEvent";
+    public static final String TYPE_CAMERA_CLOSE = "CKCameraStoppedEvent";
+
+    public static final String TYPE_FACING_CHANGED = "CKFacingChangedEvent";
+    public static final String TYPE_FLASH_CHANGED = "CKFlashChangedEvent";
+
+    public static final String TYPE_IMAGE_CAPTURED = "CKImageCapturedEvent";
+    public static final String TYPE_VIDEO_CAPTURED = "CKVideoCapturedEvent";
+
+    public static final String TYPE_FOCUS_MOVED = "CKFocusMovedEvent";
+
+    public static final String TYPE_TEXT_DETECTED = "CKTextDetectedEvent";
+
+    private String type;
+    private String message;
+
+    private Bundle data;
+
+    // Private: forces every event to be constructed with an explicit type.
+    private CameraKitEvent() {
+    }
+
+    public CameraKitEvent(@NonNull String type) {
+        this.type = type;
+        data = new Bundle();
+    }
+
+    public void setMessage(String message) {
+        this.message = message;
+    }
+
+    @NonNull
+    public String getType() {
+        return type;
+    }
+
+    /** @return the message, or an empty string when none was set. */
+    @NonNull
+    public String getMessage() {
+        if (message != null) {
+            return message;
+        }
+
+        return "";
+    }
+
+    /** @return the extras Bundle; a fresh, unretained Bundle if data is null. */
+    @NonNull
+    public Bundle getData() {
+        if (data != null) {
+            return data;
+        }
+
+        return new Bundle();
+    }
+
+    @Override
+    public String toString() {
+        return String.format("%s: %s", getType(), getMessage());
+    }
+
+}

+ 5 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitEventCallback.java

@@ -0,0 +1,5 @@
+package com.wonderkiln.camerakit.events;
+
+/** Typed single-event callback for a specific {@link CameraKitEvent} subtype. */
+public interface CameraKitEventCallback<T extends CameraKitEvent> {
+    void callback(T event);
+}

+ 8 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitEventListener.java

@@ -0,0 +1,8 @@
+package com.wonderkiln.camerakit.events;
+
+/**
+ * Listener for all CameraKit events. onEvent fires for every event; the
+ * specialized methods additionally fire for their matching subtype.
+ */
+public interface CameraKitEventListener {
+    void onEvent(CameraKitEvent event);
+    void onError(CameraKitError error);
+    void onImage(CameraKitImage image);
+    void onVideo(CameraKitVideo video);
+}

+ 21 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitEventListenerAdapter.java

@@ -0,0 +1,21 @@
+package com.wonderkiln.camerakit.events;
+
+/**
+ * No-op implementation of {@link CameraKitEventListener}; extend and override
+ * only the callbacks you care about.
+ */
+public abstract class CameraKitEventListenerAdapter implements CameraKitEventListener {
+
+    @Override
+    public void onEvent(CameraKitEvent event) {
+    }
+
+    @Override
+    public void onError(CameraKitError error) {
+    }
+
+    @Override
+    public void onImage(CameraKitImage image) {
+    }
+
+    @Override
+    public void onVideo(CameraKitVideo video) {
+    }
+
+}

+ 23 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitImage.java

@@ -0,0 +1,23 @@
+package com.wonderkiln.camerakit.events;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+
+/** Event carrying a captured still image as JPEG bytes. */
+public class CameraKitImage extends CameraKitEvent {
+
+    private byte[] jpeg;
+
+    public CameraKitImage(byte[] jpeg) {
+        super(TYPE_IMAGE_CAPTURED);
+        this.jpeg = jpeg;
+    }
+
+    public byte[] getJpeg() {
+        return jpeg;
+    }
+
+    /** Decodes the full JPEG on every call — cache the result if reused. */
+    public Bitmap getBitmap() {
+        return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
+    }
+
+}

+ 18 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/events/CameraKitVideo.java

@@ -0,0 +1,18 @@
+package com.wonderkiln.camerakit.events;
+
+import java.io.File;
+
+/** Event carrying the file a captured video was written to. */
+public class CameraKitVideo extends CameraKitEvent {
+
+    private File videoFile;
+
+    public  CameraKitVideo(File videoFile) {
+        super(TYPE_VIDEO_CAPTURED);
+        this.videoFile = videoFile;
+    }
+
+    public File getVideoFile() {
+        return videoFile;
+    }
+
+}

+ 120 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/events/EventDispatcher.java

@@ -0,0 +1,120 @@
+package com.wonderkiln.camerakit.events;
+
+import android.os.Handler;
+import android.os.Looper;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import androidx.annotation.NonNull;
+
+/**
+ * Delivers {@link CameraKitEvent}s on the main thread to registered
+ * listeners and to arbitrary objects whose methods are annotated with
+ * {@link OnCameraKitEvent} (reflection-based bindings).
+ */
+public class EventDispatcher {
+
+    private Handler mainThreadHandler;
+
+    // NOTE(review): plain ArrayLists — add* while a dispatch is iterating on
+    // the main thread could throw ConcurrentModificationException; confirm
+    // registration only happens on the main thread.
+    private List<CameraKitEventListener> listeners;
+    private List<BindingHandler> bindings;
+
+    public EventDispatcher() {
+        this.mainThreadHandler = new Handler(Looper.getMainLooper());
+        this.listeners = new ArrayList<>();
+        this.bindings = new ArrayList<>();
+    }
+
+    public void addListener(CameraKitEventListener listener) {
+        this.listeners.add(listener);
+    }
+
+    public void addBinding(Object binding) {
+        this.bindings.add(new BindingHandler(binding));
+    }
+
+    /** Posts the event to the main thread and fans it out to all receivers. */
+    public void dispatch(final CameraKitEvent event) {
+        mainThreadHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                for (CameraKitEventListener listener : listeners) {
+                    listener.onEvent(event);
+                    // Also route to the type-specific callback.
+                    if (event instanceof CameraKitError) listener.onError((CameraKitError) event);
+                    if (event instanceof CameraKitImage) listener.onImage((CameraKitImage) event);
+                    if (event instanceof CameraKitVideo) listener.onVideo((CameraKitVideo) event);
+                }
+
+                for (BindingHandler handler : bindings) {
+                    try {
+                        handler.dispatchEvent(event);
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+        });
+    }
+
+    /**
+     * Indexes a binding object's @OnCameraKitEvent methods by event class.
+     * NOTE(review): getDeclaredMethods skips inherited methods, and
+     * setAccessible is never called, so non-public annotated methods will
+     * fail with IllegalAccessException on invoke — verify expected usage.
+     */
+    private class BindingHandler {
+
+        private Map<Class, List<MethodHolder>> methods;
+
+        public BindingHandler(@NonNull Object binding) {
+            this.methods = new HashMap<>();
+
+            for (Method method : binding.getClass().getDeclaredMethods()) {
+                if (method.isAnnotationPresent(OnCameraKitEvent.class)) {
+                    OnCameraKitEvent annotation = method.getAnnotation(OnCameraKitEvent.class);
+                    Class<? extends CameraKitEvent> eventType = annotation.value();
+                    addMethod(binding, method, eventType, methods);
+                }
+            }
+        }
+
+        private void addMethod(Object binding, Method method, Class<? extends CameraKitEvent> type, Map<Class, List<MethodHolder>> store) {
+            if (!store.containsKey(type)) {
+                store.put(type, new ArrayList<MethodHolder>());
+            }
+
+            store.get(type).add(new MethodHolder(binding, method));
+        }
+
+        // Invokes catch-all handlers (bound to CameraKitEvent.class) first,
+        // then handlers bound to the event's exact runtime class.
+        public void dispatchEvent(@NonNull CameraKitEvent event) throws IllegalAccessException, InvocationTargetException {
+            List<MethodHolder> baseMethods = methods.get(CameraKitEvent.class);
+            if (baseMethods != null) {
+                for (MethodHolder methodHolder : baseMethods) {
+                    methodHolder.getMethod().invoke(methodHolder.getBinding(), event);
+                }
+            }
+
+            List<MethodHolder> targetMethods = methods.get(event.getClass());
+            if (targetMethods != null) {
+                for (MethodHolder methodHolder : targetMethods) {
+                    methodHolder.getMethod().invoke(methodHolder.getBinding(), event);
+                }
+            }
+        }
+
+        /** Pairs a bound target object with one of its handler methods. */
+        private class MethodHolder {
+
+            private Object binding;
+            private Method method;
+
+            public MethodHolder(Object binding, Method method) {
+                this.binding = binding;
+                this.method = method;
+            }
+
+            public Object getBinding() {
+                return binding;
+            }
+
+            public Method getMethod() {
+                return method;
+            }
+
+        }
+
+    }
+
+}

+ 12 - 0
camerakit/src/main/java/com/wonderkiln/camerakit/events/OnCameraKitEvent.java

@@ -0,0 +1,12 @@
+package com.wonderkiln.camerakit.events;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Marks a method as a CameraKit event handler for {@link EventDispatcher}
+ * bindings. {@code value} selects which event subtype triggers the method;
+ * the default (CameraKitEvent.class) receives every event.
+ */
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface OnCameraKitEvent {
+    Class<? extends CameraKitEvent> value() default CameraKitEvent.class;
+}

Alguns ficheiros não foram mostrados porque muitos ficheiros mudaram neste diff