yonge 3 years ago
parent
commit
4629ca28ff
62 changed files with 7674 additions and 81 deletions
  1. 121 29
      audio-analysis/pom.xml
  2. 0 13
      audio-analysis/src/main/java/com/yonge/audio/App.java
  3. 49 0
      audio-analysis/src/main/java/com/yonge/audio/AudioAnalysisServerApplication.java
  4. 1084 0
      audio-analysis/src/main/java/com/yonge/audio/analysis/AudioFloatConverter.java
  5. 158 0
      audio-analysis/src/main/java/com/yonge/audio/analysis/Complex.java
  6. 167 0
      audio-analysis/src/main/java/com/yonge/audio/analysis/FFT.java
  7. 141 0
      audio-analysis/src/main/java/com/yonge/audio/analysis/Signals.java
  8. 52 0
      audio-analysis/src/main/java/com/yonge/audio/analysis/detector/FrequencyDetector.java
  9. 223 0
      audio-analysis/src/main/java/com/yonge/audio/analysis/detector/YINPitchDetector.java
  10. 73 0
      audio-analysis/src/main/java/com/yonge/audio/config/LocalFastJsonHttpMessageConverter.java
  11. 44 0
      audio-analysis/src/main/java/com/yonge/audio/config/ResourceServerConfig.java
  12. 35 0
      audio-analysis/src/main/java/com/yonge/audio/config/WebMvcConfig.java
  13. 98 0
      audio-analysis/src/main/java/com/yonge/audio/utils/ArrayUtil.java
  14. 42 0
      audio-analysis/src/main/java/com/yonge/netty/common/message/Message.java
  15. 34 0
      audio-analysis/src/main/java/com/yonge/netty/common/message/MessageDispatcher.java
  16. 20 0
      audio-analysis/src/main/java/com/yonge/netty/common/message/MessageHandler.java
  17. 89 0
      audio-analysis/src/main/java/com/yonge/netty/common/message/MessageHandlerContainer.java
  18. 100 0
      audio-analysis/src/main/java/com/yonge/netty/dto/ChunkAnalysis.java
  19. 134 0
      audio-analysis/src/main/java/com/yonge/netty/dto/HardLevelEnum.java
  20. 187 0
      audio-analysis/src/main/java/com/yonge/netty/dto/NoteAnalysis.java
  21. 107 0
      audio-analysis/src/main/java/com/yonge/netty/dto/NoteFrequencyRange.java
  22. 28 0
      audio-analysis/src/main/java/com/yonge/netty/dto/NotePlayResult.java
  23. 88 0
      audio-analysis/src/main/java/com/yonge/netty/dto/SectionAnalysis.java
  24. 1009 0
      audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java
  25. 67 0
      audio-analysis/src/main/java/com/yonge/netty/dto/WebSocketResponse.java
  26. 182 0
      audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlBasicInfo.java
  27. 116 0
      audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlNote.java
  28. 60 0
      audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlSection.java
  29. 155 0
      audio-analysis/src/main/java/com/yonge/netty/server/NettyServer.java
  30. 9 0
      audio-analysis/src/main/java/com/yonge/netty/server/handler/ChannelContextConstants.java
  31. 135 0
      audio-analysis/src/main/java/com/yonge/netty/server/handler/NettyChannelManager.java
  32. 77 0
      audio-analysis/src/main/java/com/yonge/netty/server/handler/NettyServerHandler.java
  33. 78 0
      audio-analysis/src/main/java/com/yonge/netty/server/handler/message/BinaryWebSocketFrameHandler.java
  34. 12 0
      audio-analysis/src/main/java/com/yonge/netty/server/handler/message/MessageHandler.java
  35. 75 0
      audio-analysis/src/main/java/com/yonge/netty/server/handler/message/TextWebSocketHandler.java
  36. 112 0
      audio-analysis/src/main/java/com/yonge/netty/server/processor/WaveformWriter.java
  37. 366 0
      audio-analysis/src/main/java/com/yonge/netty/server/service/AudioCompareHandler.java
  38. 90 0
      audio-analysis/src/main/java/com/yonge/netty/server/service/PitchDetectionHandler.java
  39. 32 0
      audio-analysis/src/main/java/com/yonge/netty/server/service/UserChannelContextService.java
  40. 128 0
      audio-analysis/src/main/resources/application-template.yml
  41. 16 0
      audio-analysis/src/main/resources/bootstrap-prod.properties
  42. 16 0
      audio-analysis/src/main/resources/bootstrap-sim.properties
  43. 16 0
      audio-analysis/src/main/resources/bootstrap-test.properties
  44. 0 38
      audio-analysis/src/test/java/com/yonge/audio/AppTest.java
  45. 1 0
      cooleshow-gateway/.gitignore
  46. 1 0
      cooleshow-im/.gitignore
  47. 72 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/dao/SysMusicCompareRecordDao.java
  48. 109 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/dto/IndexBaseDto.java
  49. 154 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/dto/IndexBaseMonthData.java
  50. 54 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/dto/StudentTrainChartDto.java
  51. 43 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/dto/StudentTrainOverviewDto.java
  52. 1 1
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/entity/AppVersionInfo.java
  53. 274 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/entity/SysMusicCompareRecord.java
  54. 35 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/enums/DeviceTypeEnum.java
  55. 27 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/enums/FeatureType.java
  56. 71 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/enums/HeardLevelEnum.java
  57. 139 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/enums/IndexDataType.java
  58. 125 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/queryInfo/SysMusicCompareRecordQueryInfo.java
  59. 46 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/service/SysMusicCompareRecordService.java
  60. 195 0
      cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/service/impl/SysMusicCompareRecordServiceImpl.java
  61. 429 0
      cooleshow-user/user-biz/src/main/resources/config/mybatis/SysMusicCompareRecordMapper.xml
  62. 73 0
      cooleshow-user/user-student/src/main/java/com/yonge/cooleshow/student/controller/SysMusicCompareRecordController.java

+ 121 - 29
audio-analysis/pom.xml

@@ -1,31 +1,123 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  
-  <parent>
-    <groupId>com.yonge.cooleshow</groupId>
-    <artifactId>cooleshow</artifactId>
-    <version>1.0</version>
-  </parent>
-
-  <groupId>com.yonge.audio</groupId>
-  <artifactId>audio-analysis</artifactId>
-  <version>1.0</version>
-  <packaging>jar</packaging>
-
-  <name>audio-analysis</name>
-  <url>http://maven.apache.org</url>
-
-  <properties>
-    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>3.8.1</version>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+
+	<parent>
+		<groupId>com.yonge.cooleshow</groupId>
+		<artifactId>cooleshow</artifactId>
+		<version>1.0</version>
+	</parent>
+
+	<groupId>com.yonge.audio</groupId>
+	<artifactId>audio-analysis</artifactId>
+	<version>1.0</version>
+	<packaging>jar</packaging>
+
+	<name>audio-analysis</name>
+	<url>http://maven.apache.org</url>
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<dependencies>
+		<dependency>
+			<groupId>com.github.st-h</groupId>
+			<artifactId>TarsosDSP</artifactId>
+			<version>2.4.1</version>
+		</dependency>
+
+		<dependency>
+			<groupId>org.springframework.cloud</groupId>
+			<artifactId>spring-cloud-starter-netflix-eureka-client</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.alibaba.cloud</groupId>
+			<artifactId>spring-cloud-starter-alibaba-nacos-config</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.alibaba.cloud</groupId>
+			<artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
+		</dependency>
+
+		<!-- swagger-spring-boot -->
+		<dependency>
+			<groupId>com.spring4all</groupId>
+			<artifactId>swagger-spring-boot-starter</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.github.xiaoymin</groupId>
+			<artifactId>swagger-bootstrap-ui</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.alibaba</groupId>
+			<artifactId>druid-spring-boot-starter</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>mysql</groupId>
+			<artifactId>mysql-connector-java</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.yonge.cooleshow</groupId>
+			<artifactId>auth-api</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.yonge.cooleshow</groupId>
+			<artifactId>user-biz</artifactId>
+			<exclusions>
+				<!-- <exclusion> <groupId>org.redisson</groupId> <artifactId>redisson-spring-boot-starter</artifactId> 
+					</exclusion> -->
+				<exclusion>
+					<artifactId>netty-common</artifactId>
+					<groupId>io.netty</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>netty-transport</artifactId>
+					<groupId>io.netty</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>netty-resolver</artifactId>
+					<groupId>io.netty</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>netty-buffer</artifactId>
+					<groupId>io.netty</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>netty-resolver-dns</artifactId>
+					<groupId>io.netty</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>netty-handler</artifactId>
+					<groupId>io.netty</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>netty-codec</artifactId>
+					<groupId>io.netty</groupId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+
+		<dependency>
+			<groupId>io.netty</groupId>
+			<artifactId>netty-all</artifactId>
+			<version>4.1.68.Final</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.springframework.boot</groupId>
+				<artifactId>spring-boot-maven-plugin</artifactId>
+			</plugin>
+		</plugins>
+	</build>
 </project>

+ 0 - 13
audio-analysis/src/main/java/com/yonge/audio/App.java

@@ -1,13 +0,0 @@
-package com.yonge.audio;
-
-/**
- * Hello world!
- *
- */
-public class App 
-{
-    public static void main( String[] args )
-    {
-        System.out.println( "Hello World!" );
-    }
-}

+ 49 - 0
audio-analysis/src/main/java/com/yonge/audio/AudioAnalysisServerApplication.java

@@ -0,0 +1,49 @@
+package com.yonge.audio;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.mybatis.spring.annotation.MapperScan;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.autoconfigure.http.HttpMessageConverters;
+import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
+import org.springframework.cloud.client.loadbalancer.LoadBalanced;
+import org.springframework.cloud.openfeign.EnableFeignClients;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.http.MediaType;
+import org.springframework.web.client.RestTemplate;
+
+import com.spring4all.swagger.EnableSwagger2Doc;
+import com.yonge.audio.config.LocalFastJsonHttpMessageConverter;
+
+@SpringBootApplication
+@EnableDiscoveryClient
+@EnableFeignClients("com.ym.mec")
+@MapperScan("com.ym.mec.biz.dal.dao")
+@ComponentScan(basePackages = { "com.yonge.netty", "com.ym.mec", "com.yonge.log" })
+@Configuration
+@EnableSwagger2Doc
+public class AudioAnalysisServerApplication {
+
+	public static void main(String[] args) {
+		SpringApplication.run(AudioAnalysisServerApplication.class, args);
+	}
+
+	@Bean
+	@LoadBalanced
+	public RestTemplate restTemplate() {
+		return new RestTemplate();
+	}
+	
+	@Bean
+    public HttpMessageConverters fastJsonHttpMessageConverters(){
+		LocalFastJsonHttpMessageConverter converter = new LocalFastJsonHttpMessageConverter();
+        List<MediaType> fastMediaTypes =  new ArrayList<MediaType>();
+        fastMediaTypes.add(MediaType.APPLICATION_JSON_UTF8);
+        converter.setSupportedMediaTypes(fastMediaTypes);
+        return new HttpMessageConverters(converter);
+    }
+}

+ 1084 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/AudioFloatConverter.java

@@ -0,0 +1,1084 @@
+/*
+*      _______                       _____   _____ _____  
+*     |__   __|                     |  __ \ / ____|  __ \ 
+*        | | __ _ _ __ ___  ___  ___| |  | | (___ | |__) |
+*        | |/ _` | '__/ __|/ _ \/ __| |  | |\___ \|  ___/ 
+*        | | (_| | |  \__ \ (_) \__ \ |__| |____) | |     
+*        |_|\__,_|_|  |___/\___/|___/_____/|_____/|_|     
+*                                                         
+* -------------------------------------------------------------
+*
+* TarsosDSP is developed by Joren Six at IPEM, University Ghent
+*  
+* -------------------------------------------------------------
+*
+*  Info: http://0110.be/tag/TarsosDSP
+*  Github: https://github.com/JorenSix/TarsosDSP
+*  Releases: http://0110.be/releases/TarsosDSP/
+*  
+*  TarsosDSP includes modified source code by various authors,
+*  for credits and info, see README.
+* 
+*/
+
+
+/*
+ * Copyright 2007 Sun Microsystems, Inc.  All Rights Reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Sun designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Sun in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
+ * CA 95054 USA or visit www.sun.com if you need additional information or
+ * have any questions.
+ */
+package com.yonge.audio.analysis;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.DoubleBuffer;
+import java.nio.FloatBuffer;
+
+import javax.sound.sampled.AudioFormat;
+import javax.sound.sampled.AudioFormat.Encoding;
+
+
+/**
+ * This class is used to convert between 8,16,24,32,32+ bit signed/unsigned
+ * big/litle endian fixed/floating point byte buffers and float buffers.
+ * 
+ * @author Karl Helgason
+ */
+public abstract class AudioFloatConverter {
+
+    public static final Encoding PCM_FLOAT = new Encoding("PCM_FLOAT");
+    
+    /***************************************************************************
+     * 
+     * LSB Filter, used filter least significant byte in samples arrays.
+     * 
+     * Is used filter out data in lsb byte when SampleSizeInBits is not
+     * dividable by 8.
+     * 
+     **************************************************************************/
+
+    private static class AudioFloatLSBFilter extends AudioFloatConverter {
+
+        private AudioFloatConverter converter;
+
+        final private int offset;
+
+        final private int stepsize;
+
+        final private byte mask;
+
+        private byte[] mask_buffer;
+
+        public AudioFloatLSBFilter(AudioFloatConverter converter,
+        		AudioFormat format) {
+            int bits = format.getSampleSizeInBits();
+            boolean bigEndian = format.isBigEndian();
+            this.converter = converter;
+            stepsize = (bits + 7) / 8;
+            offset = bigEndian ? (stepsize - 1) : 0;
+            int lsb_bits = bits % 8;
+            if (lsb_bits == 0)
+                mask = (byte) 0x00;
+            else if (lsb_bits == 1)
+                mask = (byte) 0x80;
+            else if (lsb_bits == 2)
+                mask = (byte) 0xC0;
+            else if (lsb_bits == 3)
+                mask = (byte) 0xE0;
+            else if (lsb_bits == 4)
+                mask = (byte) 0xF0;
+            else if (lsb_bits == 5)
+                mask = (byte) 0xF8;
+            else if (lsb_bits == 6)
+                mask = (byte) 0xFC;
+            else if (lsb_bits == 7)
+                mask = (byte) 0xFE;
+            else
+                mask = (byte) 0xFF;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            byte[] ret = converter.toByteArray(in_buff, in_offset, in_len,
+                    out_buff, out_offset);
+
+            int out_offset_end = in_len * stepsize;
+            for (int i = out_offset + offset; i < out_offset_end; i += stepsize) {
+                out_buff[i] = (byte) (out_buff[i] & mask);
+            }
+
+            return ret;
+        }
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            if (mask_buffer == null || mask_buffer.length < in_buff.length)
+                mask_buffer = new byte[in_buff.length];
+            System.arraycopy(in_buff, 0, mask_buffer, 0, in_buff.length);
+            int in_offset_end = out_len * stepsize;
+            for (int i = in_offset + offset; i < in_offset_end; i += stepsize) {
+                mask_buffer[i] = (byte) (mask_buffer[i] & mask);
+            }
+            float[] ret = converter.toFloatArray(mask_buffer, in_offset,
+                    out_buff, out_offset, out_len);
+            return ret;
+        }
+
+    }
+
+    /***************************************************************************
+     * 
+     * 64 bit float, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 64 bit float, little-endian
+    private static class AudioFloatConversion64L extends AudioFloatConverter {
+        ByteBuffer bytebuffer = null;
+
+        DoubleBuffer floatbuffer = null;
+
+        double[] double_buff = null;
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int in_len = out_len * 8;
+            if (bytebuffer == null || bytebuffer.capacity() < in_len) {
+                bytebuffer = ByteBuffer.allocate(in_len).order(
+                        ByteOrder.LITTLE_ENDIAN);
+                floatbuffer = bytebuffer.asDoubleBuffer();
+            }
+            bytebuffer.position(0);
+            floatbuffer.position(0);
+            bytebuffer.put(in_buff, in_offset, in_len);
+            if (double_buff == null
+                    || double_buff.length < out_len + out_offset)
+                double_buff = new double[out_len + out_offset];
+            floatbuffer.get(double_buff, out_offset, out_len);
+            int out_offset_end = out_offset + out_len;
+            for (int i = out_offset; i < out_offset_end; i++) {
+                out_buff[i] = (float) double_buff[i];
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int out_len = in_len * 8;
+            if (bytebuffer == null || bytebuffer.capacity() < out_len) {
+                bytebuffer = ByteBuffer.allocate(out_len).order(
+                        ByteOrder.LITTLE_ENDIAN);
+                floatbuffer = bytebuffer.asDoubleBuffer();
+            }
+            floatbuffer.position(0);
+            bytebuffer.position(0);
+            if (double_buff == null || double_buff.length < in_offset + in_len)
+                double_buff = new double[in_offset + in_len];
+            int in_offset_end = in_offset + in_len;
+            for (int i = in_offset; i < in_offset_end; i++) {
+                double_buff[i] = in_buff[i];
+            }
+            floatbuffer.put(double_buff, in_offset, in_len);
+            bytebuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+    }
+
+    // PCM 64 bit float, big-endian
+    private static class AudioFloatConversion64B extends AudioFloatConverter {
+        ByteBuffer bytebuffer = null;
+
+        DoubleBuffer floatbuffer = null;
+
+        double[] double_buff = null;
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int in_len = out_len * 8;
+            if (bytebuffer == null || bytebuffer.capacity() < in_len) {
+                bytebuffer = ByteBuffer.allocate(in_len).order(
+                        ByteOrder.BIG_ENDIAN);
+                floatbuffer = bytebuffer.asDoubleBuffer();
+            }
+            bytebuffer.position(0);
+            floatbuffer.position(0);
+            bytebuffer.put(in_buff, in_offset, in_len);
+            if (double_buff == null
+                    || double_buff.length < out_len + out_offset)
+                double_buff = new double[out_len + out_offset];
+            floatbuffer.get(double_buff, out_offset, out_len);
+            int out_offset_end = out_offset + out_len;
+            for (int i = out_offset; i < out_offset_end; i++) {
+                out_buff[i] = (float) double_buff[i];
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int out_len = in_len * 8;
+            if (bytebuffer == null || bytebuffer.capacity() < out_len) {
+                bytebuffer = ByteBuffer.allocate(out_len).order(
+                        ByteOrder.BIG_ENDIAN);
+                floatbuffer = bytebuffer.asDoubleBuffer();
+            }
+            floatbuffer.position(0);
+            bytebuffer.position(0);
+            if (double_buff == null || double_buff.length < in_offset + in_len)
+                double_buff = new double[in_offset + in_len];
+            int in_offset_end = in_offset + in_len;
+            for (int i = in_offset; i < in_offset_end; i++) {
+                double_buff[i] = in_buff[i];
+            }
+            floatbuffer.put(double_buff, in_offset, in_len);
+            bytebuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 32 bit float, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 32 bit float, little-endian
+    private static class AudioFloatConversion32L extends AudioFloatConverter {
+        ByteBuffer bytebuffer = null;
+
+        FloatBuffer floatbuffer = null;
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int in_len = out_len * 4;
+            if (bytebuffer == null || bytebuffer.capacity() < in_len) {
+                bytebuffer = ByteBuffer.allocate(in_len).order(
+                        ByteOrder.LITTLE_ENDIAN);
+                floatbuffer = bytebuffer.asFloatBuffer();
+            }
+            bytebuffer.position(0);
+            floatbuffer.position(0);
+            bytebuffer.put(in_buff, in_offset, in_len);
+            floatbuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int out_len = in_len * 4;
+            if (bytebuffer == null || bytebuffer.capacity() < out_len) {
+                bytebuffer = ByteBuffer.allocate(out_len).order(
+                        ByteOrder.LITTLE_ENDIAN);
+                floatbuffer = bytebuffer.asFloatBuffer();
+            }
+            floatbuffer.position(0);
+            bytebuffer.position(0);
+            floatbuffer.put(in_buff, in_offset, in_len);
+            bytebuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+    }
+
+    // PCM 32 bit float, big-endian
+    private static class AudioFloatConversion32B extends AudioFloatConverter {
+        ByteBuffer bytebuffer = null;
+
+        FloatBuffer floatbuffer = null;
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int in_len = out_len * 4;
+            if (bytebuffer == null || bytebuffer.capacity() < in_len) {
+                bytebuffer = ByteBuffer.allocate(in_len).order(
+                        ByteOrder.BIG_ENDIAN);
+                floatbuffer = bytebuffer.asFloatBuffer();
+            }
+            bytebuffer.position(0);
+            floatbuffer.position(0);
+            bytebuffer.put(in_buff, in_offset, in_len);
+            floatbuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int out_len = in_len * 4;
+            if (bytebuffer == null || bytebuffer.capacity() < out_len) {
+                bytebuffer = ByteBuffer.allocate(out_len).order(
+                        ByteOrder.BIG_ENDIAN);
+                floatbuffer = bytebuffer.asFloatBuffer();
+            }
+            floatbuffer.position(0);
+            bytebuffer.position(0);
+            floatbuffer.put(in_buff, in_offset, in_len);
+            bytebuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 8 bit signed/unsigned
+     * 
+     **************************************************************************/
+
+    // PCM 8 bit, signed
+    private static class AudioFloatConversion8S extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++)
+                out_buff[ox++] = in_buff[ix++] * (1.0f / 127.0f);
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++)
+                out_buff[ox++] = (byte) (in_buff[ix++] * 127.0f);
+            return out_buff;
+        }
+    }
+
+    // PCM 8 bit, unsigned
+    private static class AudioFloatConversion8U extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++)
+                out_buff[ox++] = ((in_buff[ix++] & 0xFF) - 127)
+                        * (1.0f / 127.0f);
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++)
+                out_buff[ox++] = (byte) (127 + in_buff[ix++] * 127.0f);
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 16 bit signed/unsigned, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 16 bit, signed, little-endian
+    private static class AudioFloatConversion16SL extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int len = out_offset + out_len;
+            for (int ox = out_offset; ox < len; ox++) {
+                out_buff[ox] = ((short) ((in_buff[ix++] & 0xFF) | 
+                           (in_buff[ix++] << 8))) * (1.0f / 32767.0f);
+            }
+
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ox = out_offset;
+            int len = in_offset + in_len;
+            for (int ix = in_offset; ix < len; ix++) {
+                int x = (int) (in_buff[ix] * 32767.0);
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 16 bit, signed, big-endian
+    private static class AudioFloatConversion16SB extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                out_buff[ox++] = ((short) ((in_buff[ix++] << 8) | 
+                        (in_buff[ix++] & 0xFF))) * (1.0f / 32767.0f);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * 32767.0);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 16 bit, unsigned, little-endian
+    private static class AudioFloatConversion16UL extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8);
+                out_buff[ox++] = (x - 32767) * (1.0f / 32767.0f);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = 32767 + (int) (in_buff[ix++] * 32767.0);
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 16 bit, unsigned, big-endian
+    private static class AudioFloatConversion16UB extends AudioFloatConverter {
+        // Decode: assemble each unsigned 16-bit sample (high byte first),
+        // re-center by subtracting 32767, scale by 1/32767.
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                out_buff[ox++] = (x - 32767) * (1.0f / 32767.0f);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale by 32767, re-bias by +32767, write high byte first.
+        // No clamping of out-of-range samples.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = 32767 + (int) (in_buff[ix++] * 32767.0);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 24 bit signed/unsigned, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 24 bit, signed, little-endian
+    private static class AudioFloatConversion24SL extends AudioFloatConverter {
+        // Decode: assemble 3 little-endian bytes into an unsigned 24-bit
+        // value, manually sign-extend (values above 0x7FFFFF are negative in
+        // 24-bit two's complement), then scale by 1/0x7FFFFF into ~[-1, 1].
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8)
+                        | ((in_buff[ix++] & 0xFF) << 16);
+                if (x > 0x7FFFFF)
+                    x -= 0x1000000;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFF);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale to 24-bit range, map negatives back to unsigned
+        // 24-bit two's complement, write low byte first. No clamping.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFF);
+                if (x < 0)
+                    x += 0x1000000;
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 24 bit, signed, big-endian
+    private static class AudioFloatConversion24SB extends AudioFloatConverter {
+        // Decode: assemble 3 big-endian bytes, manually sign-extend from
+        // 24 bits, scale by 1/0x7FFFFF into ~[-1, 1].
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 16)
+                        | ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                if (x > 0x7FFFFF)
+                    x -= 0x1000000;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFF);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale to 24-bit range, map negatives back to unsigned
+        // 24-bit two's complement, write high byte first. No clamping.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFF);
+                if (x < 0)
+                    x += 0x1000000;
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 24 bit, unsigned, little-endian
+    private static class AudioFloatConversion24UL extends AudioFloatConverter {
+        // Decode: assemble 3 little-endian bytes into an unsigned 24-bit
+        // value, re-center by subtracting the 0x7FFFFF bias, scale by
+        // 1/0x7FFFFF into ~[-1, 1].
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8)
+                        | ((in_buff[ix++] & 0xFF) << 16);
+                x -= 0x7FFFFF;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFF);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale, re-bias by +0x7FFFFF, write low byte first.
+        // No clamping of out-of-range samples.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFF);
+                x += 0x7FFFFF;
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 24 bit, unsigned, big-endian
+    private static class AudioFloatConversion24UB extends AudioFloatConverter {
+        // Decode: assemble 3 big-endian bytes into an unsigned 24-bit value,
+        // re-center by subtracting the 0x7FFFFF bias, scale by 1/0x7FFFFF.
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 16)
+                        | ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                x -= 0x7FFFFF;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFF);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale, re-bias by +0x7FFFFF, write high byte first.
+        // No clamping of out-of-range samples.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFF);
+                x += 0x7FFFFF;
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 32 bit signed/unsigned, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 32 bit, signed, little-endian
+    private static class AudioFloatConversion32SL extends AudioFloatConverter {
+        // Decode: assemble 4 little-endian bytes into a signed 32-bit int
+        // (the top byte's sign bit falls into place naturally), scale by
+        // 1/0x7FFFFFFF into ~[-1, 1].
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8) |
+                        ((in_buff[ix++] & 0xFF) << 16) |
+                        ((in_buff[ix++] & 0xFF) << 24);
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale to full int range, write bytes least-significant
+        // first. No clamping of out-of-range samples.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 24);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32 bit, signed, big-endian
+    private static class AudioFloatConversion32SB extends AudioFloatConverter {
+        // Decode: assemble 4 big-endian bytes into a signed 32-bit int,
+        // scale by 1/0x7FFFFFFF into ~[-1, 1].
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 24) |
+                        ((in_buff[ix++] & 0xFF) << 16) |
+                        ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale to full int range, write bytes most-significant
+        // first. No clamping of out-of-range samples.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                out_buff[ox++] = (byte) (x >>> 24);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32 bit, unsigned, little-endian
+    private static class AudioFloatConversion32UL extends AudioFloatConverter {
+        // Decode: assemble 4 little-endian bytes (unsigned values >= 2^31
+        // wrap into negative ints), then remove the 0x7FFFFFFF bias; the
+        // subtraction deliberately relies on Java's wrapping int arithmetic.
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8) |
+                        ((in_buff[ix++] & 0xFF) << 16) | 
+                        ((in_buff[ix++] & 0xFF) << 24);
+                x -= 0x7FFFFFFF;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale, re-bias by +0x7FFFFFFF (again using wrapping int
+        // arithmetic), write bytes least-significant first. No clamping.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                x += 0x7FFFFFFF;
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 24);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32 bit, unsigned, big-endian
+    private static class AudioFloatConversion32UB extends AudioFloatConverter {
+
+        // Decode: assemble 4 big-endian bytes, remove the 2147483647 bias;
+        // the subtraction deliberately relies on wrapping int arithmetic for
+        // unsigned values >= 2^31.
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 24) |
+                        ((in_buff[ix++] & 0xFF) << 16) |
+                        ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                x -= 2147483647;
+                out_buff[ox++] = x * (1.0f / 2147483647.0f);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale, re-bias by +2147483647 (wrapping), write bytes
+        // most-significant first. No clamping. NOTE(review): this class uses
+        // decimal literals and a double multiply (2147483647.0) where the
+        // sibling 32-bit converters use (float)0x7FFFFFFF — the double
+        // multiply rounds slightly differently; confirm before unifying.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * 2147483647.0);
+                x += 2147483647;
+                out_buff[ox++] = (byte) (x >>> 24);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 32+ bit signed/unsigned, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 32+ bit, signed, little-endian
+    private static class AudioFloatConversion32xSL extends AudioFloatConverter {
+
+        // Number of padding bytes per sample beyond the 4 significant ones
+        // (getConverter passes frameBytes - 4).
+        final int xbytes;
+
+        public AudioFloatConversion32xSL(int xbytes) {
+            this.xbytes = xbytes;
+        }
+
+        // Decode: little-endian, so the extra (least-significant) bytes come
+        // first in the stream — skip them, then read the 4 most-significant
+        // bytes as a signed int and scale by 1/0x7FFFFFFF.
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                ix += xbytes;
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8)
+                        | ((in_buff[ix++] & 0xFF) << 16)
+                        | ((in_buff[ix++] & 0xFF) << 24);
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        // Encode: write zero padding for the low-order extra bytes, then the
+        // 4 significant bytes least-significant first. No clamping.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                for (int j = 0; j < xbytes; j++) {
+                    out_buff[ox++] = 0;
+                }
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 24);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32+ bit, signed, big-endian
+    private static class AudioFloatConversion32xSB extends AudioFloatConverter {
+
+        // Number of padding bytes per sample beyond the 4 significant ones
+        // (getConverter passes frameBytes - 4).
+        final int xbytes;
+
+        public AudioFloatConversion32xSB(int xbytes) {
+            this.xbytes = xbytes;
+        }
+
+        // Decode: big-endian, so the 4 most-significant bytes come first —
+        // read them as a signed int, then skip the trailing low-order
+        // padding bytes. Scale by 1/0x7FFFFFFF.
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 24)
+                        | ((in_buff[ix++] & 0xFF) << 16)
+                        | ((in_buff[ix++] & 0xFF) << 8)
+                        | (in_buff[ix++] & 0xFF);
+                ix += xbytes;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        // Encode: 4 significant bytes most-significant first, then zero
+        // padding for the low-order extra bytes. No clamping.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                out_buff[ox++] = (byte) (x >>> 24);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+                for (int j = 0; j < xbytes; j++) {
+                    out_buff[ox++] = 0;
+                }
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32+ bit, unsigned, little-endian
+    private static class AudioFloatConversion32xUL extends AudioFloatConverter {
+
+        // Number of padding bytes per sample beyond the 4 significant ones
+        // (getConverter passes frameBytes - 4).
+        final int xbytes;
+
+        public AudioFloatConversion32xUL(int xbytes) {
+            this.xbytes = xbytes;
+        }
+
+        // Decode: skip the low-order padding bytes, read the 4 significant
+        // bytes little-endian, remove the 0x7FFFFFFF unsigned bias (relying
+        // on wrapping int arithmetic), scale by 1/0x7FFFFFFF.
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                ix += xbytes;
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8)
+                        | ((in_buff[ix++] & 0xFF) << 16)
+                        | ((in_buff[ix++] & 0xFF) << 24);
+                x -= 0x7FFFFFFF;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale, re-bias by +0x7FFFFFFF (wrapping), write zero
+        // padding then the 4 significant bytes least-significant first.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                x += 0x7FFFFFFF;
+                for (int j = 0; j < xbytes; j++) {
+                    out_buff[ox++] = 0;
+                }
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 24);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32+ bit, unsigned, big-endian
+    private static class AudioFloatConversion32xUB extends AudioFloatConverter {
+
+        // Number of padding bytes per sample beyond the 4 significant ones
+        // (getConverter passes frameBytes - 4).
+        final int xbytes;
+
+        public AudioFloatConversion32xUB(int xbytes) {
+            this.xbytes = xbytes;
+        }
+
+        // Decode: read the 4 significant bytes big-endian, skip the trailing
+        // low-order padding, remove the 2147483647 unsigned bias (wrapping
+        // int arithmetic), scale into ~[-1, 1].
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 24) |
+                        ((in_buff[ix++] & 0xFF) << 16) |
+                        ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                ix += xbytes;
+                x -= 2147483647;
+                out_buff[ox++] = x * (1.0f / 2147483647.0f);
+            }
+            return out_buff;
+        }
+
+        // Encode: scale, re-bias by +2147483647 (wrapping), write the 4
+        // significant bytes most-significant first, then zero padding.
+        // NOTE(review): uses decimal literals and a double multiply
+        // (2147483647.0) where the other 32x converters use
+        // (float)0x7FFFFFFF — rounding differs slightly; confirm before
+        // unifying.
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * 2147483647.0);
+                x += 2147483647;
+                out_buff[ox++] = (byte) (x >>> 24);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+                for (int j = 0; j < xbytes; j++) {
+                    out_buff[ox++] = 0;
+                }
+            }
+            return out_buff;
+        }
+    }
+
+    /**
+     * Factory: selects the converter implementation matching the given
+     * format's encoding (PCM signed/unsigned/float), endianness, and sample
+     * size in bits. Returns null when the frame size is 0, when the frame
+     * size does not equal ceil(bits/8) * channels, or when no implementation
+     * covers the format. The chosen converter has its format field set
+     * before being returned.
+     */
+    public static AudioFloatConverter getConverter(AudioFormat format) {
+    	AudioFloatConverter conv = null;
+        if (format.getFrameSize() == 0)
+            return null;
+        // Frame size must be consistent with sample size and channel count.
+        if (format.getFrameSize() != 
+                ((format.getSampleSizeInBits() + 7) / 8) * format.getChannels()) {
+            return null;
+        }
+        if (format.getEncoding().equals(Encoding.PCM_SIGNED)) {
+            if (format.isBigEndian()) {
+                // <=8 bits has no byte order; >32 bits uses the padded "32x"
+                // variants with (frameBytes - 4) extra bytes per sample.
+                if (format.getSampleSizeInBits() <= 8) {
+                    conv = new AudioFloatConversion8S();
+                } else if (format.getSampleSizeInBits() > 8 &&
+                      format.getSampleSizeInBits() <= 16) {
+                    conv = new AudioFloatConversion16SB();
+                } else if (format.getSampleSizeInBits() > 16 &&
+                      format.getSampleSizeInBits() <= 24) {
+                    conv = new AudioFloatConversion24SB();
+                } else if (format.getSampleSizeInBits() > 24 &&
+                      format.getSampleSizeInBits() <= 32) {
+                    conv = new AudioFloatConversion32SB();
+                } else if (format.getSampleSizeInBits() > 32) {
+                    conv = new AudioFloatConversion32xSB(((format
+                            .getSampleSizeInBits() + 7) / 8) - 4);
+                } 
+            } else {
+                if (format.getSampleSizeInBits() <= 8) {
+                    conv = new AudioFloatConversion8S();
+                } else if (format.getSampleSizeInBits() > 8 &&
+                         format.getSampleSizeInBits() <= 16) {
+                    conv = new AudioFloatConversion16SL();
+                } else if (format.getSampleSizeInBits() > 16 &&
+                         format.getSampleSizeInBits() <= 24) {
+                    conv = new AudioFloatConversion24SL();
+                } else if (format.getSampleSizeInBits() > 24 &&
+                         format.getSampleSizeInBits() <= 32) {
+                    conv = new AudioFloatConversion32SL();
+                } else if (format.getSampleSizeInBits() > 32) {
+                    conv = new AudioFloatConversion32xSL(((format
+                            .getSampleSizeInBits() + 7) / 8) - 4);
+                }
+            }
+        } else if (format.getEncoding().equals(Encoding.PCM_UNSIGNED)) {
+            if (format.isBigEndian()) {
+                if (format.getSampleSizeInBits() <= 8) {
+                    conv = new AudioFloatConversion8U();
+                } else if (format.getSampleSizeInBits() > 8 &&
+                        format.getSampleSizeInBits() <= 16) {
+                    conv = new AudioFloatConversion16UB();
+                } else if (format.getSampleSizeInBits() > 16 &&
+                        format.getSampleSizeInBits() <= 24) {
+                    conv = new AudioFloatConversion24UB();
+                } else if (format.getSampleSizeInBits() > 24 &&
+                        format.getSampleSizeInBits() <= 32) {
+                    conv = new AudioFloatConversion32UB();
+                } else if (format.getSampleSizeInBits() > 32) {
+                    conv = new AudioFloatConversion32xUB(((
+                            format.getSampleSizeInBits() + 7) / 8) - 4);
+                }
+            } else {
+                if (format.getSampleSizeInBits() <= 8) {
+                    conv = new AudioFloatConversion8U();
+                } else if (format.getSampleSizeInBits() > 8 &&
+                        format.getSampleSizeInBits() <= 16) {
+                    conv = new AudioFloatConversion16UL();
+                } else if (format.getSampleSizeInBits() > 16 &&
+                        format.getSampleSizeInBits() <= 24) {
+                    conv = new AudioFloatConversion24UL();
+                } else if (format.getSampleSizeInBits() > 24 &&
+                        format.getSampleSizeInBits() <= 32) {
+                    conv = new AudioFloatConversion32UL();
+                } else if (format.getSampleSizeInBits() > 32) {
+                    conv = new AudioFloatConversion32xUL(((
+                            format.getSampleSizeInBits() + 7) / 8) - 4);
+                }
+            }
+        } else if (format.getEncoding().equals(PCM_FLOAT)) {
+            // PCM_FLOAT: presumably an Encoding constant declared earlier in
+            // this file (not visible in this chunk) — only 32- and 64-bit
+            // IEEE float formats are supported.
+            if (format.getSampleSizeInBits() == 32) {
+                if (format.isBigEndian())
+                    conv = new AudioFloatConversion32B();
+                else
+                    conv = new AudioFloatConversion32L();
+            } else if (format.getSampleSizeInBits() == 64) {
+                if (format.isBigEndian()) 
+                    conv = new AudioFloatConversion64B();
+                else 
+                    conv = new AudioFloatConversion64L();                
+            }
+
+        }
+
+        // Non-byte-aligned PCM (bits not a multiple of 8) is wrapped in a
+        // filter that masks the unused low-order bits.
+        if ((format.getEncoding().equals(Encoding.PCM_SIGNED) || 
+                format.getEncoding().equals(Encoding.PCM_UNSIGNED)) && 
+                (format.getSampleSizeInBits() % 8 != 0)) {
+            conv = new AudioFloatLSBFilter(conv, format);
+        }
+
+        if (conv != null)
+            conv.format = format;
+        return conv;
+    }
+
+    // The AudioFormat this converter was selected for; assigned by
+    // getConverter() before the instance is handed out.
+    private AudioFormat format;
+
+    /** Returns the AudioFormat this converter was created for. */
+    public AudioFormat getFormat() {
+        return format;
+    }
+
+    /**
+     * Decodes out_len samples from in_buff (starting at in_offset) into
+     * out_buff (starting at out_offset), returning out_buff. Implemented by
+     * the per-format subclasses above.
+     */
+    public abstract float[] toFloatArray(byte[] in_buff, int in_offset,
+            float[] out_buff, int out_offset, int out_len);
+
+    // Convenience overload: input offset defaults to 0.
+    public float[] toFloatArray(byte[] in_buff, float[] out_buff,
+            int out_offset, int out_len) {
+        return toFloatArray(in_buff, 0, out_buff, out_offset, out_len);
+    }
+
+    // Convenience overload: output offset defaults to 0.
+    public float[] toFloatArray(byte[] in_buff, int in_offset,
+            float[] out_buff, int out_len) {
+        return toFloatArray(in_buff, in_offset, out_buff, 0, out_len);
+    }
+
+    // Convenience overload: both offsets default to 0.
+    public float[] toFloatArray(byte[] in_buff, float[] out_buff, int out_len) {
+        return toFloatArray(in_buff, 0, out_buff, 0, out_len);
+    }
+
+    // Convenience overload: decodes enough samples to fill out_buff.
+    public float[] toFloatArray(byte[] in_buff, float[] out_buff) {
+        return toFloatArray(in_buff, 0, out_buff, 0, out_buff.length);
+    }
+
+    /**
+     * Encodes in_len samples from in_buff (starting at in_offset) into
+     * out_buff (starting at out_offset), returning out_buff. Implemented by
+     * the per-format subclasses above.
+     */
+    public abstract byte[] toByteArray(float[] in_buff, int in_offset,
+            int in_len, byte[] out_buff, int out_offset);
+
+    // Convenience overload: input offset defaults to 0.
+    public byte[] toByteArray(float[] in_buff, int in_len, byte[] out_buff,
+            int out_offset) {
+        return toByteArray(in_buff, 0, in_len, out_buff, out_offset);
+    }
+
+    // Convenience overload: output offset defaults to 0.
+    public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+            byte[] out_buff) {
+        return toByteArray(in_buff, in_offset, in_len, out_buff, 0);
+    }
+
+    // Convenience overload: both offsets default to 0.
+    public byte[] toByteArray(float[] in_buff, int in_len, byte[] out_buff) {
+        return toByteArray(in_buff, 0, in_len, out_buff, 0);
+    }
+
+    // Convenience overload: encodes all of in_buff.
+    public byte[] toByteArray(float[] in_buff, byte[] out_buff) {
+        return toByteArray(in_buff, 0, in_buff.length, out_buff, 0);
+    }
+
+}

+ 158 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/Complex.java

@@ -0,0 +1,158 @@
+package com.yonge.audio.analysis;
+
+/*************************************************************************
+ * Compilation: javac Complex.java Execution: java Complex
+ * 
+ * Data type for complex numbers.
+ * 
+ * The data type is "immutable" so once you create and initialize a Complex
+ * object, you cannot change it. The "final" keyword when declaring re and im
+ * enforces this rule, making it a compile-time error to change the .re or .im
+ * fields after they've been initialized.
+ * 
+ * % java Complex a = 5.0 + 6.0i b = -3.0 + 4.0i Re(a) = 5.0 Im(a) = 6.0 b + a =
+ * 2.0 + 10.0i a - b = 8.0 + 2.0i a * b = -39.0 + 2.0i b * a = -39.0 + 2.0i a /
+ * b = 0.36 - 1.52i (a / b) * b = 5.0 + 6.0i conj(a) = 5.0 - 6.0i |a| =
+ * 7.810249675906654 tan(a) = -6.685231390246571E-6 + 1.0000103108981198i
+ * 
+ *************************************************************************/
+
+public class Complex {
+	private final double re; // the real part
+	private final double im; // the imaginary part
+
+	// create a new object with the given real and imaginary parts
+	public Complex(double real, double imag) {
+		re = real;
+		im = imag;
+	}
+
+	// return a string representation of the invoking Complex object,
+	// e.g. "5.0", "6.0i", "5.0 + 6.0i", or "5.0 - 6.0i"
+	public String toString() {
+		if (im == 0)
+			return re + "";
+		if (re == 0)
+			return im + "i";
+		if (im < 0)
+			return re + " - " + (-im) + "i";
+		return re + " + " + im + "i";
+	}
+
+	// return abs/modulus/magnitude and angle/phase/argument
+	public double abs() {
+		return Math.hypot(re, im);
+	} // Math.sqrt(re*re + im*im)
+
+	public double phase() {
+		return Math.atan2(im, re);
+	} // between -pi and pi
+
+	// return a new Complex object whose value is (this + b)
+	public Complex plus(Complex b) {
+		Complex a = this; // invoking object
+		double real = a.re + b.re;
+		double imag = a.im + b.im;
+		return new Complex(real, imag);
+	}
+
+	// return a new Complex object whose value is (this - b)
+	public Complex minus(Complex b) {
+		Complex a = this;
+		double real = a.re - b.re;
+		double imag = a.im - b.im;
+		return new Complex(real, imag);
+	}
+
+	// return a new Complex object whose value is (this * b)
+	public Complex times(Complex b) {
+		Complex a = this;
+		double real = a.re * b.re - a.im * b.im;
+		double imag = a.re * b.im + a.im * b.re;
+		return new Complex(real, imag);
+	}
+
+	// scalar multiplication
+	// return a new object whose value is (this * alpha)
+	public Complex times(double alpha) {
+		return new Complex(alpha * re, alpha * im);
+	}
+
+	// return a new Complex object whose value is the conjugate of this
+	public Complex conjugate() {
+		return new Complex(re, -im);
+	}
+
+	// return a new Complex object whose value is the reciprocal of this.
+	// NOTE(review): for 0 + 0i the scale is 0, so the components become
+	// NaN (0/0) — callers must not invert zero.
+	public Complex reciprocal() {
+		double scale = re * re + im * im;
+		return new Complex(re / scale, -im / scale);
+	}
+
+	// return the real or imaginary part
+	public double re() {
+		return re;
+	}
+
+	public double im() {
+		return im;
+	}
+
+	// return a / b
+	public Complex divides(Complex b) {
+		Complex a = this;
+		return a.times(b.reciprocal());
+	}
+
+	// return a new Complex object whose value is the complex exponential of
+	// this
+	public Complex exp() {
+		return new Complex(Math.exp(re) * Math.cos(im), Math.exp(re)
+				* Math.sin(im));
+	}
+
+	// return a new Complex object whose value is the complex sine of this
+	public Complex sin() {
+		return new Complex(Math.sin(re) * Math.cosh(im), Math.cos(re)
+				* Math.sinh(im));
+	}
+
+	// return a new Complex object whose value is the complex cosine of this
+	public Complex cos() {
+		return new Complex(Math.cos(re) * Math.cosh(im), -Math.sin(re)
+				* Math.sinh(im));
+	}
+
+	// return a new Complex object whose value is the complex tangent of this
+	public Complex tan() {
+		return sin().divides(cos());
+	}
+
+	// a static version of plus
+	public static Complex plus(Complex a, Complex b) {
+		double real = a.re + b.re;
+		double imag = a.im + b.im;
+		Complex sum = new Complex(real, imag);
+		return sum;
+	}
+
+	// demo / smoke test.
+	// NOTE(review): the class header comment shows sample output with
+	// Im(a) = 6.0, but here a is constructed as (5.0, 0.0), so the printed
+	// values will not match that sample — confirm which is intended.
+	public static void main(String[] args) {
+		Complex a = new Complex(5.0, 0.0);
+		Complex b = new Complex(-3.0, 4.0);
+
+		System.out.println("a            = " + a);
+		System.out.println("b            = " + b);
+		System.out.println("Re(a)        = " + a.re());
+		System.out.println("Im(a)        = " + a.im());
+		System.out.println("b + a        = " + b.plus(a));
+		System.out.println("a - b        = " + a.minus(b));
+		System.out.println("a * b        = " + a.times(b));
+		System.out.println("b * a        = " + b.times(a));
+		System.out.println("a / b        = " + a.divides(b));
+		System.out.println("(a / b) * b  = " + a.divides(b).times(b));
+		System.out.println("conj(a)      = " + a.conjugate());
+		System.out.println("|a|          = " + a.abs());
+		System.out.println("tan(a)       = " + a.tan());
+	}
+
+}

+ 167 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/FFT.java

@@ -0,0 +1,167 @@
+package com.yonge.audio.analysis;
+
+
+/*************************************************************************
+ * Compilation: javac FFT.java Execution: java FFT N Dependencies: Complex.java
+ * 
+ * Compute the FFT and inverse FFT of a length N complex sequence. Bare bones
+ * implementation that runs in O(N log N) time. Our goal is to optimize the
+ * clarity of the code, rather than performance.
+ * 
+ * Limitations ----------- - assumes N is a power of 2
+ * 
+ * - not the most memory efficient algorithm (because it uses an object type for
+ * representing complex numbers and because it re-allocates memory for the
+ * subarray, instead of doing in-place or reusing a single temporary array)
+ * 
+ *************************************************************************/
+
+public class FFT {
+
+	// compute the FFT of x[], assuming its length is a power of 2
+	public static Complex[] fft(Complex[] x) {
+		int N = x.length;
+
+		// base case
+		if (N == 1)
+			return new Complex[] { x[0] };
+
+		// radix 2 Cooley-Tukey FFT
+		// NOTE(review): this only rejects an odd N at the current level; a
+		// non-power-of-2 even N (e.g. 12) is only caught deeper in the
+		// recursion, so the message is slightly stronger than the check.
+		if (N % 2 != 0) {
+			throw new RuntimeException("N is not a power of 2");
+		}
+
+		// fft of even terms
+		Complex[] even = new Complex[N / 2];
+		for (int k = 0; k < N / 2; k++) {
+			even[k] = x[2 * k];
+		}
+		Complex[] q = fft(even);
+
+		// fft of odd terms
+		// Reusing "even" here is safe: fft(even) above has already returned a
+		// freshly allocated result in q, so "even" is scratch space now.
+		Complex[] odd = even; // reuse the array
+		for (int k = 0; k < N / 2; k++) {
+			odd[k] = x[2 * k + 1];
+		}
+		Complex[] r = fft(odd);
+
+		// combine: butterfly step, y[k] = q[k] + w^k * r[k],
+		// y[k + N/2] = q[k] - w^k * r[k], with w = e^(-2*pi*i/N)
+		Complex[] y = new Complex[N];
+		for (int k = 0; k < N / 2; k++) {
+			double kth = -2 * k * Math.PI / N;
+			Complex wk = new Complex(Math.cos(kth), Math.sin(kth));
+			y[k] = q[k].plus(wk.times(r[k]));
+			y[k + N / 2] = q[k].minus(wk.times(r[k]));
+		}
+		return y;
+	}
+
+	// compute the inverse FFT of x[], assuming its length is a power of 2,
+	// via the conjugate trick: ifft(x) = conj(fft(conj(x))) / N
+	public static Complex[] ifft(Complex[] x) {
+		int N = x.length;
+		Complex[] y = new Complex[N];
+
+		// take conjugate
+		for (int i = 0; i < N; i++) {
+			y[i] = x[i].conjugate();
+		}
+
+		// compute forward FFT
+		y = fft(y);
+
+		// take conjugate again
+		for (int i = 0; i < N; i++) {
+			y[i] = y[i].conjugate();
+		}
+
+		// divide by N
+		for (int i = 0; i < N; i++) {
+			y[i] = y[i].times(1.0 / N);
+		}
+
+		return y;
+
+	}
+
+	// compute the circular convolution of x and y
+	// (pointwise product in the frequency domain)
+	public static Complex[] cconvolve(Complex[] x, Complex[] y) {
+
+		// should probably pad x and y with 0s so that they have same length
+		// and are powers of 2
+		if (x.length != y.length) {
+			throw new RuntimeException("Dimensions don't agree");
+		}
+
+		int N = x.length;
+
+		// compute FFT of each sequence
+		Complex[] a = fft(x);
+		Complex[] b = fft(y);
+
+		// point-wise multiply
+		Complex[] c = new Complex[N];
+		for (int i = 0; i < N; i++) {
+			c[i] = a[i].times(b[i]);
+		}
+
+		// compute inverse FFT
+		return ifft(c);
+	}
+
+	// compute the linear convolution of x and y by zero-padding both inputs
+	// to twice their length and taking the circular convolution
+	public static Complex[] convolve(Complex[] x, Complex[] y) {
+		Complex ZERO = new Complex(0, 0);
+
+		Complex[] a = new Complex[2 * x.length];
+		for (int i = 0; i < x.length; i++)
+			a[i] = x[i];
+		for (int i = x.length; i < 2 * x.length; i++)
+			a[i] = ZERO;
+
+		Complex[] b = new Complex[2 * y.length];
+		for (int i = 0; i < y.length; i++)
+			b[i] = y[i];
+		for (int i = y.length; i < 2 * y.length; i++)
+			b[i] = ZERO;
+
+		return cconvolve(a, b);
+	}
+
+	// display an array of Complex numbers to standard output
+	public static void show(Complex[] x, String title) {
+		System.out.println(title);
+		System.out.println("-------------------");
+		for (int i = 0; i < x.length; i++) {
+			System.out.println(x[i]);
+		}
+		System.out.println();
+	}
+
+	/*********************************************************************
+	 * Test client and sample execution
+	 * 
+	 * % java FFT 4 x ------------------- -0.03480425839330703
+	 * 0.07910192950176387 0.7233322451735928 0.1659819820667019
+	 * 
+	 * y = fft(x) ------------------- 0.9336118983487516 -0.7581365035668999 +
+	 * 0.08688005256493803i 0.44344407521182005 -0.7581365035668999 -
+	 * 0.08688005256493803i
+	 * 
+	 * z = ifft(y) ------------------- -0.03480425839330703 0.07910192950176387
+	 * + 2.6599344570851287E-18i 0.7233322451735928 0.1659819820667019 -
+	 * 2.6599344570851287E-18i
+	 * 
+	 * c = cconvolve(x, x) ------------------- 0.5506798633981853
+	 * 0.23461407150576394 - 4.033186818023279E-18i -0.016542951108772352
+	 * 0.10288019294318276 + 4.033186818023279E-18i
+	 * 
+	 * d = convolve(x, x) ------------------- 0.001211336402308083 -
+	 * 3.122502256758253E-17i -0.005506167987577068 - 5.058885073636224E-17i
+	 * -0.044092969479563274 + 2.1934338938072244E-18i 0.10288019294318276 -
+	 * 3.6147323062478115E-17i 0.5494685269958772 + 3.122502256758253E-17i
+	 * 0.240120239493341 + 4.655566391833896E-17i 0.02755001837079092 -
+	 * 2.1934338938072244E-18i 4.01805098805014E-17i
+	 * 
+	 *********************************************************************/
+
+}

+ 141 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/Signals.java

@@ -0,0 +1,141 @@
+package com.yonge.audio.analysis;
+
+import javax.sound.sampled.AudioFormat;
+import javax.sound.sampled.AudioSystem;
+import javax.sound.sampled.DataLine;
+import javax.sound.sampled.LineUnavailableException;
+import javax.sound.sampled.TargetDataLine;
+
+/**
+ * Basic signal statistics (mean, energy, power, RMS, dB) over a frame of
+ * float PCM samples, plus a microphone-capture demo in {@link #main}.
+ */
+public class Signals {
+	/** Arithmetic mean of the samples. */
+	public static float mean(float[] signal) {
+		float mean = 0;
+		for (int i = 0; i < signal.length; i++)
+			mean += signal[i];
+		mean /= signal.length;
+		return mean;
+	}
+
+	/** Total energy: the sum of squared sample values. */
+	public static float energy(float[] signal) {
+		float totalEnergy = 0;
+		for (int i = 0; i < signal.length; i++)
+			totalEnergy += Math.pow(signal[i], 2);
+		return totalEnergy;
+	}
+
+	/** Average power: energy divided by the number of samples. */
+	public static float power(float[] signal) {
+		return energy(signal) / signal.length;
+	}
+
+	/** Euclidean (L2) norm of the signal. */
+	public static float norm(float[] signal) {
+		return (float) Math.sqrt(energy(signal));
+	}
+
+	/** Smallest sample value (POSITIVE_INFINITY for an empty signal). */
+	public static float minimum(float[] signal) {
+		float min = Float.POSITIVE_INFINITY;
+		for (int i = 0; i < signal.length; i++)
+			min = Math.min(min, signal[i]);
+		return min;
+	}
+
+	/** Largest sample value (NEGATIVE_INFINITY for an empty signal). */
+	public static float maximum(float[] signal) {
+		float max = Float.NEGATIVE_INFINITY;
+		for (int i = 0; i < signal.length; i++)
+			max = Math.max(max, signal[i]);
+		return max;
+	}
+
+	/**
+	 * Scales the signal in place, clamping results to the signed 16-bit PCM
+	 * range [-32768, 32767].
+	 */
+	public static void scale(float[] signal, float scale) {
+		for (int i = 0; i < signal.length; i++) {
+			signal[i] *= scale;
+			if (signal[i] > 32767) {
+				signal[i] = 32767;
+			} else if (signal[i] < -32768) {
+				signal[i] = -32768;
+			}
+		}
+	}
+
+	public static float rms(float[] samples) {
+		// root-mean-square (RMS) amplitude
+		return (float) Math.sqrt(power(samples));
+
+	}
+
+	public static double soundPressureLevel(float[] samples) {
+
+		double rms = rms(samples);
+		// compute the Sound Pressure Level in dB
+		// NOTE(review): rms == 0 (pure silence) yields NEGATIVE_INFINITY here
+		return (20.0 * Math.log10(rms));
+	}
+
+	public static int decibels(float[] samples) {
+		// clamp range for the returned decibel value
+		double minDecibels = 0, db = 0, maxDecibels = 127;
+
+		double rms = rms(samples);
+
+		if (rms > 0) {
+			db = 20 * Math.log10(rms / 0.00002);// 20 uPa RMS is the common "zero" reference sound pressure in air, usually taken as the threshold of human hearing
+
+			if (db > maxDecibels) {
+				db = maxDecibels;
+			} else if (db < minDecibels) {
+				db = minDecibels;
+			}
+		}
+
+		return (int) db;
+	}
+
+	// Demo: capture 16-bit mono PCM from the default input line and print
+	// level/frequency statistics per buffer. Runs until the process is killed.
+	public static void main(String[] args) throws LineUnavailableException {
+
+		float sampleRate = 44100;
+
+		AudioFormat audioFormat = new AudioFormat(sampleRate, 16, 1, true, false);
+
+		DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
+
+		TargetDataLine targetDataLine = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
+
+		targetDataLine.open(audioFormat);
+		targetDataLine.start();
+
+		AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
+
+		byte[] buffer = new byte[1024 * 8];
+
+		while (true) {
+			targetDataLine.read(buffer, 0, buffer.length);
+
+			// 2 bytes per sample (16-bit mono)
+			float[] sampleFloats = new float[buffer.length / 2];
+			converter.toFloatArray(buffer, sampleFloats);
+
+			// compute the Sound Pressure Level
+			double splDb = soundPressureLevel(sampleFloats);
+
+			int db = decibels(sampleFloats);
+
+			Complex[] complex = new Complex[sampleFloats.length];
+
+			for (int i = 0; i < sampleFloats.length; i++) {
+				complex[i] = new Complex(sampleFloats[i], 0);
+			}
+			Complex[] result = FFT.fft(complex);
+
+			// peak-magnitude search over the first half of the spectrum
+			// (the upper half mirrors the lower half for real input)
+			double maxMagnitude = result[0].abs();
+			int maxIndex = 0;
+
+			for (int i = 1; i < result.length / 2; i++) {
+				if (maxMagnitude < result[i].abs()) {
+					maxMagnitude = result[i].abs();
+					maxIndex = i;
+				}
+			}
+
+			// bin index -> frequency: f = k * fs / N
+			double f = maxIndex * sampleRate / result.length;
+
+			System.out.println("db:" + db + "  energy:" + energy(sampleFloats) + "	power:" + power(sampleFloats) + "  rms:" + rms(sampleFloats) + "	splDb: "
+					+ splDb + "	frequency: " + f);
+		}
+	}
+}

+ 52 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/detector/FrequencyDetector.java

@@ -0,0 +1,52 @@
+package com.yonge.audio.analysis.detector;
+
+import com.yonge.audio.analysis.Complex;
+import com.yonge.audio.analysis.FFT;
+
+/**
+ * Estimates the dominant frequency of a PCM frame by locating the
+ * peak-magnitude bin of its FFT spectrum, optionally applying a Hamming
+ * window first to reduce spectral leakage.
+ */
+public class FrequencyDetector {
+
+	/** PCM samples to analyse; length should be a power of 2 for the FFT. */
+	private float[] samples;
+
+	/** Whether to apply a Hamming window before the FFT. */
+	private boolean isUseHammingWindow;
+
+	/** Sampling rate of the input signal in Hz. */
+	private float sampleRate;
+
+	public FrequencyDetector(float[] samples, float sampleRate, boolean isUseHammingWindow) {
+		this.samples = samples;
+		this.sampleRate = sampleRate;
+		this.isUseHammingWindow = isUseHammingWindow;
+	}
+
+	/**
+	 * Estimates the dominant frequency of the frame.
+	 *
+	 * @return the frequency (Hz) of the strongest spectral peak
+	 */
+	public double getFrequency() {
+
+		if (isUseHammingWindow) {
+			// taper the frame edges in place before transforming
+			hamming(samples);
+		}
+
+		Complex[] complex = new Complex[samples.length];
+
+		for (int i = 0; i < samples.length; i++) {
+			complex[i] = new Complex(samples[i], 0);
+		}
+		Complex[] result = FFT.fft(complex);
+
+		// scan only the first N/2 bins: for real-valued input the upper half
+		// of the spectrum mirrors the lower half
+		double maxMagnitude = result[0].abs();
+		int maxIndex = 0;
+
+		for (int i = 1; i < result.length / 2; i++) {
+			if (maxMagnitude < result[i].abs()) {
+				maxMagnitude = result[i].abs();
+				maxIndex = i;
+			}
+		}
+
+		// bin index -> frequency: f = k * fs / N
+		return maxIndex * sampleRate / result.length;
+	}
+
+	// In-place Hamming window: w(i) = 0.54 - 0.46 * cos(2*pi*i / (N-1)).
+	// NOTE(review): divides by (length - 1), so a single-sample frame would
+	// produce NaN; callers should pass frames of length >= 2.
+	private void hamming(float[] samples) {
+		for (int i = 0; i < samples.length; i++) {
+			samples[i] *= (0.54f - 0.46f * Math.cos((2 * Math.PI) * i / (samples.length - 1)));
+		}
+	}
+}

+ 223 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/detector/YINPitchDetector.java

@@ -0,0 +1,223 @@
+package com.yonge.audio.analysis.detector;
+
+/**
+ * A {@link PitchDetector} implementation that uses a YIN algorithm to determine the frequency of
+ * the provided waveform data. The YIN algorithm is similar to the Auto-correlation Function used
+ * for pitch detection but adds additional steps to better the accuracy of the results. Each step
+ * lowers the error rate further. The following implementation was inspired by
+ * <a href="https://github.com/JorenSix/TarsosDSP/blob/master/src/core/be/tarsos/dsp/pitch/Yin.java">TarsosDsp</a>
+ * and
+ * <a href="http://recherche.ircam.fr/equipes/pcm/cheveign/ps/2002_JASA_YIN_proof.pdf">this YIN paper</a>.
+ * The six steps in the YIN algorithm are (according to the YIN paper):
+ * <p>
+ * <ol>
+ * <li>Auto-correlation Method</li>
+ * <li>Difference Function</li>
+ * <li>Cumulative Mean Normalized Difference Function</li>
+ * <li>Absolute Threshold</li>
+ * <li>Parabolic Interpolation</li>
+ * <li>Best Local Estimate</li>
+ * </ol>
+ * </p>
+ * The first two steps, the Auto-correlation Method and the Difference Function, can seemingly be
+ * combined into a single difference function step according to the YIN paper.
+ */
+public class YINPitchDetector {
+    // According to the YIN Paper, the threshold should be between 0.10 and 0.15
+    private static final float ABSOLUTE_THRESHOLD = 0.125f;
+
+	/**
+	 * C-1 = 16.35 / 2 Hz.
+	 */
+	private static final double REF_FREQ = 8.17579892;
+
+	/**
+	 * Cache LOG 2 calculation.
+	 */
+	private static final double LOG_TWO = Math.log(2.0);
+
+    private final double sampleRate;
+    // Scratch buffer for the difference function; half the analysis buffer
+    // size, so getFrequency() expects wave.length == bufferSize.
+    private final float[] resultBuffer;
+
+    public YINPitchDetector(int bufferSize, float sampleRate) {
+        this.sampleRate = sampleRate;
+        this.resultBuffer = new float[bufferSize/2];
+    }
+    
+    /**
+	 * The reference frequency is configured. The default reference frequency is
+	 * 16.35Hz. This is C0 on a piano keyboard with A4 tuned to 440 Hz. This
+	 * means that 0 cents is C0; 1200 is C1; 2400 is C2; ... also -1200 cents is
+	 * C-1
+	 * 
+	 * @param hertzValue
+	 *            The pitch in Hertz.
+	 * @return The value in absolute cents using the configured reference
+	 *         frequency
+	 */
+	public static double hertzToAbsoluteCent(final double hertzValue) {
+		double pitchInAbsCent = 0.0;
+		if (hertzValue > 0) {
+			pitchInAbsCent = 1200 * Math.log(hertzValue / REF_FREQ) / LOG_TWO;
+		}
+		return pitchInAbsCent;
+	}
+
+    // Runs YIN steps 1-5 over the wave frame and returns the detected
+    // fundamental frequency in Hz. NOTE(review): resultBuffer is accumulated
+    // into, not cleared, so each detector instance appears intended for a
+    // single call per buffer lifecycle — confirm callers reset or recreate it.
+    public double getFrequency(float[] wave) {
+        int tau;
+
+        // First, perform the functions to normalize the wave data
+
+        // The first and second steps in the YIN algorithm
+        autoCorrelationDifference(wave);
+
+        // The third step in the YIN algorithm
+        cumulativeMeanNormalizedDifference();
+
+        // Then perform the functions to retrieve the tau (the approximate period)
+
+        // The fourth step in the YIN algorithm
+        tau = absoluteThreshold();
+
+        // The fifth step in the YIN algorithm
+        float betterTau = parabolicInterpolation(tau);
+
+        // TODO implement the sixth and final step of the YIN algorithm
+        // (it isn't implemented in the Tarsos DSP project but is briefly explained in the YIN
+        // paper).
+
+        // The fundamental frequency (note frequency) is the sampling rate divided by the tau (index
+        // within the resulting buffer array that marks the period).
+        // The period is the duration (or index here) of one cycle.
+        // Frequency = 1 / Period, with respect to the sampling rate, Frequency = Sample Rate / Period
+        return sampleRate / betterTau;
+    }
+
+    /**
+     * Performs the first and second step of the YIN Algorithm on the provided array buffer values.
+     * This is a "combination" of the AutoCorrelation Method and the Difference Function. The
+     * AutoCorrelation Method multiplies the array value at the specified index with the array value
+     * at the specified index plus the "tau" (greek letter used in the formula). Whereas the
+     * Difference Function takes the square of the difference of the two values. This is supposed to
+     * provide a more accurate result (from about 10% to about 1.95% error rate). Note that this
+     * formula is a riemann sum, meaning the operation specified above is performed and accumulated
+     * for every value in the array. The result of this function is stored in a global array,
+     * {@link #resultBuffer}, which the subsequent steps of the algorithm should use.
+     *
+     * @param wave The waveform data to perform the AutoCorrelation Difference function on.
+     *             Must contain at least 2 * resultBuffer.length samples: the largest index
+     *             read is (length - 1) + (length - 1) = 2 * length - 2.
+     */
+    private void autoCorrelationDifference(final float[] wave) {
+        // Note this algorithm is currently slow (O(n^2)). Should look for any possible optimizations.
+        int length = resultBuffer.length;
+        int i, j;
+
+        for (j = 1; j < length; j++) {
+            for (i = 0; i < length; i++) {
+                // d sub t (tau) = (x(i) - x(i - tau))^2, from i = 1 to result buffer size
+                // (the loop actually starts at i = 0, matching the TarsosDSP reference)
+                resultBuffer[j] += Math.pow((wave[i] - wave[i + j]), 2);
+            }
+        }
+    }
+
+    /**
+     * Performs the third step in the YIN Algorithm on the {@link #resultBuffer}. The result of this
+     * function yields an even lower error rate (about 1.69% from 1.95%). The {@link #resultBuffer}
+     * is updated when this function is performed.
+     */
+    private void cumulativeMeanNormalizedDifference() {
+        // newValue = oldValue / (runningSum / tau)
+        // == (oldValue / 1) * (tau / runningSum)
+        // == oldValue * (tau / runningSum)
+
+        // Here we're using index i as the "tau" in the equation
+        int i;
+        int length = resultBuffer.length;
+        float runningSum = 0;
+
+        // Set the first value in the result buffer to the value of one
+        resultBuffer[0] = 1;
+
+        for (i = 1; i < length; i++) {
+            // The sum of this value plus all the previous values in the buffer array
+            runningSum += resultBuffer[i];
+
+            // The current value is updated to be the current value multiplied by the index divided by the running sum value
+            // (int i promotes to float here, so this is floating-point division)
+            resultBuffer[i] *= i / runningSum;
+        }
+    }
+
+    /**
+     * Performs step four of the YIN Algorithm on the {@link #resultBuffer}. This is the first step
+     * in the algorithm to attempt finding the period of the wave data. When attempting to determine
+     * the period of a wave, it's common to search for the high or low peaks or dips of the wave.
+     * This will allow you to determine the length of a cycle or its period. However, especially
+     * with a natural sound sample, it is possible to have false dips. This makes determining the
+     * period more difficult. This function attempts to resolve this issue by introducing a
+     * threshold. The result of this function yields an even lower rate (about 0.78% from about
+     * 1.69%).
+     *
+     * @return The tau indicating the approximate period.
+     */
+    private int absoluteThreshold() {
+        int tau;
+        int length = resultBuffer.length;
+
+        // The first two values in the result buffer should be 1, so start at the third value
+        for (tau = 2; tau < length; tau++) {
+            // If we are less than the threshold, continue on until we find the lowest value
+            // indicating the lowest dip in the wave since we first crossed the threshold.
+            if (resultBuffer[tau] < ABSOLUTE_THRESHOLD) {
+                while (tau + 1 < length && resultBuffer[tau + 1] < resultBuffer[tau]) {
+                    tau++;
+                }
+
+                // We have the approximate tau value, so break the loop
+                break;
+            }
+        }
+
+        // Some implementations of this algorithm set the tau value to -1 to indicate no correct tau
+        // value was found. This implementation will just return the last tau.
+        tau = tau >= length ? length - 1 : tau;
+
+        return tau;
+    }
+
+    /**
+     * Further lowers the error rate by using parabolas to smooth the wave between the minimum and
+     * maximum points. Especially helps to detect higher frequencies more precisely. The result of
+     * this function results in only a small error rate decline from about 0.78% to about 0.77%.
+     */
+    private float parabolicInterpolation(final int currentTau) {
+        // Finds the points to fit the parabola between
+        int x0 = currentTau < 1 ? currentTau : currentTau - 1;
+        int x2 = currentTau + 1 < resultBuffer.length ? currentTau + 1 : currentTau;
+
+        // Finds the better tau estimate
+        float betterTau;
+
+        // Edge cases: when currentTau sits on either buffer boundary, fall
+        // back to whichever of the two candidate points has the smaller dip.
+        if (x0 == currentTau) {
+            if (resultBuffer[currentTau] <= resultBuffer[x2]) {
+                betterTau = currentTau;
+            } else {
+                betterTau = x2;
+            }
+        } else if (x2 == currentTau) {
+            if (resultBuffer[currentTau] <= resultBuffer[x0]) {
+                betterTau = currentTau;
+            } else {
+                betterTau = x0;
+            }
+        } else {
+            // Fit the parabola between the first point, current tau, and the last point to find a
+            // better tau estimate.
+            float s0 = resultBuffer[x0];
+            float s1 = resultBuffer[currentTau];
+            float s2 = resultBuffer[x2];
+
+            betterTau = currentTau + (s2 - s0) / (2 * (2 * s1 - s2 - s0));
+        }
+
+        return betterTau;
+    }
+}

+ 73 - 0
audio-analysis/src/main/java/com/yonge/audio/config/LocalFastJsonHttpMessageConverter.java

@@ -0,0 +1,73 @@
+package com.yonge.audio.config;
+
+import com.alibaba.fastjson.serializer.JSONSerializer;
+import com.alibaba.fastjson.serializer.ObjectSerializer;
+import com.alibaba.fastjson.serializer.SimpleDateFormatSerializer;
+import com.alibaba.fastjson.serializer.ValueFilter;
+import com.alibaba.fastjson.support.spring.FastJsonHttpMessageConverter;
+import com.vdurmont.emoji.EmojiParser;
+import com.yonge.toolset.base.enums.BaseEnum;
+import com.yonge.toolset.utils.json.JsonUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.http.HttpInputMessage;
+import org.springframework.http.HttpOutputMessage;
+import org.springframework.http.converter.HttpMessageNotReadableException;
+import org.springframework.http.converter.HttpMessageNotWritableException;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.lang.reflect.Type;
+import java.math.BigDecimal;
+import java.util.Date;
+
+/**
+ * FastJson HTTP message converter that writes dates with a fixed pattern and
+ * serializes {@code BaseEnum} values as their codes (via {@link EnumFilter}).
+ */
+public class LocalFastJsonHttpMessageConverter extends FastJsonHttpMessageConverter {
+
+	/** Serialization pattern applied to all java.util.Date values. */
+	private static final String FORMAT = "yyyy-MM-dd HH:mm:ss";
+
+	// Reading is delegated unchanged to FastJson's default deserialization.
+	@Override
+	protected Object readInternal(Class<? extends Object> clazz, HttpInputMessage inputMessage) throws IOException, HttpMessageNotReadableException {
+		return super.readInternal(clazz, inputMessage);
+	}
+
+	@Override
+	protected void writeInternal(Object obj, HttpOutputMessage outputMessage) throws IOException, HttpMessageNotWritableException {
+
+		OutputStream out = outputMessage.getBody();
+		// NOTE(review): mutates the shared JsonUtil config on every write —
+		// idempotent in effect, but confirm the underlying map is thread-safe.
+		JsonUtil.getConfig().put(Date.class, new SimpleDateFormatSerializer(FORMAT));
+		//JsonUtil.getConfig().put(String.class, new EmojiSerializer());
+		String text = JsonUtil.toJSONString(obj, EnumFilter.instance, getFeatures());
+		byte[] bytes = text.getBytes(getCharset());
+		out.write(bytes);
+	}
+}
+
+/**
+ * Serializer that expands emoji aliases to their Unicode characters before
+ * writing. Currently unused: its registration above is commented out.
+ */
+class EmojiSerializer implements ObjectSerializer{
+
+	@Override
+	public void write(JSONSerializer serializer, Object object, Object fieldName, Type fieldType, int features) throws IOException {
+		serializer.write(EmojiParser.parseToUnicode(object.toString()));
+	}
+	
+}
+
+/**
+ * FastJson value filter that normalizes numeric values to BigDecimal and
+ * replaces {@code BaseEnum} values with their code for serialization.
+ */
+class EnumFilter implements ValueFilter {
+
+	/** Shared stateless instance; safe to reuse across requests. */
+	public static EnumFilter instance = new EnumFilter();
+
+	public EnumFilter() {
+	}
+
+	@Override
+	public Object process(Object object, String name, Object value) {
+		// pass nulls and blank strings through untouched
+		if (value == null || StringUtils.isBlank(value.toString())) {
+			return value;
+		}
+		// re-wrap floating point values via their string form to avoid
+		// binary-float representation artifacts in the output
+		if (value instanceof BigDecimal || value instanceof Double || value instanceof Float) {
+			return new BigDecimal(value.toString());
+		}
+		// enums are serialized as their code, not their name
+		if (BaseEnum.class.isAssignableFrom(value.getClass())) {
+			return ((BaseEnum<?, ?>) value).getCode();
+		}
+		return value;
+	}
+}

+ 44 - 0
audio-analysis/src/main/java/com/yonge/audio/config/ResourceServerConfig.java

@@ -0,0 +1,44 @@
+package com.yonge.audio.config;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
+import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter;
+import org.springframework.security.oauth2.config.annotation.web.configurers.ResourceServerSecurityConfigurer;
+
+import com.yonge.cooleshow.common.security.BaseAccessDeniedHandler;
+import com.yonge.cooleshow.common.security.BaseAuthenticationEntryPoint;
+
+/**
+ * OAuth2 resource-server security configuration: opens /task/** and the
+ * Swagger spec, requires authentication for everything else, and wires the
+ * shared access-denied / authentication-entry-point handlers.
+ */
+@Configuration
+@EnableResourceServer
+@EnableGlobalMethodSecurity(prePostEnabled = true)
+public class ResourceServerConfig extends ResourceServerConfigurerAdapter {
+
+	@Autowired
+	private BaseAccessDeniedHandler baseAccessDeniedHandler;
+
+	@Autowired
+	private BaseAuthenticationEntryPoint baseAuthenticationEntryPoint;
+
+	@Override
+	public void configure(HttpSecurity http) throws Exception {
+		// NOTE(review): hasIpAddress("0.0.0.0/0") matches every IPv4 address,
+		// i.e. /task/** is effectively open without authentication — confirm
+		// this is intentional and not a placeholder.
+		http.authorizeRequests()
+		.antMatchers("/task/**")
+		.hasIpAddress("0.0.0.0/0")
+				.antMatchers("/v2/api-docs")
+				.permitAll()
+				// resources anyone can fetch without logging in
+				// .antMatchers("/ipController/**").hasIpAddress("127.0.0.1") // a specific IP may fetch resources without logging in
+				// .antMatchers("/ipControll/**").access("isAuthenticated() and hasIpAddress('127.0.0.1')")// a specific IP must be logged in to fetch resources
+				.anyRequest().authenticated().and().csrf().disable().exceptionHandling().accessDeniedHandler(baseAccessDeniedHandler)
+				.authenticationEntryPoint(baseAuthenticationEntryPoint).and();
+	}
+
+	@Override
+	public void configure(ResourceServerSecurityConfigurer resources) throws Exception {
+		resources.authenticationEntryPoint(baseAuthenticationEntryPoint).accessDeniedHandler(baseAccessDeniedHandler);
+	}
+
+}

+ 35 - 0
audio-analysis/src/main/java/com/yonge/audio/config/WebMvcConfig.java

@@ -0,0 +1,35 @@
+package com.yonge.audio.config;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.springframework.boot.autoconfigure.http.HttpMessageConverters;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.format.FormatterRegistry;
+import org.springframework.http.MediaType;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
+
+import com.yonge.cooleshow.common.config.EnumConverterFactory;
+
+/**
+ * Spring MVC customization: registers the enum converter factory and replaces
+ * the default JSON converter with the FastJson-based one.
+ */
+@Configuration
+public class WebMvcConfig implements WebMvcConfigurer {
+	
+	/**
+	 * Converter factory for enum request parameters (addConverterFactory).
+	 */
+	@Override
+	public void addFormatters(FormatterRegistry registry) {
+		registry.addConverterFactory(new EnumConverterFactory());
+	}
+	
+	/**
+	 * Registers {@link LocalFastJsonHttpMessageConverter} for
+	 * application/json;charset=UTF-8 responses.
+	 */
+	@Bean
+    public HttpMessageConverters fastJsonHttpMessageConverters(){
+		LocalFastJsonHttpMessageConverter converter = new LocalFastJsonHttpMessageConverter();
+        List<MediaType> fastMediaTypes =  new ArrayList<MediaType>();
+        fastMediaTypes.add(MediaType.APPLICATION_JSON_UTF8);
+        converter.setSupportedMediaTypes(fastMediaTypes);
+        return new HttpMessageConverters(converter);
+    }
+
+}

+ 98 - 0
audio-analysis/src/main/java/com/yonge/audio/utils/ArrayUtil.java

@@ -0,0 +1,98 @@
+package com.yonge.audio.utils;
+
+/**
+ * Small helpers for concatenating and slicing byte/float arrays.
+ */
+public class ArrayUtil {
+
+	/**
+	 * Concatenates two byte arrays.
+	 *
+	 * @param bt1 leading array
+	 * @param bt2 trailing array
+	 * @return bt1 followed by bt2; when bt2 is empty, bt1 itself is returned
+	 *         without copying, so callers must not assume a fresh array
+	 */
+	public static byte[] mergeByte(byte[] bt1, byte[] bt2) {
+		if (bt2.length == 0) {
+			return bt1;
+		}
+
+		byte[] bt3 = new byte[bt1.length + bt2.length];
+
+		// System.arraycopy with length 0 is a no-op, so no empty-guard needed
+		System.arraycopy(bt1, 0, bt3, 0, bt1.length);
+		System.arraycopy(bt2, 0, bt3, bt1.length, bt2.length);
+
+		return bt3;
+	}
+
+	/**
+	 * Copies the inclusive index range [startIndex, endIndex] from the source
+	 * array into a new array.
+	 *
+	 * @param src source array
+	 * @param startIndex first index to copy (inclusive)
+	 * @param endIndex last index to copy (inclusive)
+	 * @return a new array of length endIndex - startIndex + 1
+	 * @throws IllegalArgumentException when endIndex is less than startIndex
+	 */
+	public static byte[] extractByte(byte[] src, int startIndex, int endIndex) {
+
+		if (startIndex > endIndex) {
+			// IllegalArgumentException is a RuntimeException subclass, so
+			// existing catch blocks keep working while the type is precise
+			throw new IllegalArgumentException("结束索引[" + endIndex + "]不能小于起始索引[" + startIndex + "]");
+		}
+
+		byte[] target = new byte[endIndex - startIndex + 1];
+		System.arraycopy(src, startIndex, target, 0, target.length);
+
+		return target;
+	}
+
+	/**
+	 * Concatenates two float arrays.
+	 *
+	 * @param bt1 leading array
+	 * @param bt2 trailing array
+	 * @return bt1 followed by bt2; when bt2 is empty, bt1 itself is returned
+	 *         without copying, so callers must not assume a fresh array
+	 */
+	public static float[] mergeFloat(float[] bt1, float[] bt2) {
+		if (bt2.length == 0) {
+			return bt1;
+		}
+
+		float[] bt3 = new float[bt1.length + bt2.length];
+
+		// System.arraycopy with length 0 is a no-op, so no empty-guard needed
+		System.arraycopy(bt1, 0, bt3, 0, bt1.length);
+		System.arraycopy(bt2, 0, bt3, bt1.length, bt2.length);
+
+		return bt3;
+	}
+
+	/**
+	 * Copies the inclusive index range [startIndex, endIndex] from the source
+	 * array into a new array.
+	 *
+	 * @param src source array
+	 * @param startIndex first index to copy (inclusive)
+	 * @param endIndex last index to copy (inclusive)
+	 * @return a new array of length endIndex - startIndex + 1
+	 * @throws IllegalArgumentException when endIndex is less than startIndex
+	 */
+	public static float[] extractFloat(float[] src, int startIndex, int endIndex) {
+		if (startIndex > endIndex) {
+			throw new IllegalArgumentException("结束索引[" + endIndex + "]不能小于起始索引[" + startIndex + "]");
+		}
+
+		float[] target = new float[endIndex - startIndex + 1];
+		System.arraycopy(src, startIndex, target, 0, target.length);
+
+		return target;
+	}
+
+	// Demo entry point: extracts and prints the full range of a sample array.
+	public static void main(String[] args) {
+		byte[] b1 = { 1, 2, 3, 4, 5 };
+		//byte[] b2 = { 3, 2, 1 };
+		byte[] r = extractByte(b1, 0, 4);
+		for (int i = 0; i < r.length; i++) {
+			System.out.println(r[i]);
+		}
+	}
+}

+ 42 - 0
audio-analysis/src/main/java/com/yonge/netty/common/message/Message.java

@@ -0,0 +1,42 @@
+package com.yonge.netty.common.message;
+
+/**
+ * Message envelope of the communication protocol: a type tag used to route to
+ * the matching {@code MessageHandler}, plus an arbitrary payload.
+ *
+ * @param <T> payload type
+ */
+public class Message<T> {
+
+	/**
+	 * Message type tag (routing key).
+	 */
+	private String type;
+
+	/**
+	 * Message payload.
+	 */
+	private T data;
+
+	// no-arg constructor (required for deserialization frameworks)
+	public Message() {
+	}
+
+	public Message(String type, T data) {
+		this.type = type;
+		this.data = data;
+	}
+
+	public String getType() {
+		return type;
+	}
+
+	public void setType(String type) {
+		this.type = type;
+	}
+
+	public T getData() {
+		return data;
+	}
+
+	public void setData(T data) {
+		this.data = data;
+	}
+}

+ 34 - 0
audio-analysis/src/main/java/com/yonge/netty/common/message/MessageDispatcher.java

@@ -0,0 +1,34 @@
+package com.yonge.netty.common.message;
+
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+/**
+ * Netty inbound handler that routes each decoded {@link Message} to the
+ * {@code MessageHandler} registered for its type and runs it on a worker pool
+ * so handler logic does not block the Netty event loop.
+ */
+public class MessageDispatcher extends SimpleChannelInboundHandler<Message<?>> {
+
+	@Autowired
+	private MessageHandlerContainer messageHandlerContainer;
+
+	// fixed worker pool for handler execution; 200 threads, never shut down
+	// here — NOTE(review): confirm lifecycle/shutdown is handled elsewhere
+	private final ExecutorService executor = Executors.newFixedThreadPool(200);
+
+	@Override
+	protected void channelRead0(ChannelHandlerContext ctx, Message<?> message) {
+		// look up the MessageHandler registered for this message type
+		// (throws IllegalArgumentException when no handler matches)
+		// NOTE(review): raw MessageHandler type — the unchecked execute() call
+		// below relies on the handler accepting the payload's runtime type
+		MessageHandler messageHandler = messageHandlerContainer.getMessageHandler(message.getType());
+		// the handler's declared message class could be resolved here:
+		// Class<? extends Message> messageClass = MessageHandlerContainer.getMessageClass(messageHandler);
+		// execute the handler off the event loop
+		executor.submit(new Runnable() {
+
+			@Override
+			public void run() {
+				messageHandler.execute(ctx.channel(), message.getData());
+			}
+
+		});
+	}
+}

+ 20 - 0
audio-analysis/src/main/java/com/yonge/netty/common/message/MessageHandler.java

@@ -0,0 +1,20 @@
+package com.yonge.netty.common.message;
+
+import io.netty.channel.Channel;
+
+/**
+ * Handler for one message type; implementations are discovered as Spring
+ * beans and registered by {@code MessageHandlerContainer} keyed on
+ * {@link #getType()}.
+ *
+ * @param <T> payload type this handler accepts
+ */
+public interface MessageHandler<T> {
+
+	/**
+	 * Handles one message.
+	 *
+	 * @param channel the Netty channel the message arrived on
+	 * @param message the message payload
+	 */
+	void execute(Channel channel, T message);
+
+	/**
+	 * @return the message type, i.e. the TYPE static field on each Message
+	 *         implementation class
+	 */
+	String getType();
+
+}

+ 89 - 0
audio-analysis/src/main/java/com/yonge/netty/common/message/MessageHandlerContainer.java

@@ -0,0 +1,89 @@
+package com.yonge.netty.common.message;
+
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.aop.framework.AopProxyUtils;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.stereotype.Component;
+
+//@Component
+public class MessageHandlerContainer implements InitializingBean {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(MessageHandlerContainer.class);
+
+	/**
+	 * 消息类型与 MessageHandler 的映射
+	 */
+	private final Map<String, MessageHandler<?>> handlers = new HashMap<String, MessageHandler<?>>();
+
+	@Autowired
+	private ApplicationContext applicationContext;
+
+	@Override
+	public void afterPropertiesSet() throws Exception {
+		// 通过 ApplicationContext 获得所有 MessageHandler Bean
+		applicationContext.getBeansOfType(MessageHandler.class).values() // 获得所有 MessageHandler Bean
+				.forEach(messageHandler -> handlers.put(messageHandler.getType(), messageHandler)); // 添加到 handlers 中
+		LOGGER.info("[afterPropertiesSet][消息处理器数量:{}]", handlers.size());
+	}
+
+	/**
+	 * 获得类型对应的 MessageHandler
+	 *
+	 * @param type 类型
+	 * @return MessageHandler
+	 */
+	MessageHandler<?> getMessageHandler(String type) {
+		MessageHandler<?> handler = handlers.get(type);
+		if (handler == null) {
+			throw new IllegalArgumentException(String.format("类型(%s) 找不到匹配的 MessageHandler 处理器", type));
+		}
+		return handler;
+	}
+
+	/**
+	 * 获得 MessageHandler 处理的消息类
+	 *
+	 * @param handler 处理器
+	 * @return 消息类
+	 */
+	static Class<? extends Message> getMessageClass(MessageHandler<?> handler) {
+		// 获得 Bean 对应的 Class 类名。因为有可能被 AOP 代理过。
+		Class<?> targetClass = AopProxyUtils.ultimateTargetClass(handler);
+		// 获得接口的 Type 数组
+		Type[] interfaces = targetClass.getGenericInterfaces();
+		Class<?> superclass = targetClass.getSuperclass();
+		while ((Objects.isNull(interfaces) || 0 == interfaces.length) && Objects.nonNull(superclass)) { // 此处,是以父类的接口为准
+			interfaces = superclass.getGenericInterfaces();
+			superclass = targetClass.getSuperclass();
+		}
+		if (Objects.nonNull(interfaces)) {
+			// 遍历 interfaces 数组
+			for (Type type : interfaces) {
+				// 要求 type 是泛型参数
+				if (type instanceof ParameterizedType) {
+					ParameterizedType parameterizedType = (ParameterizedType) type;
+					// 要求是 MessageHandler 接口
+					if (Objects.equals(parameterizedType.getRawType(), MessageHandler.class)) {
+						Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();
+						// 取首个元素
+						if (Objects.nonNull(actualTypeArguments) && actualTypeArguments.length > 0) {
+							return (Class<Message>) actualTypeArguments[0];
+						} else {
+							throw new IllegalStateException(String.format("类型(%s) 获得不到消息类型", handler));
+						}
+					}
+				}
+			}
+		}
+		throw new IllegalStateException(String.format("类型(%s) 获得不到消息类型", handler));
+	}
+}

+ 100 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/ChunkAnalysis.java

@@ -0,0 +1,100 @@
+package com.yonge.netty.dto;
+
+public class ChunkAnalysis {
+
+	private double startTime;
+
+	private double endTime;
+
+	private double durationTime;
+
+	private int frequency;
+
+	private int splDb;
+
+	private int power;
+	
+	private int amplitude;
+	
+	private boolean isPeak;
+
+	public ChunkAnalysis(double startTime, double endTime, int frequency, int splDb, int power, int amplitude) {
+		this.startTime = startTime;
+		this.endTime = endTime;
+		this.frequency = frequency;
+		this.splDb = splDb;
+		this.power = power;
+		this.amplitude = amplitude;
+		this.durationTime = endTime - startTime;
+	}
+
+	public ChunkAnalysis(int frequency, int splDb, int power) {
+		this.frequency = frequency;
+		this.splDb = splDb;
+		this.power = power;
+	}
+
+	public double getStartTime() {
+		return startTime;
+	}
+
+	public void setStartTime(double startTime) {
+		this.startTime = startTime;
+	}
+
+	public double getEndTime() {
+		return endTime;
+	}
+
+	public void setEndTime(double endTime) {
+		this.endTime = endTime;
+	}
+
+	public double getDurationTime() {
+		return durationTime;
+	}
+
+	public void setDurationTime(double durationTime) {
+		this.durationTime = durationTime;
+	}
+
+	public int getFrequency() {
+		return frequency;
+	}
+
+	public void setFrequency(int frequency) {
+		this.frequency = frequency;
+	}
+
+	public int getSplDb() {
+		return splDb;
+	}
+
+	public void setSplDb(int splDb) {
+		this.splDb = splDb;
+	}
+
+	public int getPower() {
+		return power;
+	}
+
+	public void setPower(int power) {
+		this.power = power;
+	}
+
+	public int getAmplitude() {
+		return amplitude;
+	}
+
+	public void setAmplitude(int amplitude) {
+		this.amplitude = amplitude;
+	}
+
+	public boolean isPeak() {
+		return isPeak;
+	}
+
+	public void setPeak(boolean isPeak) {
+		this.isPeak = isPeak;
+	}
+}

+ 134 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/HardLevelEnum.java

@@ -0,0 +1,134 @@
+package com.yonge.netty.dto;
+
+import com.yonge.toolset.base.enums.BaseEnum;
+
+public enum HardLevelEnum implements BaseEnum<String, HardLevelEnum> {
+	/**
+	 * 入门级, 振幅阈值, 频率阈值 <br>
+	 * 节奏有效范围(1分音符), 节奏有效范围(2分音符), 节奏有效范围(4分音符), 节奏有效范围(8分音符), 节奏有效范围(16分音符), 节奏有效范围(32分音符)<br>
+	 * 完成度范围, 未演奏的范围
+	 */
+	//BEGINNER("入门级", 3, 5, 5, 5, 10, 10, 13, 15, 60, 10), 
+	BEGINNER("入门级", 3, 5, 10, 10, 15, 15, 22, 22, 75, 25), 
+	/**
+	 * 进阶级, 振幅阈值, 频率阈值 <br>
+	 * 节奏有效范围(1分音符), 节奏有效范围(2分音符), 节奏有效范围(4分音符), 节奏有效范围(8分音符), 节奏有效范围(16分音符), 节奏有效范围(32分音符)<br>
+	 * 完成度范围, 未演奏的范围
+	 */
+	ADVANCED("进阶级", 3, 5, 8, 8, 12, 12, 20, 20, 85, 15),
+	//ADVANCED("进阶级", 3, 5, 50, 50, 50, 50, 50, 5, 80, 10),
+	/**
+	 * 大师级, 振幅阈值, 频率阈值 <br>
+	 * 节奏有效范围(1分音符), 节奏有效范围(2分音符), 节奏有效范围(4分音符), 节奏有效范围(8分音符), 节奏有效范围(16分音符), 节奏有效范围(32分音符)<br>
+	 * 完成度范围, 未演奏的范围
+	 */
+	PERFORMER("大师级", 3, 3, 5, 5, 10, 10, 13, 15, 95, 10);
+
+	private String msg;
+
+	private int amplitudeThreshold;
+
+	private int frequencyThreshold;
+
+	private int tempoEffectiveRangeOf1;
+
+	private int tempoEffectiveRangeOf2;
+
+	private int tempoEffectiveRangeOf4;
+
+	private int tempoEffectiveRangeOf8;
+
+	private int tempoEffectiveRangeOf16;
+
+	private int tempoEffectiveRangeOf32;
+
+	private int integrityRange;
+
+	private int notPlayRange;
+
+	/**
+	 * 
+	 * @param msg
+	 * @param amplitudeThreshold 振幅阈值
+	 * @param frequencyThreshold 频率阈值
+	 * @param tempoEffectiveRangeOf1 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param tempoEffectiveRangeOf2 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param tempoEffectiveRangeOf4 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param tempoEffectiveRangeOf8 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param tempoEffectiveRangeOf16 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param tempoEffectiveRangeOf32 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param integrityRange 完成度范围
+	 * @param notPlayRange 未演奏的范围
+	 */
+	HardLevelEnum(String msg, int amplitudeThreshold, int frequencyThreshold, int tempoEffectiveRangeOf1, int tempoEffectiveRangeOf2,
+			int tempoEffectiveRangeOf4, int tempoEffectiveRangeOf8, int tempoEffectiveRangeOf16, int tempoEffectiveRangeOf32, int integrityRange,
+			int notPlayRange) {
+		this.msg = msg;
+		this.amplitudeThreshold = amplitudeThreshold;
+		this.frequencyThreshold = frequencyThreshold;
+		this.tempoEffectiveRangeOf1 = tempoEffectiveRangeOf1;
+		this.tempoEffectiveRangeOf2 = tempoEffectiveRangeOf2;
+		this.tempoEffectiveRangeOf4 = tempoEffectiveRangeOf4;
+		this.tempoEffectiveRangeOf8 = tempoEffectiveRangeOf8;
+		this.tempoEffectiveRangeOf16 = tempoEffectiveRangeOf16;
+		this.tempoEffectiveRangeOf32 = tempoEffectiveRangeOf32;
+		this.integrityRange = integrityRange;
+		this.notPlayRange = notPlayRange;
+	}
+
+	public String getMsg() {
+		return msg;
+	}
+
+	public int getAmplitudeThreshold() {
+		return amplitudeThreshold;
+	}
+
+	public int getFrequencyThreshold() {
+		return frequencyThreshold;
+	}
+
+	public int getTempoEffectiveRange(int denominator) {
+		
+		int tempoEffectiveRange = 0;
+		
+		switch (denominator) {
+		case 1:
+			tempoEffectiveRange = tempoEffectiveRangeOf1;
+			break;
+		case 2:
+			tempoEffectiveRange = tempoEffectiveRangeOf2;
+			break;
+		case 4:
+			tempoEffectiveRange = tempoEffectiveRangeOf4;
+			break;
+		case 8:
+			tempoEffectiveRange = tempoEffectiveRangeOf8;
+			break;
+		case 16:
+			tempoEffectiveRange = tempoEffectiveRangeOf16;
+			break;
+		case 32:
+			tempoEffectiveRange = tempoEffectiveRangeOf32;
+			break;
+
+		default:
+			break;
+		}
+		return tempoEffectiveRange;
+	}
+
+	public int getIntegrityRange() {
+		return integrityRange;
+	}
+
+	public int getNotPlayRange() {
+		return notPlayRange;
+	}
+
+	@Override
+	public String getCode() {
+		return this.name();
+	}
+
+}

+ 187 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/NoteAnalysis.java

@@ -0,0 +1,187 @@
+package com.yonge.netty.dto;
+
+import com.yonge.toolset.base.enums.BaseEnum;
+
+public class NoteAnalysis {
+
+	public enum NoteErrorType implements BaseEnum<String, NoteErrorType> {
+		RIGHT("演奏正确"), CADENCE_WRONG("节奏错误"), INTONATION_WRONG("音准错误"), INTEGRITY_WRONG("完整度不足"), NOT_PLAY("未演奏");
+
+		private String msg;
+
+		NoteErrorType(String msg) {
+			this.msg = msg;
+		}
+
+		public String getMsg() {
+			return msg;
+		}
+
+		@Override
+		public String getCode() {
+			return this.name();
+		}
+	}
+
+	private int index;
+
+	private int sectionIndex;
+
+	private double startTime;
+
+	private double endTime;
+	
+	private double durationTime;
+
+	private int frequency;
+
+	private int playFrequency = -1;
+
+	private boolean tempo = true;
+
+	private NoteErrorType noteErrorType = NoteErrorType.RIGHT;
+
+	private int score;
+	
+	private int intonationScore;
+	
+	private int tempoScore;
+	
+	private int integrityScore;
+
+	private boolean ignore;
+	
+	private int measureRenderIndex;
+	
+	public NoteAnalysis(int measureRenderIndex, int index, int sectionIndex, int frequency, double durationTime) {
+		this.measureRenderIndex = measureRenderIndex;
+		this.durationTime = durationTime;
+		this.index = index;
+		this.sectionIndex = sectionIndex;
+		this.frequency = frequency;
+	}
+
+	public NoteAnalysis(double startTime, double endTime, int playFrequency) {
+		this.startTime = startTime;
+		this.endTime = endTime;
+		this.durationTime = endTime - startTime;
+		this.playFrequency = playFrequency;
+	}
+
+	public int getMusicalNotesIndex() {
+		return index;
+	}
+
+	public void setMusicalNotesIndex(int index) {
+		this.index = index;
+	}
+
+	public double getStartTime() {
+		return startTime;
+	}
+
+	public void setStartTime(double startTime) {
+		this.startTime = startTime;
+	}
+
+	public double getEndTime() {
+		return endTime;
+	}
+
+	public void setEndTime(double endTime) {
+		this.endTime = endTime;
+	}
+
+	public double getDurationTime() {
+		return durationTime;
+	}
+
+	public void setDurationTime(double durationTime) {
+		this.durationTime = durationTime;
+	}
+
+	public double getPlayFrequency() {
+		return playFrequency;
+	}
+
+	public void setPlayFrequency(int playFrequency) {
+		this.playFrequency = playFrequency;
+	}
+
+	public int getFrequency() {
+		return frequency;
+	}
+
+	public void setFrequency(int frequency) {
+		this.frequency = frequency;
+	}
+
+	public boolean isTempo() {
+		return tempo;
+	}
+
+	public void setTempo(boolean tempo) {
+		this.tempo = tempo;
+	}
+
+	public int getSectionIndex() {
+		return sectionIndex;
+	}
+
+	public void setSectionIndex(int sectionIndex) {
+		this.sectionIndex = sectionIndex;
+	}
+
+	public boolean isIgnore() {
+		return ignore;
+	}
+
+	public void setIgnore(boolean ignore) {
+		this.ignore = ignore;
+	}
+
+	public NoteErrorType getMusicalErrorType() {
+		return noteErrorType;
+	}
+
+	public void setMusicalErrorType(NoteErrorType noteErrorType) {
+		this.noteErrorType = noteErrorType;
+	}
+
+	public int getScore() {
+		return score;
+	}
+
+	public void setScore(int score) {
+		this.score = score;
+	}
+
+	public int getIntonationScore() {
+		return intonationScore;
+	}
+
+	public void setIntonationScore(int intonationScore) {
+		this.intonationScore = intonationScore;
+	}
+
+	public int getTempoScore() {
+		return tempoScore;
+	}
+
+	public void setTempoScore(int tempoScore) {
+		this.tempoScore = tempoScore;
+	}
+
+	public int getIntegrityScore() {
+		return integrityScore;
+	}
+
+	public void setIntegrityScore(int integrityScore) {
+		this.integrityScore = integrityScore;
+	}
+
+	public int getMeasureRenderIndex() {
+		return measureRenderIndex;
+	}
+
+}

+ 107 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/NoteFrequencyRange.java

@@ -0,0 +1,107 @@
+package com.yonge.netty.dto;
+
+import java.math.BigDecimal;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * 一个音符的频率范围,包含最大值和最小值
+ */
+public class NoteFrequencyRange {
+
+	private double minFrequency;
+
+	private double maxFrequency;
+
+	public NoteFrequencyRange(double standardFrequecy, double frequency) {
+		int midiNoteSize = 128;
+		double[] midiNoteFrequencies = new double[midiNoteSize];
+
+		for (int x = 0; x < midiNoteSize; ++x) {
+			midiNoteFrequencies[x] = new BigDecimal(standardFrequecy).multiply(
+					new BigDecimal(Math.pow(2, new BigDecimal(x - 69).divide(new BigDecimal(12), 6, BigDecimal.ROUND_HALF_UP).doubleValue()))).doubleValue();
+
+			if(frequency <= 0){
+				continue;
+			}
+			
+			if (midiNoteFrequencies[x] >= frequency) {
+				if (midiNoteFrequencies[x] - frequency > frequency - midiNoteFrequencies[x - 1]) {
+					// frequency演奏的是上一个音符
+					maxFrequency = midiNoteFrequencies[x - 1] + (midiNoteFrequencies[x] - midiNoteFrequencies[x - 1]) / 2;
+					minFrequency = midiNoteFrequencies[x - 1] - (midiNoteFrequencies[x - 1] - midiNoteFrequencies[x - 2]) / 2;
+				} else {
+					// frequency演奏的是当前音符
+					midiNoteFrequencies[x + 1] = new BigDecimal(standardFrequecy).multiply(
+							new BigDecimal(Math.pow(2, new BigDecimal((x + 1) - 69).divide(new BigDecimal(12), 6, BigDecimal.ROUND_HALF_UP).doubleValue())))
+							.doubleValue();
+					maxFrequency = midiNoteFrequencies[x] + (midiNoteFrequencies[x + 1] - midiNoteFrequencies[x]) / 2;
+					minFrequency = midiNoteFrequencies[x] - (midiNoteFrequencies[x] - midiNoteFrequencies[x - 1]) / 2;
+				}
+				break;
+			}
+		}
+	}
+
+	public NoteFrequencyRange(double frequency) {
+		new NoteFrequencyRange(442, frequency);
+	}
+
+	public double getMinFrequency() {
+		return minFrequency;
+	}
+
+	public void setMinFrequency(double minFrequency) {
+		this.minFrequency = minFrequency;
+	}
+
+	public double getMaxFrequency() {
+		return maxFrequency;
+	}
+
+	public void setMaxFrequency(double maxFrequency) {
+		this.maxFrequency = maxFrequency;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (obj instanceof NoteFrequencyRange) {
+			NoteFrequencyRange nfr = (NoteFrequencyRange) obj;
+			return this.minFrequency == nfr.minFrequency && this.maxFrequency == nfr.maxFrequency;
+		}
+		return false;
+	}
+	
+	@Override
+	public int hashCode() {
+		int result = 17;
+		result = 31 * result + Double.hashCode(minFrequency);
+		result = 31 * result + Double.hashCode(maxFrequency);
+		return result;
+	}
+
+	public static void main(String[] args) {
+		
+		Map<NoteFrequencyRange,Integer> map = new HashMap<NoteFrequencyRange, Integer>();
+		
+		NoteFrequencyRange nfr = new NoteFrequencyRange(442,442);
+		System.out.println(nfr.getMinFrequency() + "-"+ nfr.getMaxFrequency());
+		
+		if(map.containsKey(nfr) == false){
+			map.put(nfr, 1);
+		}
+
+		NoteFrequencyRange nfr1 = new NoteFrequencyRange(442,430);
+		System.out.println(nfr1.getMinFrequency() + "-"+ nfr1.getMaxFrequency());
+		
+		if(map.containsKey(nfr1) == false){
+			map.put(nfr1, 1);
+		}else{
+			map.put(nfr, 2);
+		}
+		
+		System.out.println(nfr.equals(nfr1));
+		System.out.println(map.size());
+	}
+
+}

+ 28 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/NotePlayResult.java

@@ -0,0 +1,28 @@
+package com.yonge.netty.dto;
+
+public class NotePlayResult {
+
+	private boolean status;
+	
+	private double migrationRate;
+	
+	public NotePlayResult() {
+		// TODO Auto-generated constructor stub
+	}
+
+	public boolean getStatus() {
+		return status;
+	}
+
+	public void setStatus(boolean status) {
+		this.status = status;
+	}
+
+	public double getMigrationRate() {
+		return migrationRate;
+	}
+
+	public void setMigrationRate(double migrationRate) {
+		this.migrationRate = migrationRate;
+	}
+}

+ 88 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/SectionAnalysis.java

@@ -0,0 +1,88 @@
+package com.yonge.netty.dto;
+
+import org.apache.commons.lang3.builder.ToStringBuilder;
+
+public class SectionAnalysis {
+
+	// 小节下标
+	private int measureIndex;
+
+	// 音符数
+	private int noteNum;
+
+	// 持续时长
+	private double durationTime;
+
+	// 得分
+	private float score;
+	
+	private boolean isIngore;
+	
+	private int measureRenderIndex;
+	
+	public SectionAnalysis() {
+		// TODO Auto-generated constructor stub
+	}
+
+	public SectionAnalysis(int index, int noteNum, float durationTime, float score, boolean isIngore) {
+		this.measureIndex = index;
+		this.noteNum = noteNum;
+		this.durationTime = durationTime;
+		this.score = score;
+		this.isIngore = isIngore;
+	}
+
+	public int getIndex() {
+		return measureIndex;
+	}
+
+	public void setIndex(int measureIndex) {
+		this.measureIndex = measureIndex;
+	}
+
+	public int getNoteNum() {
+		return noteNum;
+	}
+
+	public void setNoteNum(int noteNum) {
+		this.noteNum = noteNum;
+	}
+
+	public double getDurationTime() {
+		return durationTime;
+	}
+
+	public void setDurationTime(double durationTime) {
+		this.durationTime = durationTime;
+	}
+
+	public float getScore() {
+		return score;
+	}
+
+	public void setScore(float score) {
+		this.score = score;
+	}
+	
+	public boolean isIngore() {
+		return isIngore;
+	}
+
+	public void setIsIngore(boolean isIngore) {
+		this.isIngore = isIngore;
+	}
+
+	public int getMeasureRenderIndex() {
+		return measureRenderIndex;
+	}
+
+	public void setMeasureRenderIndex(int measureRenderIndex) {
+		this.measureRenderIndex = measureRenderIndex;
+	}
+
+	@Override
+	public String toString() {
+		return ToStringBuilder.reflectionToString(this);
+	}
+
+}

+ 1009 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java

@@ -0,0 +1,1009 @@
+package com.yonge.netty.dto;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import be.tarsos.dsp.pitch.FastYin;
+
+import com.yonge.audio.analysis.Signals;
+import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.netty.dto.NoteAnalysis.NoteErrorType;
+import com.yonge.netty.entity.MusicXmlBasicInfo;
+import com.yonge.netty.entity.MusicXmlNote;
+import com.yonge.netty.entity.MusicXmlSection;
+import com.yonge.netty.server.processor.WaveformWriter;
+
+/**
+ * 用户通道上下文
+ */
+public class UserChannelContext {
+	
+	private final static Logger LOGGER = LoggerFactory.getLogger(UserChannelContext.class);
+	
+	// Percussion subject ids — scored on amplitude/tempo only, pitch detection is skipped
+	private final static List<Integer> percussionList = Arrays.asList(23, 113);
+	
+	// Valid pitch band (Hz); detections outside it are treated as noise
+	private final static int MIN_FREQUECY = 100;
+	
+	private final static int MAX_FREQUECY = 2000;
+	
+	// YIN pitch detector, lazily created in init() from sample rate / buffer size
+	private FastYin detector;
+	
+	private String user;
+	
+	// A4 tuning reference, Hz
+	private double standardFrequecy = 442;
+	
+	// Client-reported latency compensation, ms
+	private float offsetMS;
+	
+	// Extra offset measured from the first valid signal of the current note, ms
+	private double dynamicOffset;
+	
+	private String platform;
+	
+	private Long recordId;
+	
+	private Integer subjectId;
+	
+	private float beatDuration;
+	
+	// True once the first valid signal has been seen and dynamicOffset computed
+	private boolean delayProcessed;
+	
+	// Song id -> parsed musicxml metadata
+	private ConcurrentHashMap<Long, MusicXmlBasicInfo> songMusicXmlMap = new ConcurrentHashMap<Long, MusicXmlBasicInfo>();
+
+	private WaveformWriter waveFileProcessor;
+
+	// Note currently being accumulated from incoming chunks
+	private NoteAnalysis processingNote = new NoteAnalysis(0, 0, -1);
+	
+	private AtomicInteger evaluatingSectionIndex = new AtomicInteger(0);
+	
+	// Finished per-note / per-section results and raw per-chunk measurements
+	private List<NoteAnalysis> doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+	
+	private List<SectionAnalysis> doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+	
+	private List<ChunkAnalysis> totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+	
+	// Bytes received but not yet consumed by the analyser
+	private byte[] channelBufferBytes = new byte[0];
+	
+	// Total audio time received so far, ms
+	private double playTime;
+	
+	private HardLevelEnum hardLevel = HardLevelEnum.ADVANCED;
+	
+	private boolean handlerSwitch;
+	
+	/**
+	 * Matches a detected pitch against an expected score note.
+	 * Status is true when the rounded frequencies are equal, or when the pitch
+	 * falls inside the note's band (see NoteFrequencyRange); migrationRate
+	 * expresses the drift relative to half the distance from the note centre
+	 * to the nearer band edge (0 = exact hit).
+	 *
+	 * @param xmlNote expected note from the score
+	 * @param playFrequency detected pitch, Hz
+	 */
+	private NotePlayResult queryNoteFrequency(MusicXmlNote xmlNote, double playFrequency) {
+
+		NotePlayResult result = new NotePlayResult();
+
+		boolean status = false;
+		double migrationRate = 0;
+
+		if (Math.round(xmlNote.getFrequency()) == Math.round(playFrequency)) {
+			status = true;
+			migrationRate = 0;
+		} else {
+			NoteFrequencyRange noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, xmlNote.getFrequency());
+
+			if (noteFrequencyRange.getMinFrequency() > playFrequency || playFrequency > noteFrequencyRange.getMaxFrequency()) {
+				status = false;
+			} else {
+
+				status = true;
+
+				// Drift measured against the half-band on the side the pitch fell on.
+				if (Math.round(playFrequency) < Math.round(xmlNote.getFrequency())) {
+					double min = Math.abs(xmlNote.getFrequency() - noteFrequencyRange.getMinFrequency()) / 2;
+					migrationRate = Math.abs(playFrequency - xmlNote.getFrequency()) / min;
+				} else {
+					double max = Math.abs(xmlNote.getFrequency() - noteFrequencyRange.getMaxFrequency()) / 2;
+					migrationRate = Math.abs(playFrequency - xmlNote.getFrequency()) / max;
+				}
+			}
+		}
+
+		result.setStatus(status);
+		result.setMigrationRate(migrationRate);
+
+		return result;
+	}
+	
+	/**
+	 * Configures this session before evaluation starts.
+	 *
+	 * @param platform client platform identifier
+	 * @param heardLevel difficulty preset name, resolved via HardLevelEnum.valueOf
+	 * @param subjectId instrument/subject id (percussion ids skip pitch detection)
+	 * @param beatDuration beat duration, ms
+	 * @param sampleRate audio sample rate used by the pitch detector
+	 * @param bufferSize chunk size in samples for the pitch detector
+	 */
+	public void init(String platform, String heardLevel, int subjectId, float beatDuration,float sampleRate, int bufferSize) {
+		this.platform = platform;
+		this.subjectId = subjectId;
+		this.beatDuration = beatDuration;
+		hardLevel = HardLevelEnum.valueOf(heardLevel);
+		// The detector is created once per session; sampleRate/bufferSize must not change afterwards.
+		if(detector == null){
+			detector = new FastYin(sampleRate, bufferSize);
+		}
+	}
+	
+	public void setUser(String user) {
+		this.user = user;
+	}
+
+	public Long getRecordId() {
+		return recordId;
+	}
+
+	public void setRecordId(Long recordId) {
+		this.recordId = recordId;
+	}
+
+	public boolean getHandlerSwitch() {
+		return handlerSwitch;
+	}
+
+	public void setHandlerSwitch(boolean handlerSwitch) {
+		this.handlerSwitch = handlerSwitch;
+	}
+
+	public float getOffsetMS() {
+		return offsetMS;
+	}
+
+	public void setOffsetMS(float offsetMS) {
+		this.offsetMS = offsetMS;
+	}
+
+	public float getBeatDuration() {
+		return beatDuration;
+	}
+
+	public void setBeatDuration(float beatDuration) {
+		this.beatDuration = beatDuration;
+	}
+
+	public HardLevelEnum getHardLevel() {
+		return hardLevel;
+	}
+
+	public ConcurrentHashMap<Long, MusicXmlBasicInfo> getSongMusicXmlMap() {
+		return songMusicXmlMap;
+	}
+
+	public WaveformWriter getWaveFileProcessor() {
+		return waveFileProcessor;
+	}
+
+	public void setWaveFileProcessor(WaveformWriter waveFileProcessor) {
+		this.waveFileProcessor = waveFileProcessor;
+	}
+
+	public NoteAnalysis getProcessingNote() {
+		return processingNote;
+	}
+
+	public void setProcessingNote(NoteAnalysis processingNote) {
+		this.processingNote = processingNote;
+	}
+	
+	public List<SectionAnalysis> getDoneSectionAnalysisList() {
+		return doneSectionAnalysisList;
+	}
+
+	public List<NoteAnalysis> getDoneNoteAnalysisList() {
+		return doneNoteAnalysisList;
+	}
+
+	/**
+	 * Clears all per-recording state so the channel can start a new
+	 * evaluation session. The loaded musicxml map is intentionally kept.
+	 */
+	public void resetUserInfo() {
+		waveFileProcessor = null;
+		processingNote = new NoteAnalysis(0,0,-1);
+		evaluatingSectionIndex = new AtomicInteger(0);
+		channelBufferBytes = new byte[0];
+		doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+		doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+		totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+		recordId = null;
+		playTime = 0;
+		delayProcessed = false;
+		offsetMS = 0;
+		dynamicOffset = 0;
+		handlerSwitch = false;
+	}
+	
+	/**
+	 * Returns the musicxml metadata for the given song, or the first loaded
+	 * song when songId is null.
+	 * NOTE(review): findFirst().get() throws NoSuchElementException when no
+	 * song has been loaded yet — callers appear to load the xml first; confirm.
+	 */
+	public MusicXmlBasicInfo getMusicXmlBasicInfo(Integer songId){
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+		if (songId == null) {
+			musicXmlBasicInfo = songMusicXmlMap.values().stream().findFirst().get();
+		} else {
+			musicXmlBasicInfo = songMusicXmlMap.get(songId);
+		}
+		return musicXmlBasicInfo;
+	}
+	
+	/** Returns the section (measure) metadata at the given section index. */
+	public MusicXmlSection getCurrentMusicSection(Integer songId, int sectionIndex){
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+		return musicXmlBasicInfo.getMusicXmlSectionMap().get(sectionIndex);
+	}
+
+	public MusicXmlNote getCurrentMusicNote(Integer songId, Integer noteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(noteIndex == null){
+			noteIndex = processingNote.getMusicalNotesIndex();
+		}
+		final int index = noteIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		int totalNoteIndex = getTotalMusicNoteIndex(null);
+		if (musicXmlBasicInfo != null && index <= totalNoteIndex) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == index).findFirst().get();
+		}
+
+		return null;
+	}
+
+	/**
+	 * Highest note index in the score, or -1 when no score is loaded.
+	 * NOTE(review): max(...).get() throws if the score has no notes — confirm
+	 * musicXmlInfos is never empty.
+	 */
+	public int getTotalMusicNoteIndex(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMusicalNotesIndex()).distinct().max(Integer::compareTo).get();
+		}
+
+		return -1;
+	}
+
+	/**
+	 * Notes of the given section, or the section of the in-progress note when
+	 * sectionIndex is null.
+	 * NOTE(review): the filter compares getMusicalNotesIndex() against a
+	 * SECTION index — this looks like it should be getMeasureIndex(); confirm
+	 * against callers before relying on this overload.
+	 */
+	public List<MusicXmlNote> getCurrentMusicSection(Integer songId, Integer sectionIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(sectionIndex == null){
+			sectionIndex = processingNote.getSectionIndex();
+		}
+		final int index = sectionIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == index)
+					.sorted(Comparator.comparing(MusicXmlNote::getMusicalNotesIndex)).collect(Collectors.toList());
+		}
+
+		return null;
+	}
+
+	/** Number of distinct measures in the score, or -1 when none is loaded. */
+	public int getTotalMusicSectionSize(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return (int) musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMeasureIndex()).distinct().count();
+		}
+
+		return -1;
+	}
+	
+	/**
+	 * Measure index containing the given note, or -1 when out of range.
+	 * NOTE(review): the bound check uses getTotalMusicNoteIndex(null),
+	 * ignoring songId — presumably fine while only one song is loaded; verify.
+	 */
+	public int getMusicSectionIndex(Integer songId, int musicXmlNoteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		
+		if(getTotalMusicNoteIndex(null) < musicXmlNoteIndex){
+			return -1;
+		}
+		
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == musicXmlNoteIndex).findFirst().get().getMeasureIndex();
+		}
+
+		return -1;
+	}
+	
+	public byte[] getChannelBufferBytes() {
+		return channelBufferBytes;
+	}
+
+	public void setChannelBufferBytes(byte[] channelBufferBytes) {
+		this.channelBufferBytes = channelBufferBytes;
+	}
+
+	public AtomicInteger getEvaluatingSectionIndex() {
+		return evaluatingSectionIndex;
+	}
+
+	/**
+	 * Consumes one chunk of decoded samples: measures pitch/level/amplitude,
+	 * advances the play clock, accumulates the measurement into the note
+	 * currently expected by the score, and — once the note's time window has
+	 * elapsed — scores it and moves on to the next note.
+	 *
+	 * Statement order matters here (clock advance, peak detection against the
+	 * previous chunk, one-shot latency calibration); keep it as-is.
+	 */
+	public void handle(float[] samples, AudioFormat audioFormat){
+		
+		//YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length , audioFormat.getSampleRate());
+		//int playFrequency = (int) frequencyDetector.getFrequency(samples);
+		
+		// Percussion subjects are scored on amplitude only — skip pitch detection.
+		int playFrequency = -1;
+		if(!percussionList.contains(subjectId)){
+			playFrequency = (int)detector.getPitch(samples).getPitch();
+		}
+		
+		int splDb = (int) Signals.soundPressureLevel(samples);
+		int power = (int) Signals.power(samples);
+		int amplitude = (int) Signals.norm(samples);
+		//float rms = Signals.rms(samples);
+		
+		// Chunk duration in ms, computed from byte count (2 bytes/sample) over the byte rate.
+		double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
+		
+		playTime += durationTime;
+		
+		// Current expected note from the score
+		MusicXmlNote musicXmlNote = getCurrentMusicNote(null,null);
+
+		if (musicXmlNote == null) {
+			return;
+		}
+		
+		// Note currently being accumulated; start a fresh one when none is in progress
+		NoteAnalysis noteAnalysis = getProcessingNote();
+		if(noteAnalysis == null || noteAnalysis.getDurationTime() == 0) {
+			noteAnalysis = new NoteAnalysis(musicXmlNote.getMeasureRenderIndex(), musicXmlNote.getMusicalNotesIndex(), musicXmlNote.getMeasureIndex(), (int)musicXmlNote.getFrequency(), musicXmlNote.getDuration());
+		}
+		
+		evaluatingSectionIndex.set(noteAnalysis.getSectionIndex());
+		
+		if (noteAnalysis.getMusicalNotesIndex() >= 0 && noteAnalysis.getMusicalNotesIndex() <= getTotalMusicNoteIndex(null)) {
+			
+			LOGGER.debug("user:{}  delayProcessed:{}  dynamicOffset:{}  Frequency:{}  splDb:{}  amplitude:{}  time:{}", user, delayProcessed, dynamicOffset, playFrequency, splDb, amplitude, playTime);
+			
+			ChunkAnalysis chunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude);
+			
+			// Mark amplitude jumps relative to the previous chunk (percussion only).
+			if(totalChunkAnalysisList.size() > 0){
+				if(totalChunkAnalysisList.get(totalChunkAnalysisList.size() - 1).getAmplitude() + 2 < chunkAnalysis.getAmplitude()){
+					chunkAnalysis.setPeak(true);//只针对打击乐
+				}
+			}
+			totalChunkAnalysisList.add(chunkAnalysis);
+			
+			// Valid-signal test: amplitude threshold for percussion, pitch band otherwise.
+			boolean flag = false; //是否收到有效信号
+			if(percussionList.contains(subjectId)){
+				flag = chunkAnalysis.getAmplitude() > hardLevel.getAmplitudeThreshold();
+			}else{
+				flag = chunkAnalysis.getFrequency() > MIN_FREQUECY && chunkAnalysis.getFrequency() < MAX_FREQUECY;
+			}
+			
+			// One-shot latency calibration from the first valid signal of the session.
+			if(delayProcessed == false && flag){
+				
+				delayProcessed = true;
+				
+				// Offset = how far into the note the first valid signal arrived;
+				// discarded when it exceeds the level's tempo tolerance window.
+				//playTime = musicXmlNote.getTimeStamp() + durationTime;
+				dynamicOffset = chunkAnalysis.getStartTime() - musicXmlNote.getTimeStamp();
+				if(100 * dynamicOffset / musicXmlNote.getDuration() > (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()))){
+					dynamicOffset = 0;
+				}
+			}
+			
+			// The note's (offset-adjusted) time window has fully elapsed — score it.
+			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset)) {
+
+				if (musicXmlNote.getDontEvaluating()) {
+					noteAnalysis.setIgnore(true);
+				}
+				
+				// Tempo: an uninterrupted pitch (or amplitude pattern) across the
+				// note's duration counts as rhythmically correct.
+				boolean tempo = true;
+				if (percussionList.contains(subjectId)) {
+					noteAnalysis.setPlayFrequency(-1);
+					tempo = computeTempoWithAmplitude2(musicXmlNote);
+				}else{
+					noteAnalysis.setPlayFrequency(computeFrequency(musicXmlNote));
+					tempo = computeTempoWithFrequency(musicXmlNote);
+				}
+				
+				noteAnalysis.setTempo(tempo);
+				
+				evaluateForNote(musicXmlNote, noteAnalysis);// score the finished note
+
+				LOGGER.debug("当前音符下标[{}] 预计频率:{} 实际频率:{} 节奏:{}", noteAnalysis.getMusicalNotesIndex(), musicXmlNote.getFrequency(), noteAnalysis.getPlayFrequency(),
+						noteAnalysis.isTempo());
+				
+				doneNoteAnalysisList.add(noteAnalysis);
+				
+				// Prepare the next expected note (frequency -1 / duration 0 when past the end).
+				int nextNoteIndex = musicXmlNote.getMusicalNotesIndex() + 1;
+				float nextNoteFrequence = -1;
+				double standDuration = 0;
+				int measureRenderIndex = 0;
+				MusicXmlNote nextMusicXmlNote = getCurrentMusicNote(null, nextNoteIndex);
+				if(nextMusicXmlNote != null){
+					nextNoteFrequence = nextMusicXmlNote.getFrequency();
+					standDuration = nextMusicXmlNote.getDuration();
+					measureRenderIndex = nextMusicXmlNote.getMeasureRenderIndex();
+				}
+				
+				NoteAnalysis nextNoteAnalysis = new NoteAnalysis(measureRenderIndex, nextNoteIndex, getMusicSectionIndex(null, nextNoteIndex), (int)nextNoteFrequence, standDuration);
+
+				noteAnalysis = nextNoteAnalysis;
+
+			}
+
+			setProcessingNote(noteAnalysis);
+		}
+		
+	}
+	
+
+	/**
+	 * Aggregates the finished notes of one measure into a SectionAnalysis and
+	 * records it. Only runs once every note of the measure has been evaluated;
+	 * the measure score is the plain average of its non-ignored note scores.
+	 *
+	 * @param sectionIndex measure index to aggregate
+	 * @param subjectId instrument/subject id (currently unused here)
+	 * @return the measure score, or -1 when the measure is not complete yet
+	 *         or all measures are already scored
+	 */
+	public int evaluateForSection(int sectionIndex, int subjectId){
+
+		int score = -1;
+		if(doneSectionAnalysisList.size() >= getTotalMusicSectionSize(null)){
+			return score;
+		}
+		
+		// All evaluated notes belonging to this measure
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.getSectionIndex() == sectionIndex).collect(Collectors.toList());
+		
+		long ignoreSize = noteAnalysisList.stream().filter(t -> t.isIgnore()).count();
+
+		SectionAnalysis sectionAnalysis = new SectionAnalysis();
+		sectionAnalysis.setIndex(sectionIndex);
+		sectionAnalysis.setNoteNum(noteAnalysisList.size());
+		sectionAnalysis.setIsIngore(ignoreSize == noteAnalysisList.size());
+		
+		// Score only when every note of the measure has been evaluated
+		MusicXmlSection musicXmlSection = getCurrentMusicSection(null, sectionIndex);
+		if(noteAnalysisList.size() == musicXmlSection.getNoteNum()){
+			sectionAnalysis.setMeasureRenderIndex(noteAnalysisList.stream().findFirst().get().getMeasureRenderIndex());
+			// Notes that actually count towards the score
+			List<NoteAnalysis>  noteList = noteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+			
+			if(noteList != null && noteList.size() > 0){
+				score = noteList.stream().mapToInt(t -> t.getScore()).sum() / noteList.size();
+			}
+			sectionAnalysis.setDurationTime(noteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+			sectionAnalysis.setScore(score);
+
+			LOGGER.debug("小节评分:{}",sectionAnalysis);
+			doneSectionAnalysisList.add(sectionAnalysis);
+		}
+		
+		return score;
+	}
+	
+	public Map<String, Integer> evaluateForMusic() {
+
+		Map<String, Integer> result = new HashMap<String, Integer>();
+		
+		result.put("playTime", (int) doneNoteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+        result.put("recordId", recordId.intValue());
+		
+		// 取出需要评测的音符
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+
+		if (noteAnalysisList != null && noteAnalysisList.size() > 0) {
+			int intonationScore = 0;
+			int tempoScore = 0;
+			int integrityScore = 0;
+			int socre = 0;
+
+			for (NoteAnalysis note : noteAnalysisList) {
+				intonationScore += note.getIntonationScore();
+				tempoScore += note.getTempoScore();
+				integrityScore += note.getIntegrityScore();
+				socre += note.getScore();
+			}
+
+			tempoScore = tempoScore / noteAnalysisList.size();
+			intonationScore = intonationScore / noteAnalysisList.size();
+			integrityScore = integrityScore / noteAnalysisList.size();
+
+			result.put("cadence", tempoScore);
+			result.put("intonation", intonationScore);
+			result.put("integrity", integrityScore);
+
+			int score = socre / noteAnalysisList.size();
+
+			// 平均得分
+			if (getMusicXmlBasicInfo(null).getSubjectId() == 23 || getMusicXmlBasicInfo(null).getSubjectId() == 113) {
+				score = tempoScore;
+			}
+			result.put("score", score);
+		} else {
+			result.put("cadence", 0);
+			result.put("intonation", 0);
+			result.put("integrity", 0);
+			result.put("score", 0);
+		}
+		return result;
+	}
+	
+
	/**
	 * Grades one note and records the result on the given {@link NoteAnalysis}:
	 * the musical error type, the intonation / tempo / integrity sub scores and
	 * the combined final score.
	 *
	 * @param musicXmlNote reference note from the score (expected pitch and timing)
	 * @param noteAnalysis analysis record of what was actually played; mutated in place
	 */
	public void evaluateForNote(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
		
		// tolerance window (ms) around the note boundaries, scaled by the difficulty level
		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
		
		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
		
		// chunks falling inside the tolerance window (doubleToLongBits gives a total order on doubles)
		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
		
		// re-anchor the window at the detected onset of the played note
		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
		
		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
		
		// NOTE(review): throws IndexOutOfBoundsException when the corrected window is
		// empty — confirm callers guarantee at least one chunk in the window
		double durationTime = chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime() - chunkAnalysisList.get(0).getStartTime();
		
		double playDurationTime = 0;
		
		if (percussionList.contains(subjectId)) {
			if (noteAnalysis.getFrequency() == -1) {// rest: no pitch expected
				if (!noteAnalysis.isTempo()) {
					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
				} else {
					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
				}
			}else{
				// count chunks loud enough to be a beat
				int beatTimes = (int) chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count();
				if(beatTimes == 0){
					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
				}else if (!noteAnalysis.isTempo()) {
					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
				} else {
					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
				}
			}
		} else {
			
			NotePlayResult notePlayResult = queryNoteFrequency(musicXmlNote, noteAnalysis.getPlayFrequency());
			
			if (noteAnalysis.getFrequency() == -1) {// rest: silence is the expected signal

				// time spent actually silent within the window
				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= MIN_FREQUECY).mapToDouble(t -> t.getDurationTime()).sum();

				if (!noteAnalysis.isTempo()) {
					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
				} else if (playDurationTime * 100 / durationTime < hardLevel.getIntegrityRange()) {
					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
				} else if (notePlayResult.getStatus() == false) {
					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
				} else {
					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
				}
			} else {
				// time spent producing a pitch in the plausible frequency band
				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() > MIN_FREQUECY && t.getFrequency() < MAX_FREQUECY)
						.mapToDouble(t -> t.getDurationTime()).sum();

				if (playDurationTime * 100 / durationTime < hardLevel.getNotPlayRange()) {
					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
					LOGGER.debug("未演奏:{}", playDurationTime * 100 / durationTime);
				} else if (playDurationTime * 100 / durationTime < hardLevel.getIntegrityRange()) {
					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
					LOGGER.debug("完整度不足:{}", playDurationTime * 100 / durationTime);
				} else if (!noteAnalysis.isTempo()) {
					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
				} else if (notePlayResult.getStatus() == false) {
					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
				} else {
					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
				}
			}
			
		}

		// intonation from the cent distance between played and expected pitch:
		// 100 - |Δcents| * 20 / 17, clamped to [0, 100]
		int tempoScore = 0;
		int integrityScore = 0;
		int intonationScore = 100 - new BigDecimal(Math.abs(YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getPlayFrequency())
				- YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getFrequency()))).multiply(new BigDecimal(20)).divide(new BigDecimal(17), BigDecimal.ROUND_UP)
				.setScale(0, BigDecimal.ROUND_UP).intValue();
		if (intonationScore < 0) {
			intonationScore = 0;
		} else if (intonationScore > 100) {
			intonationScore = 100;
		}

		if (noteAnalysis.getMusicalErrorType() == NoteErrorType.NOT_PLAY) {
			intonationScore = 0;
		} else {

			if (noteAnalysis.isTempo()) {
				tempoScore = 100;
				noteAnalysis.setTempoScore(tempoScore);
			}

			// integrity relative to the required completeness range, capped at 100
			integrityScore = (int) (playDurationTime * 100 * 100 / hardLevel.getIntegrityRange() / durationTime);
			if (integrityScore > 100) {
				integrityScore = 100;
			}
			noteAnalysis.setIntegrityScore(integrityScore);
		}
		noteAnalysis.setIntonationScore(intonationScore);
		if (percussionList.contains(subjectId)) {
			// percussion is graded on rhythm only
			noteAnalysis.setScore(tempoScore);
		} else {
			// NOTE(review): divide(new BigDecimal(3), 2) passes 2 as the ROUNDING MODE
			// (ROUND_CEILING), not as a scale — confirm a scale of 2 was not intended
			noteAnalysis.setScore(new BigDecimal(intonationScore + tempoScore + integrityScore).divide(new BigDecimal(3), 2).setScale(0, BigDecimal.ROUND_UP)
					.intValue());
		}
	}
+	
	/**
	 * Computes the played frequency (Hz) of the given note as the integer mean of
	 * the pitched chunks inside the note's (onset-corrected) time window.
	 *
	 * @param musicXmlNote reference note from the score
	 * @return mean frequency of the valid chunks, or -1 when no usable signal exists
	 */
	private int computeFrequency(MusicXmlNote musicXmlNote) {
		
		// tolerance window (ms) around the note, scaled by the difficulty level
		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
		
		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
		
		LOGGER.debug("------------TimeStamp:{}  Duration:{}  floatingRange:{}  StartTime:{}  EndTime:{}------------", musicXmlNote.getTimeStamp(), musicXmlNote.getDuration(), floatingRange, startTime, endTime);
		
		// chunks inside the tolerance window (doubleToLongBits gives a total order on doubles)
		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
		
		// re-anchor the window at the detected onset of the played note
		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
		
		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());

		LOGGER.debug("------------ correctedStartTime:{}  correctedEndTime:{}------------", correctedStartTime, correctedEndTime);
		
		// keep only the leading portion of the signal, sized by the required completeness
		int elementSize = chunkAnalysisList.size() * hardLevel.getIntegrityRange() / 100;
		chunkAnalysisList = chunkAnalysisList.subList(0, elementSize);
		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
			return -1;
		}
		
		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
		
		LOGGER.debug("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkAnalysisList.get(chunkAnalysisList.size() - 1)
				.getEndTime());
		
		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
		
		// only frequencies inside the plausible band (MIN_FREQUECY, MAX_FREQUECY) count
		List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency()).filter(t -> t.doubleValue() > MIN_FREQUECY && t.doubleValue() < MAX_FREQUECY)
				.collect(Collectors.toList());
		
		if (chunkFrequencyList.size() == 0) {
			return -1;
		}

		// integer mean of the collected frequencies
		int frequency = (int) (chunkFrequencyList.stream().mapToInt(t -> t).sum() / chunkFrequencyList.size());

		return frequency;
	}
+	
	/**
	 * Pitch-based rhythm check: within the note's time window there must be one —
	 * and only one — sounding pitch, it must be continuous, and it has to start
	 * within the allowed entry range. When the note has the same pitch as the
	 * previous one, the two notes must be separated by a silence.
	 *
	 * @param musicXmlNote reference note from the score
	 * @return true when the rhythm of the played note is considered correct
	 */
	private boolean computeTempoWithFrequency(MusicXmlNote musicXmlNote){
		
		// tolerance window (ms) around the note, scaled by the difficulty level
		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
		
		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
		
		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
		
		// re-anchor the window at the detected onset of the played note
		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
		
		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
		
		// keep the MIDDLE portion of the signal, sized by the required completeness
		int elementSize = chunkAnalysisList.size() * hardLevel.getIntegrityRange() / 100;
		
		int startIndex = (chunkAnalysisList.size() - elementSize) / 2 - 1;
		startIndex = startIndex < 1 ? 0 : startIndex;
		List<ChunkAnalysis> chunkList = chunkAnalysisList.subList(startIndex, elementSize + startIndex);
		
		if(chunkList == null || chunkList.size() == 0){
			return false;
		}
		
		if (musicXmlNote.getFrequency() == -1) {// rest: at most one pitched chunk may appear
			return chunkList.stream().filter(t -> t.getFrequency() > MIN_FREQUECY).count() <= 1;
		}

		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
		// inspect the skipped leading chunks: any silent one means the pitch did not
		// simply carry over from before the window
		List<ChunkAnalysis> ignoreHeaderList = chunkAnalysisList.subList(0, startIndex);
		if(ignoreHeaderList != null && ignoreHeaderList.size() > 0){
			for(ChunkAnalysis ca : ignoreHeaderList){
				if(ca.getFrequency() < MIN_FREQUECY){
					firstChunkAnalysis.setFrequency(-1);
					break;
				}
			}
		}
		
		double firstChunkStartTime = firstChunkAnalysis.getStartTime();
		
		// NOTE(review): findFirst() on this filter yields the EARLIEST chunk ending
		// before the window, not the immediately preceding one — confirm intent
		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) < Double.doubleToLongBits(firstChunkStartTime)).findFirst();

		ChunkAnalysis lastChunkAnalysis = null;
		if (chunkAnalysisOptional.isPresent()) {
			lastChunkAnalysis = chunkAnalysisOptional.get();
		}
		
		// no preceding chunk at all: fall back to a silent placeholder
		if(lastChunkAnalysis == null){
			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
		}
		
		/*List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
		
		if(chunkList.size() == 0){
			return false;
		}*/
		
		NoteFrequencyRange noteFrequencyRange = null;
		ChunkAnalysis chunkAnalysis = null;
		boolean tempo = true;
		//boolean isContinue = true;
		//int unplayedSize = 0;
		int firstPeakIndex = -1;
		
		// bucket the pitched chunks by note-frequency range and count each bucket
		Map<NoteFrequencyRange, Integer> signalGrouping = new HashMap<NoteFrequencyRange, Integer>();
		
		for (int i = 0; i < chunkList.size(); i++) {
			chunkAnalysis = chunkList.get(i);
			if (chunkAnalysis != null) {
				// once a pitch has been seen, keep counting even over silent chunks
				if (chunkAnalysis.getFrequency() > MIN_FREQUECY || firstPeakIndex > -1) {
					
					if (firstPeakIndex == -1) {
						firstPeakIndex = i;
					}
					
					noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, chunkAnalysis.getFrequency());
					
					if (signalGrouping.containsKey(noteFrequencyRange)) {
						signalGrouping.put(noteFrequencyRange, signalGrouping.get(noteFrequencyRange) + 1);
					} else {
						signalGrouping.put(noteFrequencyRange, 1);
					}
				}
			}
		}
		
		Integer maxTimes = 0, totalTimes = 0;
		
		// dominant bucket size and total number of bucketed chunks
		for (Entry<NoteFrequencyRange, Integer> entry : signalGrouping.entrySet()) {
			if (entry.getValue() > maxTimes) {
				maxTimes = entry.getValue();
			}
			totalTimes = totalTimes + entry.getValue();
		}
		
		if(totalTimes == 0){
			totalTimes = chunkList.size();
		}
		
		// rhythm fails when the dominant pitch does not cover enough of the window
		if (maxTimes * 100 / totalTimes < hardLevel.getIntegrityRange()) {
			tempo = false;
			LOGGER.debug("节奏错误原因:信号分堆后的最大数量不足指定的完成比例");
		}
		
		// dead code below kept from an earlier chunk-scan approach, for reference
		/**
		for (int i = 0; i < chunkList.size(); i++) {
			chunkAnalysis = chunkList.get(i);
			if (chunkAnalysis != null) {
				if (chunkAnalysis.getFrequency() > MIN_FREQUECY) {
					
					tempo = true;
					if (firstPeakIndex == -1) {
						firstPeakIndex = i;
						noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, chunkAnalysis.getFrequency());
					} else if (noteFrequencyRange.getMinFrequency() > chunkAnalysis.getFrequency()
							|| chunkAnalysis.getFrequency() > noteFrequencyRange.getMaxFrequency()) {// same note?
						// octave below or above?
						if(!((noteFrequencyRange.getMinFrequency() < chunkAnalysis.getFrequency() * 2 && chunkAnalysis.getFrequency() * 2 < noteFrequencyRange.getMaxFrequency())
								|| (noteFrequencyRange.getMinFrequency() < chunkAnalysis.getFrequency() / 2 && chunkAnalysis.getFrequency() / 2 < noteFrequencyRange.getMaxFrequency()))){
							tempo = false;
							LOGGER.debug("节奏错误原因:不是同一个音[{}]:{}-{}", chunkAnalysis.getFrequency(), noteFrequencyRange.getMinFrequency(), noteFrequencyRange.getMaxFrequency());
							break;
						}
					}
					if (isContinue == false) {
						if ((i + 1) / chunkAnalysisList.size() < hardLevel.getIntegrityRange()) {
							if (unplayedSize > 0) {
								tempo = false;
								LOGGER.debug("节奏错误原因:信号不连续");
								break;
							}
						}
					}
				} else {
					if (tempo == true) {
						isContinue = false;
						unplayedSize++;
					}
				}
			}
		}
		*/
		
		if (tempo) {
			// the note must start early enough within the window
			if(firstPeakIndex * 100 /chunkList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
				tempo = false;
				LOGGER.debug("节奏错误原因:进入时间点太晚");
			}else{
				// reject a pitch that merely carried over from the previous note
				if(firstChunkAnalysis.getFrequency() > MIN_FREQUECY && lastChunkAnalysis.getFrequency() > MIN_FREQUECY){
					tempo = new NoteFrequencyRange(standardFrequecy, firstChunkAnalysis.getFrequency()).equals(new NoteFrequencyRange(standardFrequecy, lastChunkAnalysis.getFrequency())) == false;
					if(tempo == false){
						LOGGER.debug("节奏错误原因:上一个音[{}]延续下来导致的", lastChunkAnalysis.getFrequency());
					}
				}
			}
		}
		
		return tempo;
	}
+	
	/**
	 * Amplitude-based rhythm check for percussion: looks for exactly one
	 * uninterrupted amplitude peak inside the note's (onset-corrected) window,
	 * starting early enough within the allowed entry range.
	 *
	 * @param musicXmlNote reference note from the score
	 * @return true when the beat timing is considered correct
	 */
	private boolean computeTempoWithAmplitude2(MusicXmlNote musicXmlNote) {

		// tolerance window (ms) around the note, scaled by the difficulty level
		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
		
		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
		
		LOGGER.debug("------------TimeStamp:{}  floatingRange:{}  StartTime:{}  EndTime:{}------------", musicXmlNote.getTimeStamp(), floatingRange, startTime, endTime);
		
		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
		
		// re-anchor the window at the detected onset of the played note
		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
		
		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
		
		// keep the leading portion of the signal, sized by the entry-range allowance
		int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())) / 100;
		List<ChunkAnalysis> chunkList = chunkAnalysisList.subList(0, elementSize);
		
		if(chunkList == null || chunkList.size() == 0){
			return false;
		}
		
		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
		
		LOGGER.debug("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkList.get(chunkList.size() - 1)
				.getEndTime());

		if (musicXmlNote.getFrequency() == -1) {// rest: no beat may occur at all
			
			LOGGER.debug("--Amplitude:{}  Denominator:{}",chunkList.stream().map(t -> t).collect(Collectors.toList()), musicXmlNote.getDenominator());
			return chunkList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
		}
		
		// NOTE(review): findFirst() on this filter yields the EARLIEST chunk ending
		// before the window, not the immediately preceding one — confirm intent
		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) < Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();

		ChunkAnalysis lastChunkAnalysis = null;
		if (chunkAnalysisOptional.isPresent()) {
			lastChunkAnalysis = chunkAnalysisOptional.get();
		}
		// no preceding chunk at all: fall back to a silent placeholder
		if(lastChunkAnalysis == null){
			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
		}
		
		List<Integer> chunkAmplitudeList = chunkList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());

		// prepend the previous chunk's amplitude so rising edges at index 0 are visible
		chunkAmplitudeList.add(0, lastChunkAnalysis.getAmplitude());
		
		LOGGER.debug("--Amplitude:{}  Denominator:{}",chunkAmplitudeList.stream().map(t -> t).collect(Collectors.toList()), musicXmlNote.getDenominator());
		
		// detect whether there is more than one amplitude peak (a second attack)
		boolean tempo = false;
		boolean isContinue = true;
		int firstPeakIndex = -1;
		int peakSize = 0;
		for (int i = 1; i < chunkAmplitudeList.size(); i++) {
			// ignore changes below the threshold — treated as noise
			if (Math.abs(chunkAmplitudeList.get(i) - chunkAmplitudeList.get(i - 1)) < hardLevel.getAmplitudeThreshold()) {
				continue;
			}
			if (chunkAmplitudeList.get(i) > hardLevel.getAmplitudeThreshold() && chunkAmplitudeList.get(i) > chunkAmplitudeList.get(i - 1)) {
				tempo = true;
				if(firstPeakIndex == -1){
					firstPeakIndex = i;
					peakSize++;
				}
				// a rise after a fall means a second attack — rhythm is wrong
				if (isContinue == false) {
					tempo = false;
					peakSize++;
					break;
				}
			} else {
				if (tempo == true) {
					isContinue = false;
				}
			}
		}
		
		// exactly one peak is correct; none falls back to the previous chunk's peak flag
		if(peakSize == 0){
			tempo = lastChunkAnalysis.isPeak();
		}else if(peakSize == 1){
			tempo = true;
		}else{
			tempo = false;
		}
		
		if (tempo) {
			// the beat must start early enough within the window
			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) * 2){
				LOGGER.debug("超过范围:{}", (firstPeakIndex - 1) * 100 /chunkAmplitudeList.size());
				tempo = false;
			}
		}
		
		return tempo;
	}
+	
	/**
	 * Finds the corrected onset time (ms) of the note actually played within the
	 * given chunk window; falls back to the score time plus the dynamic offset
	 * whenever no matching signal is found.
	 *
	 * @param chunkAnalysisList analysed chunks inside the note's tolerance window
	 * @param musicXmlNote      reference note from the score
	 * @return detected onset time, or {@code musicXmlNote.getTimeStamp() + dynamicOffset}
	 */
	private double queryFirstNoteStartTime(List<ChunkAnalysis> chunkAnalysisList, MusicXmlNote musicXmlNote) {
		
		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
			LOGGER.debug("找不到数据,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
			return musicXmlNote.getTimeStamp() + dynamicOffset;
		}
		
		// percussion: the onset is the first chunk loud enough to be a beat
		if (percussionList.contains(subjectId)) {
			Optional<ChunkAnalysis> optional = chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).findFirst();
			if(optional.isPresent()){
				LOGGER.debug("范围内查询到信号,correctedStartTime:{}", optional.get().getStartTime());
				return optional.get().getStartTime();
			}else{
				LOGGER.debug("范围内未查询到信号,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
				return musicXmlNote.getTimeStamp() + dynamicOffset;
			}
		}
		
		// same pitch as the previous note: a silent gap must separate the two notes,
		// and the onset is where that gap ends
		if(musicXmlNote.getMusicalNotesIndex() > 0){
			MusicXmlNote preMusicXmlNote = getCurrentMusicNote(null, musicXmlNote.getMusicalNotesIndex() - 1);
			if((int)preMusicXmlNote.getFrequency() == (int)musicXmlNote.getFrequency()){
				Optional<ChunkAnalysis> optional = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= MIN_FREQUECY).findFirst();
				if(optional.isPresent()){
					// NOTE(review): the log prints getStartTime() but the method returns
					// getEndTime() of the silent chunk — confirm which one is intended
					LOGGER.debug("与上一个音同音,有断开,correctedStartTime:{}", optional.get().getStartTime());
					return optional.get().getEndTime();
				}else{
					LOGGER.debug("与上一个音同音,未断开,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
					return musicXmlNote.getTimeStamp() + dynamicOffset;
				}
			}
		}

		// otherwise: the onset is the first chunk whose pitch falls into the
		// frequency range of the expected note
		NoteFrequencyRange standardNote = new NoteFrequencyRange(standardFrequecy, musicXmlNote.getFrequency());

		NoteFrequencyRange noteFrequencyRange = null;

		for (ChunkAnalysis ca : chunkAnalysisList) {
			noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, ca.getFrequency());
			if (standardNote.equals(noteFrequencyRange)) {
				LOGGER.debug("范围内查询到信号,correctedStartTime:{}", ca.getStartTime());
				return ca.getStartTime();
			}
		}
		
		LOGGER.debug("范围内未查询到信号,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);

		//return chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime();
		return musicXmlNote.getTimeStamp() + dynamicOffset;
	}
+	
+	public static void main(String[] args) {
+		double[] midi = new double[128];;
+		int standardPitch = 440; // a is 440 hz...
+		for (int x = 0; x < midi.length; ++x)
+		{
+			//转调
+		   midi[x] = new BigDecimal(standardPitch).multiply(new BigDecimal(Math.pow(2, new BigDecimal(x-69).divide(new BigDecimal(12),6,BigDecimal.ROUND_HALF_UP).doubleValue()))).doubleValue();
+		   System.out.println("x=" + x +"  "+ midi[x]);
+		}
+		
+	}
+	
+}

+ 67 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/WebSocketResponse.java

@@ -0,0 +1,67 @@
+package com.yonge.netty.dto;
+
+import org.springframework.http.HttpStatus;
+
+public class WebSocketResponse<T> {
+
+	private Head header = new Head();
+
+	private T body;
+
+	public WebSocketResponse(Head header, T body) {
+		this.header = header;
+		this.body = body;
+	}
+
+	public WebSocketResponse(String command, T body) {
+		this.header = new Head(command, HttpStatus.OK.value());
+		this.body = body;
+	}
+
+	public Head getHeader() {
+		return header;
+	}
+
+	public void setHeader(Head header) {
+		this.header = header;
+	}
+
+	public T getBody() {
+		return body;
+	}
+
+	public void setBody(T body) {
+		this.body = body;
+	}
+
+	public static class Head {
+		private int status = HttpStatus.OK.value();
+		private String commond = "";
+
+		public Head() {
+
+		}
+
+		public Head(String commond, int status) {
+			this.commond = commond;
+			this.status = status;
+		}
+
+		public int getStatus() {
+			return status;
+		}
+
+		public void setStatus(int status) {
+			this.status = status;
+		}
+
+		public String getCommond() {
+			return commond;
+		}
+
+		public void setCommond(String commond) {
+			this.commond = commond;
+		}
+
+	}
+}

+ 182 - 0
audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlBasicInfo.java

@@ -0,0 +1,182 @@
+package com.yonge.netty.entity;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.stream.Collectors;
+
+public class MusicXmlBasicInfo {
+
+	private Integer id;
+
+	private Integer subjectId;
+	
+	private String clientId;
+
+	private Integer detailId;
+
+	private Long examSongId;
+
+	private String xmlUrl;
+
+	private String behaviorId;
+
+	private String platform;
+	
+	private String partIndex;
+
+	private int speed;
+
+	private String heardLevel;
+
+	private String uuid;
+	
+	private float beatLength;
+
+	private List<MusicXmlNote> musicXmlInfos = new ArrayList<MusicXmlNote>();
+
+	private Map<Integer, MusicXmlSection> musicXmlSectionMap = new HashMap<Integer, MusicXmlSection>();
+
+	public Integer getId() {
+		return id;
+	}
+
+	public void setId(Integer id) {
+		this.id = id;
+	}
+
+	public Integer getSubjectId() {
+		return subjectId;
+	}
+
+	public void setSubjectId(Integer subjectId) {
+		this.subjectId = subjectId;
+	}
+
+	public String getClientId() {
+		return clientId;
+	}
+
+	public void setClientId(String clientId) {
+		this.clientId = clientId;
+	}
+
+	public Integer getDetailId() {
+		return detailId;
+	}
+
+	public void setDetailId(Integer detailId) {
+		this.detailId = detailId;
+	}
+
+	public Long getExamSongId() {
+		return examSongId;
+	}
+
+	public void setExamSongId(Long examSongId) {
+		this.examSongId = examSongId;
+	}
+
+	public String getXmlUrl() {
+		return xmlUrl;
+	}
+
+	public void setXmlUrl(String xmlUrl) {
+		this.xmlUrl = xmlUrl;
+	}
+
+	public String getBehaviorId() {
+		return behaviorId;
+	}
+
+	public void setBehaviorId(String behaviorId) {
+		this.behaviorId = behaviorId;
+	}
+
+	public String getPlatform() {
+		return platform;
+	}
+
+	public void setPlatform(String platform) {
+		this.platform = platform;
+	}
+
+	public void setPartIndex(String partIndex) {
+		this.partIndex = partIndex;
+	}
+
+	public String getPartIndex() {
+		return partIndex;
+	}
+
+	public int getSpeed() {
+		return speed;
+	}
+
+	public void setSpeed(int speed) {
+		this.speed = speed;
+	}
+
+	public String getHeardLevel() {
+		return heardLevel;
+	}
+
+	public void setHeardLevel(String heardLevel) {
+		this.heardLevel = heardLevel;
+	}
+
+	public String getUuid() {
+		return uuid;
+	}
+
+	public void setUuid(String uuid) {
+		this.uuid = uuid;
+	}
+
+	public float getBeatLength() {
+		return beatLength;
+	}
+
+	public void setBeatLength(int beatLength) {
+		this.beatLength = beatLength;
+	}
+
+	public List<MusicXmlNote> getMusicXmlInfos() {
+		return musicXmlInfos;
+	}
+
+	public void setMusicXmlInfos(List<MusicXmlNote> musicXmlInfos) {
+		this.musicXmlInfos = musicXmlInfos;
+	}
+
+	public Map<Integer, MusicXmlSection> getMusicXmlSectionMap() {
+
+		if (musicXmlSectionMap.size() == 0) {
+			Map<Integer, List<MusicXmlNote>> map = musicXmlInfos.stream().collect(Collectors.groupingBy(MusicXmlNote::getMeasureIndex));
+
+			List<MusicXmlNote> noteList = null;
+			MusicXmlSection section = null;
+			for (Entry<Integer, List<MusicXmlNote>> entry : map.entrySet()) {
+				noteList = entry.getValue();
+
+				section = new MusicXmlSection();
+
+				section.setStartTime(noteList.stream().map(t -> t.getTimeStamp()).distinct().min(Double::compareTo).get());
+				section.setDuration(noteList.stream().mapToDouble(t -> t.getDuration()).sum());
+				section.setNoteNum(noteList.size());
+				section.setIndex(entry.getKey());
+				section.setMeasureRenderIndex(noteList.stream().findFirst().get().getMeasureRenderIndex());
+
+				musicXmlSectionMap.put(entry.getKey(), section);
+			}
+		}
+
+		return musicXmlSectionMap;
+	}
+
+	public void setMusicXmlSectionMap(Map<Integer, MusicXmlSection> musicXmlSectionMap) {
+		this.musicXmlSectionMap = musicXmlSectionMap;
+	}
+}

+ 116 - 0
audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlNote.java

@@ -0,0 +1,116 @@
+package com.yonge.netty.entity;
+
/**
 * One note parsed from the MusicXML score, carrying the timing and pitch
 * information the evaluation engine compares the played audio against.
 */
public class MusicXmlNote {

	/** Start time of the note in ms (the first note of the piece starts at 0). */
	private double timeStamp;

	/** Playback duration of this note in ms. */
	private double duration;

	/** Expected frequency (Hz) of this note. */
	private float frequency;

	/** Frequency of the next sounding pitch (not necessarily the next score note). */
	private float nextFrequency;

	/** Frequency of the previous sounding pitch (not necessarily the previous score note). */
	private float prevFrequency;

	/** Zero-based index of the measure this note belongs to. */
	private int measureIndex;

	/** True when this note must be skipped during evaluation. */
	private boolean dontEvaluating;

	/** Zero-based index of this note within the whole score. */
	private int musicalNotesIndex;

	/** Note value denominator (e.g. 4 for a quarter note). */
	private int denominator;

	/** Render index of the measure this note belongs to. */
	private int measureRenderIndex;

	public double getTimeStamp() {
		return timeStamp;
	}

	public void setTimeStamp(double timeStamp) {
		this.timeStamp = timeStamp;
	}

	public double getDuration() {
		return duration;
	}

	public void setDuration(double duration) {
		this.duration = duration;
	}

	public float getFrequency() {
		return frequency;
	}

	public void setFrequency(float frequency) {
		this.frequency = frequency;
	}

	public float getNextFrequency() {
		return nextFrequency;
	}

	public void setNextFrequency(float nextFrequency) {
		this.nextFrequency = nextFrequency;
	}

	public float getPrevFrequency() {
		return prevFrequency;
	}

	public void setPrevFrequency(float prevFrequency) {
		this.prevFrequency = prevFrequency;
	}

	public int getMeasureIndex() {
		return measureIndex;
	}

	public void setMeasureIndex(int measureIndex) {
		this.measureIndex = measureIndex;
	}

	public boolean getDontEvaluating() {
		return dontEvaluating;
	}

	public void setDontEvaluating(boolean dontEvaluating) {
		this.dontEvaluating = dontEvaluating;
	}

	public int getMusicalNotesIndex() {
		return musicalNotesIndex;
	}

	public void setMusicalNotesIndex(int musicalNotesIndex) {
		this.musicalNotesIndex = musicalNotesIndex;
	}

	public int getDenominator() {
		return denominator;
	}

	public void setDenominator(int denominator) {
		this.denominator = denominator;
	}

	public int getMeasureRenderIndex() {
		return measureRenderIndex;
	}

	public void setMeasureRenderIndex(int measureRenderIndex) {
		this.measureRenderIndex = measureRenderIndex;
	}
}

+ 60 - 0
audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlSection.java

@@ -0,0 +1,60 @@
+package com.yonge.netty.entity;
+
/**
 * Aggregated information about one measure (section) of the score, derived
 * from the notes it contains.
 */
public class MusicXmlSection {

	/** Start time of the measure in ms. */
	private double startTime;

	/** Total playback duration of the measure in ms. */
	private double duration;

	/** Number of notes inside the measure. */
	private int noteNum;

	/** Index of the measure within the score. */
	private int index;

	/** Render index of the measure. */
	private int measureRenderIndex;

	public double getStartTime() {
		return startTime;
	}

	public void setStartTime(double startTime) {
		this.startTime = startTime;
	}

	public double getDuration() {
		return duration;
	}

	public void setDuration(double duration) {
		this.duration = duration;
	}

	public int getNoteNum() {
		return noteNum;
	}

	public void setNoteNum(int noteNum) {
		this.noteNum = noteNum;
	}

	public int getIndex() {
		return index;
	}

	public void setIndex(int index) {
		this.index = index;
	}

	public int getMeasureRenderIndex() {
		return measureRenderIndex;
	}

	public void setMeasureRenderIndex(int measureRenderIndex) {
		this.measureRenderIndex = measureRenderIndex;
	}
}

+ 155 - 0
audio-analysis/src/main/java/com/yonge/netty/server/NettyServer.java

@@ -0,0 +1,155 @@
+package com.yonge.netty.server;
+
+import io.netty.bootstrap.ServerBootstrap;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelInitializer;
+import io.netty.channel.ChannelOption;
+import io.netty.channel.ChannelPipeline;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.nio.NioEventLoopGroup;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.channel.socket.nio.NioServerSocketChannel;
+import io.netty.handler.codec.http.HttpObjectAggregator;
+import io.netty.handler.codec.http.HttpServerCodec;
+import io.netty.handler.codec.http.websocketx.WebSocketServerProtocolHandler;
+import io.netty.handler.codec.http.websocketx.extensions.compression.WebSocketServerCompressionHandler;
+import io.netty.handler.stream.ChunkedWriteHandler;
+
+import java.net.InetSocketAddress;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
+
+import com.yonge.netty.server.handler.NettyServerHandler;
+import com.yonge.netty.server.handler.message.BinaryWebSocketFrameHandler;
+import com.yonge.netty.server.handler.message.TextWebSocketHandler;
+
+@Configuration
+public class NettyServer {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(NettyServer.class);
+
+	/**
+	 * webSocket协议名
+	 */
+	private static final String WEBSOCKET_PROTOCOL = "WebSocket";
+
+	/**
+	 * 端口号
+	 */
+	@Value("${netty.server.port}")
+	private int port;
+
+	/**
+	 * webSocket路径
+	 */
+	private String webSocketPath = "/audioAnalysis";
+
+	private EventLoopGroup bossGroup = new NioEventLoopGroup(1);
+
+	private EventLoopGroup workGroup = new NioEventLoopGroup(5);
+
+	@Autowired
+	private NettyServerHandler nettyServerHandler;
+
+	@Autowired
+	private BinaryWebSocketFrameHandler binaryWebSocketFrameHandler;
+
+	@Autowired
+	private TextWebSocketHandler textWebSocketHandler;
+
+	/**
+	 * 启动
+	 * @throws InterruptedException
+	 */
+	private void start() throws InterruptedException {
+		ServerBootstrap bootstrap = new ServerBootstrap();
+
+		// bossGroup辅助客户端的tcp连接请求, workGroup负责与客户端之前的读写操作
+		bootstrap.group(bossGroup, workGroup);
+		// 设置NIO类型的channel
+		bootstrap.channel(NioServerSocketChannel.class);
+		// 设置监听端口
+		bootstrap.localAddress(new InetSocketAddress(port));
+		// 服务端 accept 队列的大小
+		//bootstrap.option(ChannelOption.SO_BACKLOG, 1024);
+		//bootstrap.option(ChannelOption.SO_RCVBUF, 1024*4);
+		// 允许较小的数据包的发送,降低延迟
+		bootstrap.childOption(ChannelOption.TCP_NODELAY, true);
+		// 连接到达时会创建一个通道
+		bootstrap.childHandler(new ChannelInitializer<SocketChannel>() {
+
+			@Override
+			protected void initChannel(SocketChannel ch) throws Exception {
+				// 获得 Channel 对应的 ChannelPipeline
+				ChannelPipeline channelPipeline = ch.pipeline();
+
+				// 流水线管理通道中的处理程序(Handler),用来处理业务
+				// webSocket协议本身是基于http协议的,所以这边也要使用http编解码器
+				channelPipeline.addLast(new HttpServerCodec());
+				// channelPipeline.addLast(new ObjectEncoder());
+				// 分块向客户端写数据,防止发送大文件时导致内存溢出, channel.write(new ChunkedFile(new File("bigFile.mkv")))
+				channelPipeline.addLast(new ChunkedWriteHandler());
+				/*
+				 * 说明: 1、http数据在传输过程中是分段的,HttpObjectAggregator可以将多个段聚合 2、这就是为什么,当浏览器发送大量数据时,就会发送多次http请求
+				 */
+				channelPipeline.addLast(new HttpObjectAggregator(1024 * 8));
+				// webSocket 数据压缩扩展,当添加这个的时候WebSocketServerProtocolHandler的第三个参数需要设置成true
+				channelPipeline.addLast(new WebSocketServerCompressionHandler());
+				/*
+				 * 说明: 1、对应webSocket,它的数据是以帧(frame)的形式传递 2、浏览器请求时 ws://localhost:58080/xxx 表示请求的uri 3、核心功能是将http协议升级为ws协议,保持长连接
+				 */
+				channelPipeline.addLast(new WebSocketServerProtocolHandler(webSocketPath, WEBSOCKET_PROTOCOL, true, 65536 * 1000, false, true));
+
+				// 自定义的handler,处理业务逻辑
+				channelPipeline.addLast(nettyServerHandler);
+				channelPipeline.addLast(binaryWebSocketFrameHandler);
+				channelPipeline.addLast(textWebSocketHandler);
+
+			}
+		});
+
+		// 配置完成,开始绑定server,通过调用sync同步方法阻塞直到绑定成功
+		ChannelFuture channelFuture = bootstrap.bind().sync();
+
+		if (channelFuture.isSuccess()) {
+			LOGGER.info("Server started and listen on:{}", channelFuture.channel().localAddress());
+		}
+
+		// 对关闭通道进行监听
+		channelFuture.channel().closeFuture().sync();
+	}
+
+	/**
+	 * 释放资源
+	 * @throws InterruptedException
+	 */
+	@PreDestroy
+	public void destroy() throws InterruptedException {
+		if (bossGroup != null) {
+			bossGroup.shutdownGracefully().sync();
+		}
+		if (workGroup != null) {
+			workGroup.shutdownGracefully().sync();
+		}
+	}
+
+	@PostConstruct()
+	public void init() {
+		// 需要开启一个新的线程来执行netty server 服务器
+		new Thread(() -> {
+			try {
+				start();
+			} catch (InterruptedException e) {
+				e.printStackTrace();
+			}
+		}).start();
+	}
+
+}

+ 9 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/ChannelContextConstants.java

@@ -0,0 +1,9 @@
+package com.yonge.netty.server.handler;
+
+import io.netty.util.AttributeKey;
+
+public class ChannelContextConstants {
+
+	/** Channel attribute holding the current message action (set from "$.header.type" of the last text frame). */
+	public static final AttributeKey<String> CHANNEL_ATTR_KEY_ACTION = AttributeKey.newInstance("action");
+	
+}

+ 135 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/NettyChannelManager.java

@@ -0,0 +1,135 @@
+package com.yonge.netty.server.handler;
+
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelId;
+import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
+import io.netty.util.AttributeKey;
+
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import com.yonge.toolset.utils.json.JsonUtil;
+
+@Component
+public class NettyChannelManager {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(NettyChannelManager.class);
+
+	/**
+	 * {@link Channel#attr(AttributeKey)} attribute holding the user bound to the channel
+	 */
+	private static final AttributeKey<String> CHANNEL_ATTR_KEY_USER = AttributeKey.newInstance("user");
+
+	/**
+	 * all registered channels, keyed by channel id
+	 */
+	private ConcurrentMap<ChannelId, Channel> channels = new ConcurrentHashMap<ChannelId, Channel>();
+
+	/**
+	 * user -> channel mapping, used to push messages to a specific user
+	 */
+	private ConcurrentMap<String, Channel> userChannels = new ConcurrentHashMap<String, Channel>();
+
+	/**
+	* Register a channel in {@link #channels}.
+	*
+	* @param channel Channel
+	*/
+	public void add(Channel channel) {
+		channels.put(channel.id(), channel);
+	}
+
+	/**
+	 * Bind a user to an already-registered channel and index it in {@link #userChannels}.
+	 *
+	 * @param channel Channel
+	 * @param user user id
+	 */
+	public void addUser(Channel channel, String user) {
+		Channel existChannel = channels.get(channel.id());
+		if (existChannel == null) {
+			LOGGER.error("[addUser][连接({}) 不存在]", channel.id());
+			return;
+		}
+		// remember the user on the channel itself
+		channel.attr(CHANNEL_ATTR_KEY_USER).set(user);
+		// index the channel by user
+		userChannels.put(user, channel);
+
+		LOGGER.info("[add][用户({})加入,总数({})]", user, channels.size());
+	}
+
+	/**
+	 * Resolve the user bound to a channel.
+	 *
+	 * @param channel channel
+	 * @return user id, or null when no user was bound
+	 */
+	public String getUser(Channel channel) {
+		if (channel.hasAttr(CHANNEL_ATTR_KEY_USER)) {
+			return channel.attr(CHANNEL_ATTR_KEY_USER).get();
+		}
+		return null;
+	}
+
+	/**
+	 * Remove a channel from {@link #channels} and {@link #userChannels}.
+	 *
+	 * @param channel Channel
+	 */
+	public void remove(Channel channel) {
+		// drop from channels
+		channels.remove(channel.id());
+
+		String user = "";
+		// drop from userChannels when a user was bound
+		if (channel.hasAttr(CHANNEL_ATTR_KEY_USER)) {
+			user = channel.attr(CHANNEL_ATTR_KEY_USER).get();
+			userChannels.remove(user);
+		}
+		LOGGER.info("[remove][用户({})移除,总数({})]", user, channels.size());
+	}
+
+	/**
+	 * Send a text message to one user.
+	 *
+	 * @param user user id
+	 * @param message payload, serialized to JSON
+	 */
+	public void sendTextMessage(String user, Object message) {
+		Channel channel = userChannels.get(user);
+		if (channel == null) {
+			LOGGER.error("[send][连接不存在]");
+			return;
+		}
+		if (!channel.isActive()) {
+			LOGGER.error("[send][连接({})未激活]", channel.id());
+			return;
+		}
+		channel.writeAndFlush(new TextWebSocketFrame(JsonUtil.toJSONString(message)));
+	}
+
+	/**
+	 * Broadcast a message to every registered channel.
+	 *
+	 * @param message payload (written as-is, not wrapped in a text frame)
+	 */
+	public void sendAll(Object message) {
+		for (Channel channel : channels.values()) {
+			if (!channel.isActive()) {
+				LOGGER.error("[send][连接({})未激活]", channel.id());
+				// fix: was "return", which aborted the whole broadcast at the first inactive channel
+				continue;
+			}
+			channel.writeAndFlush(message);
+		}
+	}
+
+}

+ 77 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/NettyServerHandler.java

@@ -0,0 +1,77 @@
+package com.yonge.netty.server.handler;
+
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundHandlerAdapter;
+import io.netty.handler.codec.http.HttpHeaders;
+import io.netty.handler.codec.http.websocketx.WebSocketServerProtocolHandler;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.security.oauth2.common.OAuth2AccessToken;
+import org.springframework.stereotype.Component;
+
+@Component
+@ChannelHandler.Sharable
+public class NettyServerHandler extends ChannelInboundHandlerAdapter {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(NettyServerHandler.class);
+
+	@Autowired
+	private NettyChannelManager channelManager;
+
+	@Override
+	public void channelActive(ChannelHandlerContext ctx) {
+		// register the new connection
+		channelManager.add(ctx.channel());
+	}
+
+	@Override
+	public void channelUnregistered(ChannelHandlerContext ctx) {
+		// drop the connection from the manager
+		channelManager.remove(ctx.channel());
+	}
+
+	@Override
+	public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
+		LOGGER.error("[exceptionCaught][连接({}) 发生异常]", ctx.channel().id(), cause);
+		// close the broken connection
+		ctx.channel().close();
+	}
+
+	/**
+	 * After the WebSocket handshake completes, extract the numeric user id from the
+	 * request URI (either "...?<userId>" or ".../<userId>") and bind it to the channel.
+	 */
+	@Override
+	public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
+		if (evt instanceof WebSocketServerProtocolHandler.HandshakeComplete) {
+			WebSocketServerProtocolHandler.HandshakeComplete handshakeComplete = (WebSocketServerProtocolHandler.HandshakeComplete) evt;
+			String requestUri = handshakeComplete.requestUri();
+			
+			String userId = StringUtils.substringAfterLast(requestUri, "?");
+			
+			if(StringUtils.isBlank(userId) || !StringUtils.isNumeric(userId)){
+				userId = StringUtils.substringAfterLast(requestUri, "/");
+			}
+			
+			Channel channel = ctx.channel();
+			
+			if(!StringUtils.isNumeric(userId)){
+				// invalid user id: close and stop here
+				// fix: previously execution fell through and still registered the user after close()
+				channel.close();
+				return;
+			}
+			
+			channelManager.addUser(channel, userId);
+			
+			LOGGER.info("userId:[{}]", userId);
+			
+			HttpHeaders httpHeaders = handshakeComplete.requestHeaders();
+			String authHeader = httpHeaders.get("Authorization");
+			
+			// fix: guard against a missing Authorization header (was an unconditional NPE risk)
+			if (StringUtils.isNotBlank(authHeader)) {
+				String tokenValue = authHeader.toLowerCase().replace(OAuth2AccessToken.BEARER_TYPE.toLowerCase(), StringUtils.EMPTY).trim();
+				LOGGER.info("token:[{}]", tokenValue);
+			}
+		}
+		super.userEventTriggered(ctx, evt);
+	}
+
+}

+ 78 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/BinaryWebSocketFrameHandler.java

@@ -0,0 +1,78 @@
+package com.yonge.netty.server.handler.message;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufUtil;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
+
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.stereotype.Component;
+
+import com.yonge.netty.server.handler.ChannelContextConstants;
+import com.yonge.netty.server.handler.NettyChannelManager;
+
+@Component
+@ChannelHandler.Sharable
+public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<BinaryWebSocketFrame> implements ApplicationContextAware,InitializingBean {
+
+	private final static Logger LOGGER = LoggerFactory.getLogger(BinaryWebSocketFrameHandler.class);
+	
+	@Autowired
+	private NettyChannelManager nettyChannelManager;
+
+	private ApplicationContext applicationContext;
+	
+	/** action -> handler, built once all MessageHandler beans are available */
+	private Map<String, MessageHandler> handlerMap;
+
+	/**
+	 * Dispatch a binary frame to the MessageHandler matching the action previously
+	 * stored on the channel by the text handler.
+	 */
+	@Override
+	protected void channelRead0(ChannelHandlerContext ctx, BinaryWebSocketFrame frame) throws Exception {
+
+		Channel channel = ctx.channel();
+
+		// retain while we use the buffer in this method; released in finally
+		ByteBuf buf = frame.content().retain();
+
+		try {
+			byte[] datas = ByteBufUtil.getBytes(buf);
+
+			String user = nettyChannelManager.getUser(channel);
+			
+			String action = channel.attr(ChannelContextConstants.CHANNEL_ATTR_KEY_ACTION).get();
+			
+			if(handlerMap == null){
+				LOGGER.error("消息处理器没有初始化");
+				// fix: bail out instead of falling through into a NullPointerException
+				return;
+			}
+			MessageHandler handler = handlerMap.get(action);
+			
+			if(handler != null){
+				handler.handleBinaryMessage(user, channel, datas);
+			}
+			
+		} finally {
+			buf.release();
+		}
+	}
+
+	@Override
+	public void afterPropertiesSet() throws Exception {
+		// index every MessageHandler bean by its action key
+		handlerMap = applicationContext.getBeansOfType(MessageHandler.class).values().stream()
+				.collect(Collectors.toMap(MessageHandler::getAction, t -> t));
+	}
+
+	@Override
+	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
+		this.applicationContext = applicationContext;
+	}
+
+}

+ 12 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/MessageHandler.java

@@ -0,0 +1,12 @@
+package com.yonge.netty.server.handler.message;
+
+import io.netty.channel.Channel;
+
+public interface MessageHandler {
+	
+	/** @return the action key this handler is registered under (matched against "$.header.type") */
+	String getAction();
+	
+	/**
+	 * Handle a websocket text frame.
+	 *
+	 * @param user user id bound to the channel (may be null)
+	 * @param channel the client channel
+	 * @param text raw JSON message
+	 * @return whether the message was processed
+	 */
+	boolean handleTextMessage(String user, Channel channel, String text);
+
+	/**
+	 * Handle a websocket binary frame (e.g. PCM audio chunks).
+	 *
+	 * @param user user id bound to the channel (may be null)
+	 * @param channel the client channel
+	 * @param bytes raw frame payload
+	 * @return whether the message was processed
+	 */
+	boolean handleBinaryMessage(String user, Channel channel, byte[] bytes);
+}

+ 75 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/TextWebSocketHandler.java

@@ -0,0 +1,75 @@
+package com.yonge.netty.server.handler.message;
+
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
+
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.stereotype.Component;
+
+import com.alibaba.fastjson.JSONPath;
+import com.yonge.netty.server.handler.ChannelContextConstants;
+import com.yonge.netty.server.handler.NettyChannelManager;
+
+@Component
+@ChannelHandler.Sharable
+public class TextWebSocketHandler extends SimpleChannelInboundHandler<TextWebSocketFrame> implements ApplicationContextAware,InitializingBean {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(TextWebSocketHandler.class);
+
+	@Autowired
+	private NettyChannelManager nettyChannelManager;
+	
+	private ApplicationContext applicationContext;
+	
+	/** action -> handler, built once all MessageHandler beans are available */
+	private Map<String, MessageHandler> handlerMap;
+
+	/**
+	 * Read the action from "$.header.type", remember it on the channel (so later
+	 * binary frames can be routed), and dispatch the message to the matching handler.
+	 */
+	@Override
+	protected void channelRead0(ChannelHandlerContext ctx, TextWebSocketFrame frame) throws Exception {
+
+		Channel channel = ctx.channel();
+
+		String jsonMsg = frame.text();
+		
+		LOGGER.info("接收到客户端的消息内容:{}", jsonMsg);
+		
+		String action = (String) JSONPath.extract(jsonMsg, "$.header.type");
+		
+		if(StringUtils.isNoneBlank(action)){
+			channel.attr(ChannelContextConstants.CHANNEL_ATTR_KEY_ACTION).set(action);
+			
+			if(handlerMap == null){
+				LOGGER.error("消息处理器没有初始化");
+				// fix: bail out instead of falling through into a NullPointerException
+				return;
+			}
+			MessageHandler handler = handlerMap.get(action);
+			
+			if(handler != null){
+				handler.handleTextMessage(nettyChannelManager.getUser(channel), channel, jsonMsg);
+			}
+		}
+	}
+
+	@Override
+	public void afterPropertiesSet() throws Exception {
+		// index every MessageHandler bean by its action key
+		handlerMap = applicationContext.getBeansOfType(MessageHandler.class).values().stream()
+				.collect(Collectors.toMap(MessageHandler::getAction, t -> t));
+	}
+
+	@Override
+	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
+		this.applicationContext = applicationContext;
+	}
+
+}

+ 112 - 0
audio-analysis/src/main/java/com/yonge/netty/server/processor/WaveformWriter.java

@@ -0,0 +1,112 @@
+/*
+ *      _______                       _____   _____ _____  
+ *     |__   __|                     |  __ \ / ____|  __ \ 
+ *        | | __ _ _ __ ___  ___  ___| |  | | (___ | |__) |
+ *        | |/ _` | '__/ __|/ _ \/ __| |  | |\___ \|  ___/ 
+ *        | | (_| | |  \__ \ (_) \__ \ |__| |____) | |     
+ *        |_|\__,_|_|  |___/\___/|___/_____/|_____/|_|     
+ *                                                         
+ * -------------------------------------------------------------
+ *
+ * TarsosDSP is developed by Joren Six at IPEM, University Ghent
+ *  
+ * -------------------------------------------------------------
+ *
+ *  Info: http://0110.be/tag/TarsosDSP
+ *  Github: https://github.com/JorenSix/TarsosDSP
+ *  Releases: http://0110.be/releases/TarsosDSP/
+ *  
+ *  TarsosDSP includes modified source code by various authors,
+ *  for credits and info, see README.
+ * 
+ */
+
+package com.yonge.netty.server.processor;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import be.tarsos.dsp.writer.WaveHeader;
+
+/**
+ * Writes raw PCM data to a WAV file: a zero-filled 44-byte header is reserved
+ * up front and back-patched with the real RIFF/WAVE header on finish.
+ */
+public class WaveformWriter {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(WaveformWriter.class);
+
+	// NOTE(review): stays null when the constructor fails to open the file; later calls would then NPE
+	private RandomAccessFile randomAccessFile;
+
+	private final String fileName;
+
+	public static final short CHANNEL_NUM = 1;
+
+	public static final int SAMPLE_RATE = 44100;
+
+	public static final short BITS_PER_SAMPLE = 16;
+
+	/** size of the WAV header reserved at the start of the file (bytes) */
+	public static final int HEADER_LENGTH = 44;
+
+	/**
+	 * Opens the target file and reserves HEADER_LENGTH zero bytes for the header.
+	 * I/O errors are logged and swallowed (best-effort by design).
+	 *
+	 * @param fileName absolute path of the WAV file to create
+	 */
+	public WaveformWriter(String fileName) {
+
+		this.fileName = fileName;
+		try {
+			randomAccessFile = new RandomAccessFile(fileName, "rw");
+			randomAccessFile.write(new byte[HEADER_LENGTH]);
+		} catch (IOException e) {
+			LOGGER.error("创建WAV文件出现异常[{}]:{}", fileName, e.getMessage());
+			e.printStackTrace();
+		}
+
+	}
+
+	/**
+	 * Appends raw PCM bytes to the file.
+	 *
+	 * @param datas PCM data to append
+	 * @return always true, even when the write failed (errors are only logged)
+	 */
+	public boolean process(byte[] datas) {
+
+		try {
+			randomAccessFile.write(datas);
+		} catch (IOException e) {
+			LOGGER.error("写WAV文件出现异常[{}]:{}", fileName, e.getMessage());
+			e.printStackTrace();
+		}
+
+		return true;
+	}
+
+	/**
+	 * Back-patches the reserved header with the real WAV header (data length =
+	 * file length minus the header) and closes the file.
+	 */
+	public void processingFinished() {
+		try {
+			WaveHeader waveHeader = new WaveHeader(WaveHeader.FORMAT_PCM, CHANNEL_NUM, SAMPLE_RATE, BITS_PER_SAMPLE, (int) randomAccessFile.length()
+					- HEADER_LENGTH);
+
+			ByteArrayOutputStream header = new ByteArrayOutputStream();
+			waveHeader.write(header);
+			randomAccessFile.seek(0);
+			randomAccessFile.write(header.toByteArray());
+			randomAccessFile.close();
+		} catch (IOException e) {
+			LOGGER.error("关闭WAV文件出现异常[{}]:{}", fileName, e.getMessage());
+			e.printStackTrace();
+		}
+	}
+
+	/** @return a File handle for the path this writer targets */
+	public File getFile() {
+		return new File(fileName);
+	}
+
+	/**
+	 * Current file length in bytes.
+	 *
+	 * @param isSubHeadLength when true, subtract the 44-byte header (i.e. PCM data length only)
+	 * @return length in bytes, or 0 when the length could not be read
+	 */
+	public long getFileLength(boolean isSubHeadLength) {
+		try {
+			if (isSubHeadLength) {
+				return randomAccessFile.length() - HEADER_LENGTH;
+			}
+			return randomAccessFile.length();
+		} catch (IOException e) {
+			LOGGER.error("读取WAV文件出现异常[{}]:{}", fileName, e.getMessage());
+			e.printStackTrace();
+		}
+		return 0;
+	}
+}

+ 366 - 0
audio-analysis/src/main/java/com/yonge/netty/server/service/AudioCompareHandler.java

@@ -0,0 +1,366 @@
+package com.yonge.netty.server.service;
+
+import io.netty.channel.Channel;
+
+import java.io.File;
+import java.math.BigDecimal;
+import java.text.SimpleDateFormat;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.JSONObject;
+import com.alibaba.fastjson.JSONPath;
+import com.yonge.audio.analysis.AudioFloatConverter;
+import com.yonge.audio.utils.ArrayUtil;
+import com.yonge.cooleshow.auth.api.client.SysUserFeignService;
+import com.yonge.cooleshow.auth.api.entity.SysUser;
+import com.yonge.cooleshow.biz.dal.entity.SysMusicCompareRecord;
+import com.yonge.cooleshow.biz.dal.enums.DeviceTypeEnum;
+import com.yonge.cooleshow.biz.dal.enums.FeatureType;
+import com.yonge.cooleshow.biz.dal.enums.HeardLevelEnum;
+import com.yonge.cooleshow.biz.dal.service.SysMusicCompareRecordService;
+import com.yonge.netty.dto.SectionAnalysis;
+import com.yonge.netty.dto.UserChannelContext;
+import com.yonge.netty.dto.WebSocketResponse;
+import com.yonge.netty.entity.MusicXmlBasicInfo;
+import com.yonge.netty.entity.MusicXmlNote;
+import com.yonge.netty.server.handler.NettyChannelManager;
+import com.yonge.netty.server.handler.message.MessageHandler;
+import com.yonge.netty.server.processor.WaveformWriter;
+import com.yonge.toolset.thirdparty.storage.StoragePluginContext;
+import com.yonge.toolset.thirdparty.storage.provider.KS3StoragePlugin;
+import com.yonge.toolset.utils.upload.UploadUtil;
+
+@Component
+public class AudioCompareHandler implements MessageHandler {
+	
+	private static final Logger LOGGER = LoggerFactory.getLogger(AudioCompareHandler.class);
+
+	@Autowired
+	private UserChannelContextService userChannelContextService;
+
+	@Autowired
+	private NettyChannelManager nettyChannelManager;
+	
+	@Autowired
+	private SysMusicCompareRecordService sysMusicCompareRecordService;
+	
+    @Autowired
+    private SysUserFeignService sysUserFeignService;
+
+    @Autowired
+    private StoragePluginContext storagePluginContext;
+
+	/**
+	 * @describe sample rate (Hz)
+	 */
+	private float sampleRate = 44100;
+
+	/**
+	 * bits per sample
+	 */
+	private int bitsPerSample = 16;
+
+	/**
+	 * channel count (mono)
+	 */
+	private int channels = 1;
+
+	/**
+	 * @describe analysis buffer size in bytes (bufferSize / 2 samples at 16 bit)
+	 */
+	private int bufferSize = 1024 * 2;
+
+	private boolean signed = true;
+
+	private boolean bigEndian = false;
+
+	private AudioFormat audioFormat = new AudioFormat(sampleRate, bitsPerSample, channels, signed, bigEndian);
+
+	private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
+
+	private String tmpFileDir = "/mdata/soundCompare/";
+
+	// timestamp pattern for recording file names.
+	// fix: was "yyMMddHHmmSS" — 'S' is millisecond-of-second; seconds-of-minute is 'ss'.
+	// A SimpleDateFormat instance is created per call because the class is not thread-safe
+	// and handleBinaryMessage runs concurrently for different channels.
+	private static final String FILE_NAME_DATE_PATTERN = "yyMMddHHmmss";
+	
+	@Override
+	public String getAction() {
+		return "SOUND_COMPARE";
+	}
+
+	/**
+	 * Handles the evaluation control protocol. The command is read from
+	 * "$.header.commond" (sic — the key is part of the wire protocol) and the
+	 * payload from "$.body".
+	 */
+	@Override
+	public boolean handleTextMessage(String user, Channel channel, String jsonMsg) {
+		
+		String command = (String) JSONPath.extract(jsonMsg, "$.header.commond");
+
+		JSONObject dataObj = (JSONObject) JSONPath.extract(jsonMsg, "$.body");
+		
+		UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
+		
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+
+		switch (command) {
+		case "musicXml": // sync the music-xml score information
+			
+			musicXmlBasicInfo = JSONObject.toJavaObject(dataObj, MusicXmlBasicInfo.class);
+			
+			userChannelContextService.remove(channel);
+
+			channelContext = new UserChannelContext();
+			
+			channelContext.setHandlerSwitch(false);
+
+			channelContext.getSongMusicXmlMap().put(musicXmlBasicInfo.getExamSongId(), musicXmlBasicInfo);
+			channelContext.init(musicXmlBasicInfo.getPlatform(), musicXmlBasicInfo.getHeardLevel(), musicXmlBasicInfo.getSubjectId(),
+					musicXmlBasicInfo.getBeatLength(), audioFormat.getSampleRate(), bufferSize / 2);
+			channelContext.setUser(user);
+			
+			userChannelContextService.register(channel, channelContext);
+
+			break;
+		case "recordStart": // start an evaluation
+
+			// fix: "musicXml" must have been received first; previously this NPE'd
+			if (channelContext == null) {
+				return false;
+			}
+
+			// reset cached state
+			channelContext.resetUserInfo();
+			
+			channelContext.setHandlerSwitch(false);
+			
+			musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
+
+			if (musicXmlBasicInfo != null) {
+				Date date = new Date();
+				SysMusicCompareRecord sysMusicCompareRecord = new SysMusicCompareRecord(FeatureType.CLOUD_STUDY_EVALUATION);
+				sysMusicCompareRecord.setCreateTime(date);
+				sysMusicCompareRecord.setUserId(Long.parseLong(user));
+				sysMusicCompareRecord.setMusicSheetId(musicXmlBasicInfo.getExamSongId());
+				sysMusicCompareRecord.setBehaviorId(musicXmlBasicInfo.getBehaviorId());
+				sysMusicCompareRecord.setClientId(musicXmlBasicInfo.getClientId());
+				sysMusicCompareRecord.setDeviceType(DeviceTypeEnum.valueOf(musicXmlBasicInfo.getPlatform()));
+				sysMusicCompareRecord.setSpeed(musicXmlBasicInfo.getSpeed());
+				sysMusicCompareRecord.setPartIndex(musicXmlBasicInfo.getPartIndex());
+				
+				SysUser sysUser = sysUserFeignService.queryUserById(sysMusicCompareRecord.getUserId());
+				
+				// total source duration = end of the last note (seconds)
+				MusicXmlNote musicXmlNote = musicXmlBasicInfo.getMusicXmlInfos().stream().max(Comparator.comparing(MusicXmlNote::getTimeStamp)).get();
+				sysMusicCompareRecord.setSourceTime((float) ((musicXmlNote.getTimeStamp()+musicXmlNote.getDuration())/1000));
+				sysMusicCompareRecordService.insert(sysMusicCompareRecord);
+				channelContext.setRecordId(sysMusicCompareRecord.getId());
+			}
+			break;
+		case "recordEnd": // finish evaluation
+		case "recordCancel": // cancel evaluation
+			if (channelContext == null) {
+				return false;
+			}
+			
+			channelContext.setHandlerSwitch(false);
+
+			WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
+			if (waveFileProcessor != null) {
+				// back-patch the WAV header
+				waveFileProcessor.processingFinished();
+			}
+
+			if (StringUtils.equals(command, "recordEnd")) {
+				// build the evaluation report
+				Map<String, Object> params = new HashMap<String, Object>();
+
+				Map<String, Integer> scoreMap = channelContext.evaluateForMusic();
+				for (Entry<String, Integer> entry : scoreMap.entrySet()) {
+					params.put(entry.getKey(), entry.getValue());
+				}
+				
+				// persist the evaluation result
+				Long recordId = channelContext.getRecordId();
+				SysMusicCompareRecord sysMusicCompareRecord = sysMusicCompareRecordService.get(recordId);
+				if(sysMusicCompareRecord != null){
+					musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
+					
+					if (scoreMap != null && scoreMap.size() > 1) {
+						sysMusicCompareRecord.setScore(new BigDecimal(scoreMap.get("score")));
+						sysMusicCompareRecord.setIntonation(new BigDecimal(scoreMap.get("intonation")));
+						sysMusicCompareRecord.setIntegrity(new BigDecimal(scoreMap.get("integrity")));
+						sysMusicCompareRecord.setCadence(new BigDecimal(scoreMap.get("cadence")));
+						sysMusicCompareRecord.setPlayTime(scoreMap.get("playTime") / 1000);
+						
+						LOGGER.info("Score:{} Intonation:{} Integrity:{} Cadence:{}", sysMusicCompareRecord.getScore(),sysMusicCompareRecord.getIntonation(),sysMusicCompareRecord.getIntegrity(),sysMusicCompareRecord.getCadence());
+					}
+					sysMusicCompareRecord.setFeature(FeatureType.CLOUD_STUDY_EVALUATION);
+
+					// upload the recording; failures are logged and leave the path null
+		            String url = null;
+		            try {
+		                String folder = UploadUtil.getFileFloder();
+		                url = storagePluginContext.asyncUploadFile(KS3StoragePlugin.PLUGIN_NAME,"soundCompare/" + folder, waveFileProcessor.getFile(), true);
+		            } catch (Exception e) {
+		                LOGGER.error("录音文件上传失败:{}", e);
+		            }
+					sysMusicCompareRecord.setRecordFilePath(url);
+					//sysMusicCompareRecord.setVideoFilePath(videoFilePath);
+
+					Map<String, Object> scoreData = new HashMap<>();
+					List<SectionAnalysis> sectionAnalysisList = channelContext.getDoneSectionAnalysisList();
+					sectionAnalysisList = sectionAnalysisList.stream().filter(t -> t.isIngore() == false).collect(Collectors.toList());
+					scoreData.put("userMeasureScore", sectionAnalysisList.stream().collect(Collectors.toMap(SectionAnalysis :: getIndex, t -> t)));
+
+					Map<String, Object> musicalNotesPlayStats = new HashMap<>();
+					musicalNotesPlayStats.put("detailId", musicXmlBasicInfo.getDetailId());
+					musicalNotesPlayStats.put("examSongId", musicXmlBasicInfo.getExamSongId());
+					musicalNotesPlayStats.put("xmlUrl", musicXmlBasicInfo.getXmlUrl());
+					
+					musicalNotesPlayStats.put("notesData", channelContext.getDoneNoteAnalysisList().stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList()));
+					scoreData.put("musicalNotesPlayStats", musicalNotesPlayStats);
+					sysMusicCompareRecord.setScoreData(JSON.toJSONString(scoreData));
+					
+					sysMusicCompareRecord.setHeardLevel(HeardLevelEnum.valueOf(channelContext.getHardLevel().name()));
+					
+					sysMusicCompareRecordService.saveMusicCompareData(sysMusicCompareRecord);
+				}
+				
+				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("overall", params);
+
+				nettyChannelManager.sendTextMessage(user, resp);
+			}
+
+			// reset cached state
+			channelContext.resetUserInfo();
+
+			break;
+		case "audioPlayStart": // client playback started; body carries the beat offset
+			
+			// fix: guard against a missing context (no prior "musicXml" message)
+			if (channelContext == null) {
+				return false;
+			}
+			
+			Integer offsetTime = dataObj.getInteger("offsetTime");
+			if(offsetTime != null){
+				channelContext.setOffsetMS(offsetTime);
+				channelContext.setHandlerSwitch(true);
+			}
+
+			break;
+		case "videoUpload": // bind an uploaded video/audio file path to the record
+			SysMusicCompareRecord musicCompareRecord = null;
+			if (dataObj.containsKey("recordId")) {
+				musicCompareRecord = sysMusicCompareRecordService.get(dataObj.getLong("recordId"));
+			}
+			// fix: the old else-branch dereferenced musicCompareRecord even when it was null
+			if (Objects.nonNull(musicCompareRecord)) {
+				if (dataObj.containsKey("filePath")) {
+					musicCompareRecord.setVideoFilePath(dataObj.getString("filePath"));
+				} else {
+					musicCompareRecord.setVideoFilePath(musicCompareRecord.getRecordFilePath());
+				}
+				sysMusicCompareRecordService.update(musicCompareRecord);
+			}
+			
+			break;
+
+		default:
+			// unknown command: ignore
+			break;
+		}
+		return true;
+	}
+
+	/**
+	 * Receives raw PCM chunks: appends them to the recording WAV file, buffers them,
+	 * skips the metronome lead-in, then feeds fixed-size windows to the analysis
+	 * context and pushes per-measure scores back to the client.
+	 */
+	@Override
+	public boolean handleBinaryMessage(String user, Channel channel, byte[] datas) {
+		
+		UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
+
+		if (channelContext == null) {
+			return false;
+		}
+
+		// write the recording file; SimpleDateFormat is created per call (not thread-safe)
+		WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
+		if (waveFileProcessor == null) {
+			File file = new File(tmpFileDir + user + "_" + new SimpleDateFormat(FILE_NAME_DATE_PATTERN).format(new Date()) + ".wav");
+			waveFileProcessor = new WaveformWriter(file.getAbsolutePath());
+			channelContext.setWaveFileProcessor(waveFileProcessor);
+		}
+		waveFileProcessor.process(datas);
+		
+		/*datas = channelContext.skipMetronome(datas);
+
+		if (datas.length == 0) {
+			return false;
+		}*/
+
+		channelContext.setChannelBufferBytes(ArrayUtil.mergeByte(channelContext.getChannelBufferBytes(), datas));
+
+		int totalLength = channelContext.getChannelBufferBytes().length;
+		
+		if (channelContext.getHandlerSwitch() == false) {
+			return false;
+		}
+		
+		// drop the beat/offset lead-in before analysis starts
+		if (channelContext.getOffsetMS() + channelContext.getBeatDuration() > 0) {
+			int beatByteLength = (int) (audioFormat.getSampleRate() * audioFormat.getSampleSizeInBits() / 8 * (channelContext.getOffsetMS() + channelContext.getBeatDuration()) / 1000);
+			
+			if(totalLength > beatByteLength){
+				// keep the skip aligned to 16-bit sample boundaries
+				if(beatByteLength % 2 != 0){
+					LOGGER.debug("**************奇数*****************");
+					beatByteLength--;
+				}
+				channelContext.setChannelBufferBytes(ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), beatByteLength, totalLength - 1));
+				
+				LOGGER.debug("--------Length:{}  Times[{} + {}]:{}--------", waveFileProcessor.getFile().length() - channelContext.getChannelBufferBytes().length, channelContext.getOffsetMS() , channelContext.getBeatDuration(),(waveFileProcessor.getFile().length() - channelContext.getChannelBufferBytes().length) * 1000 /audioFormat.getSampleRate()/2);
+				
+				channelContext.setOffsetMS(0);
+				channelContext.setBeatDuration(0);
+			}else{
+				return false;
+			}
+		}
+		
+		totalLength = channelContext.getChannelBufferBytes().length;
+		
+
+		// analyse complete bufferSize windows, carrying the remainder over
+		while (totalLength >= bufferSize) {
+			byte[] bufferData = ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), 0, bufferSize - 1);
+
+			if (bufferSize != totalLength) {
+				channelContext.setChannelBufferBytes(ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), bufferSize, totalLength - 1));
+			} else {
+				channelContext.setChannelBufferBytes(new byte[0]);
+			}
+
+			float[] sampleFloats = new float[bufferSize / 2];
+
+			converter.toFloatArray(bufferData, sampleFloats);
+
+			channelContext.handle(sampleFloats, audioFormat);
+
+			MusicXmlBasicInfo musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
+			int sectionIndex = channelContext.getEvaluatingSectionIndex().get();
+
+			// push the per-measure score once it becomes available (>= 0)
+			int score = channelContext.evaluateForSection(sectionIndex, musicXmlBasicInfo.getSubjectId());
+			if (score >= 0) {
+
+				Map<String, Object> params = new HashMap<String, Object>();
+				params.put("score", score);
+				params.put("measureIndex", sectionIndex);
+				params.put("measureRenderIndex", channelContext.getCurrentMusicSection(null, sectionIndex).getMeasureRenderIndex());
+
+				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("measureScore", params);
+
+				nettyChannelManager.sendTextMessage(user, resp);
+			}
+
+			totalLength = channelContext.getChannelBufferBytes().length;
+		}
+
+		return true;
+	}
+
+}

+ 90 - 0
audio-analysis/src/main/java/com/yonge/netty/server/service/PitchDetectionHandler.java

@@ -0,0 +1,90 @@
+package com.yonge.netty.server.service;
+
+import io.netty.channel.Channel;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import com.yonge.audio.analysis.AudioFloatConverter;
+import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.netty.dto.WebSocketResponse;
+import com.yonge.netty.server.handler.NettyChannelManager;
+import com.yonge.netty.server.handler.message.MessageHandler;
+
+/**
+ * Handler for the "PITCH_DETECTION" websocket action: converts an incoming
+ * binary PCM frame (16-bit, little-endian, mono — see the AudioFormat below)
+ * to float samples, estimates the fundamental frequency with the YIN
+ * algorithm, and pushes a "checking" response containing the frequency back
+ * to the user's channel.
+ */
+@Component
+public class PitchDetectionHandler implements MessageHandler {
+	
+	private final static Logger LOGGER = LoggerFactory.getLogger(PitchDetectionHandler.class);
+
+	/**
+	 * @describe sample rate (Hz)
+	 */
+	private float sampleRate = 44100;
+
+	/**
+	 * Size of each sample (bits)
+	 */
+	private int bitsPerSample = 16;
+
+	/**
+	 * Number of channels (1 = mono)
+	 */
+	private int channels = 1;
+	
+	// samples are signed PCM
+	private boolean signed = true;
+
+	// little-endian byte order
+	private boolean bigEndian = false;
+
+	private AudioFormat audioFormat = new AudioFormat(sampleRate, bitsPerSample, channels, signed, bigEndian);
+
+	// converts raw PCM bytes into normalized float samples for the detector
+	private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
+	
+	@Autowired
+	private NettyChannelManager nettyChannelManager;
+	
+	@Override
+	public String getAction() {
+		return "PITCH_DETECTION";
+	}
+
+	/** Text frames carry no payload for this action; acknowledge and ignore. */
+	@Override
+	public boolean handleTextMessage(String user, Channel channel, String text) {
+
+		return true;
+	}
+
+	/**
+	 * Detects the pitch of one binary audio frame and sends it to the user.
+	 *
+	 * @param userId user the "checking" response is addressed to
+	 * @param channel originating netty channel (not used here)
+	 * @param bytes raw PCM payload, 2 bytes per sample; an odd trailing byte is ignored
+	 * @return false when the frame is shorter than one sample, true otherwise
+	 */
+	@Override
+	public boolean handleBinaryMessage(String userId, Channel channel, byte[] bytes) {
+
+		float[] samples = new float[bytes.length / 2];
+
+		if (samples.length == 0) {
+			return false;
+		}
+
+		converter.toFloatArray(bytes, samples);
+
+		// a fresh detector per frame: its buffer size must match this frame's sample count
+		YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length, audioFormat.getSampleRate());
+
+		int playFrequency = (int) frequencyDetector.getFrequency(samples);
+		
+		LOGGER.info("校音频率:{}", playFrequency);
+
+		Map<String, Object> params = new HashMap<String, Object>();
+		params.put("frequency", playFrequency);
+
+		WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("checking", params);
+
+		nettyChannelManager.sendTextMessage(userId, resp);
+
+		return true;
+	}
+	
+}

+ 32 - 0
audio-analysis/src/main/java/com/yonge/netty/server/service/UserChannelContextService.java

@@ -0,0 +1,32 @@
+package com.yonge.netty.server.service;
+
+import io.netty.channel.Channel;
+
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.springframework.stereotype.Component;
+
+import com.yonge.netty.dto.UserChannelContext;
+
+/**
+ * Thread-safe registry mapping a netty {@link Channel} to its
+ * {@link UserChannelContext}.
+ */
+@Component
+public class UserChannelContextService {
+
+	private final ConcurrentHashMap<Channel, UserChannelContext> channelContextMap = new ConcurrentHashMap<Channel, UserChannelContext>();
+
+	/** Binds the given context to the channel, replacing any previous binding. Always returns true. */
+	public boolean register(Channel channel, UserChannelContext userChannelContext) {
+		channelContextMap.put(channel, userChannelContext);
+		return true;
+	}
+
+	/** Drops the binding for the channel; a null channel is a no-op. Always returns true. */
+	public boolean remove(Channel channel) {
+		if (channel == null) {
+			return true;
+		}
+		channelContextMap.remove(channel);
+		return true;
+	}
+
+	/** @return the context bound to the channel, or null when none is registered */
+	public UserChannelContext getChannelContext(Channel channel) {
+		return channelContextMap.get(channel);
+	}
+
+}

+ 128 - 0
audio-analysis/src/main/resources/application-template.yml

@@ -0,0 +1,128 @@
+server:
+  port: 9004
+  tomcat:
+    accesslog:
+      enabled: true
+      buffered: true
+      directory: /var/logs
+      file-date-format: -yyyy-MM-dd
+      pattern: common
+      prefix: tomcat-audio
+      rename-on-rotate: false
+      request-attributes-enabled: false
+      rotate: true
+      suffix: .log
+      
+netty:
+  server:
+    port: 8090
+
+eureka:
+  client:
+    serviceUrl:
+      defaultZone: http://admin:admin123@localhost:8761/eureka/eureka/
+    instance: 
+      lease-renewal-interval-in-seconds: 5
+
+spring:
+  application:
+    name: audio-analysis-server
+    
+  datasource:
+    name: test
+    url: jdbc:mysql://47.114.1.200:3306/mec_test?useUnicode=true&characterEncoding=UTF8&serverTimezone=Asia/Shanghai&allowMultiQueries=true
+    username: mec_dev
+    password: dayaDataOnline@2019
+    # 使用druid数据源
+    type: com.alibaba.druid.pool.DruidDataSource
+    driver-class-name: com.mysql.cj.jdbc.Driver
+    filters: stat
+    maxActive: 20
+    initialSize: 1
+    maxWait: 60000
+    minIdle: 1
+    timeBetweenEvictionRunsMillis: 60000
+    minEvictableIdleTimeMillis: 300000
+    validationQuery: select 'x'
+    testWhileIdle: true
+    testOnBorrow: false
+    testOnReturn: false
+    poolPreparedStatements: true
+    maxOpenPreparedStatements: 20
+  
+  redis:
+    host: 47.114.1.200
+    port: 6379
+    password: dyym
+    database: 1
+    #连接超时时间(毫秒)
+    timeout: 10000
+    jedis:
+      pool:
+        #连接池最大连接数(使用负值表示没有限制)
+        max-active: 20
+        #连接池最大阻塞等待时间(使用负值表示没有限制)
+        max-wait: 10000
+        #连接池中的最大空闲连接
+        max-idle: 10
+        #连接池中的最小空闲连接
+        min-idle: 5
+    
+
+mybatis:
+    mapperLocations: classpath:config/mybatis/*.xml
+    
+swagger:
+  base-package: com.yonge.audio.controller
+          
+##认证 
+security:
+  oauth2:
+    client:
+      client-id: app
+      client-secret: app
+    resource:
+      token-info-uri: http://localhost:8001/oauth/check_token
+  
+#spring boot admin 相关配置
+management:
+  endpoints:
+    web:
+      exposure:
+        include: "*"
+  endpoint:
+    health:
+      show-details: ALWAYS
+      
+
+ribbon:  
+    ReadTimeout: 60000  
+    ConnectTimeout: 60000
+
+message:
+  debugMode: true
+  
+##支付流水隐藏
+payment:
+  hiddenMode: false
+  #隐藏的支付方式
+  channel: YQPAY
+  
+eseal:
+  tsign:
+    projectid: 4438776254
+    projectSecret: a94cf63d6361084d232f345d71321691
+    apisUrl: http://smlitsm.tsign.cn:8080/tgmonitor/rest/app!getAPIInfo2
+
+push:
+  jiguang:
+    reqURL: https://api.jpush.cn/v3/push
+    appKey:
+      student: 0e7422e1d6e73637e678716a
+      teacher: 7e0282ca92c12c8c45a93bb3
+      system: 496fc1007dea59b1b4252d2b
+    masterSecret:
+      student: c2361016604eab56ab2db2ac
+      teacher: d47430e2f4755ef5dc050ac5
+      system: a5e51e9cdb25417463afbf7a
+    apns_production: false

+ 16 - 0
audio-analysis/src/main/resources/bootstrap-prod.properties

@@ -0,0 +1,16 @@
+#\u6307\u5b9a\u5f00\u53d1\u73af\u5883
+#spring.profiles.active=dev
+#\u670d\u52a1\u5668\u5730\u5740
+spring.cloud.nacos.config.server-addr=47.96.80.97:8848
+#\u9ed8\u8ba4\u4e3aPublic\u547d\u540d\u7a7a\u95f4,\u53ef\u4ee5\u7701\u7565\u4e0d\u5199
+spring.cloud.nacos.config.namespace=f40a7594-4bd0-4bc6-8397-9353c6d2e63a
+#\u6307\u5b9a\u914d\u7f6e\u7fa4\u7ec4 --\u5982\u679c\u662fPublic\u547d\u540d\u7a7a\u95f4 \u5219\u53ef\u4ee5\u7701\u7565\u7fa4\u7ec4\u914d\u7f6e
+spring.cloud.nacos.config.group=DEFAULT_GROUP
+#\u6587\u4ef6\u540d -- \u5982\u679c\u6ca1\u6709\u914d\u7f6e\u5219\u9ed8\u8ba4\u4e3a ${spring.appliction.name}
+spring.cloud.nacos.config.prefix=audio-analysis
+#\u6307\u5b9a\u6587\u4ef6\u540e\u7f00
+spring.cloud.nacos.config.file-extension=yaml
+#\u662f\u5426\u52a8\u6001\u5237\u65b0
+spring.cloud.nacos.config.refresh.enabled=true
+#\u662f\u5426\u542f\u7528nacos\u914d\u7f6e\u4e2d\u5fc3
+spring.cloud.nacos.config.enabled=true

+ 16 - 0
audio-analysis/src/main/resources/bootstrap-sim.properties

@@ -0,0 +1,16 @@
+#\u6307\u5b9a\u5f00\u53d1\u73af\u5883
+#spring.profiles.active=dev
+#\u670d\u52a1\u5668\u5730\u5740
+spring.cloud.nacos.config.server-addr=47.114.1.200:8848
+#\u9ed8\u8ba4\u4e3aPublic\u547d\u540d\u7a7a\u95f4,\u53ef\u4ee5\u7701\u7565\u4e0d\u5199
+spring.cloud.nacos.config.namespace=763783fd-dfd1-4a44-857e-42a27448c42c
+#\u6307\u5b9a\u914d\u7f6e\u7fa4\u7ec4 --\u5982\u679c\u662fPublic\u547d\u540d\u7a7a\u95f4 \u5219\u53ef\u4ee5\u7701\u7565\u7fa4\u7ec4\u914d\u7f6e
+spring.cloud.nacos.config.group=DEFAULT_GROUP
+#\u6587\u4ef6\u540d -- \u5982\u679c\u6ca1\u6709\u914d\u7f6e\u5219\u9ed8\u8ba4\u4e3a ${spring.appliction.name}
+spring.cloud.nacos.config.prefix=audio-analysis
+#\u6307\u5b9a\u6587\u4ef6\u540e\u7f00
+spring.cloud.nacos.config.file-extension=yaml
+#\u662f\u5426\u52a8\u6001\u5237\u65b0
+spring.cloud.nacos.config.refresh.enabled=true
+#\u662f\u5426\u542f\u7528nacos\u914d\u7f6e\u4e2d\u5fc3
+spring.cloud.nacos.config.enabled=true

+ 16 - 0
audio-analysis/src/main/resources/bootstrap-test.properties

@@ -0,0 +1,16 @@
+#\u6307\u5b9a\u5f00\u53d1\u73af\u5883
+#spring.profiles.active=dev
+#\u670d\u52a1\u5668\u5730\u5740
+spring.cloud.nacos.config.server-addr=47.114.176.40:8848
+#\u9ed8\u8ba4\u4e3aPublic\u547d\u540d\u7a7a\u95f4,\u53ef\u4ee5\u7701\u7565\u4e0d\u5199
+spring.cloud.nacos.config.namespace=46f06363-b9d6-46f0-9cd7-7b33dcf26bb0
+#\u6307\u5b9a\u914d\u7f6e\u7fa4\u7ec4 --\u5982\u679c\u662fPublic\u547d\u540d\u7a7a\u95f4 \u5219\u53ef\u4ee5\u7701\u7565\u7fa4\u7ec4\u914d\u7f6e
+spring.cloud.nacos.config.group=DEFAULT_GROUP
+#\u6587\u4ef6\u540d -- \u5982\u679c\u6ca1\u6709\u914d\u7f6e\u5219\u9ed8\u8ba4\u4e3a ${spring.appliction.name}
+spring.cloud.nacos.config.prefix=audio-analysis
+#\u6307\u5b9a\u6587\u4ef6\u540e\u7f00
+spring.cloud.nacos.config.file-extension=yaml
+#\u662f\u5426\u52a8\u6001\u5237\u65b0
+spring.cloud.nacos.config.refresh.enabled=true
+#\u662f\u5426\u542f\u7528nacos\u914d\u7f6e\u4e2d\u5fc3
+spring.cloud.nacos.config.enabled=true

+ 0 - 38
audio-analysis/src/test/java/com/yonge/audio/AppTest.java

@@ -1,38 +0,0 @@
-package com.yonge.audio;
-
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-
-/**
- * Unit test for simple App.
- */
-public class AppTest 
-    extends TestCase
-{
-    /**
-     * Create the test case
-     *
-     * @param testName name of the test case
-     */
-    public AppTest( String testName )
-    {
-        super( testName );
-    }
-
-    /**
-     * @return the suite of tests being tested
-     */
-    public static Test suite()
-    {
-        return new TestSuite( AppTest.class );
-    }
-
-    /**
-     * Rigourous Test :-)
-     */
-    public void testApp()
-    {
-        assertTrue( true );
-    }
-}

+ 1 - 0
cooleshow-gateway/.gitignore

@@ -0,0 +1 @@
+/bin/

+ 1 - 0
cooleshow-im/.gitignore

@@ -0,0 +1 @@
+/bin/

+ 72 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/dao/SysMusicCompareRecordDao.java

@@ -0,0 +1,72 @@
+package com.yonge.cooleshow.biz.dal.dao;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ibatis.annotations.Param;
+
+import com.yonge.cooleshow.biz.dal.dto.StudentTrainChartDto;
+import com.yonge.cooleshow.biz.dal.dto.StudentTrainOverviewDto;
+import com.yonge.cooleshow.biz.dal.entity.SysMusicCompareRecord;
+import com.yonge.cooleshow.biz.dal.enums.HeardLevelEnum;
+import com.yonge.toolset.mybatis.dal.BaseDAO;
+
+/**
+ * MyBatis DAO for music comparison (evaluation) records and the training
+ * statistics derived from them.
+ */
+public interface SysMusicCompareRecordDao extends BaseDAO<Long, SysMusicCompareRecord> {
+
+    /**
+     * @describe get the user's most recent evaluation record
+     * @author Joburgess
+     * @date 2021/8/23 0023
+     * @param userId: id of the user
+     * @return com.yonge.cooleshow.biz.dal.entity.SysMusicCompareRecord
+     */
+    SysMusicCompareRecord getUserLastEvaluationData(@Param("userId") Long userId);
+
+    /**
+     * @describe get the user's training overview (days/time/count totals)
+     * @author Joburgess
+     * @date 2021/8/11 0011
+     * @param userId: id of the user
+     * @return com.yonge.cooleshow.biz.dal.dto.StudentTrainOverviewDto
+     */
+    StudentTrainOverviewDto getUserTrainOverView(@Param("userId") Long userId);
+
+    /**
+     * @describe get the user's per-day training chart data for a date range
+     * @author Joburgess
+     * @date 2021/8/12 0012
+     * @param userId: id of the user
+     * @param startTime: range start (inclusive) — string format decided by the mapper XML
+     * @param endTime: range end
+     * @return java.util.List&lt;com.yonge.cooleshow.biz.dal.dto.StudentTrainChartDto&gt;
+     */
+    List<StudentTrainChartDto> getUserTrainChartData(@Param("userId") Long userId,
+                                                     @Param("startTime") String startTime,
+                                                     @Param("endTime") String endTime);
+
+    // NOTE(review): expected keys of params are defined in the mapper XML — confirm there
+    int countMusicGroupStudentTrainData(Map<String, Object> params);
+
+    /**
+     * @describe count newly-added cloud-study students for a branch
+     * @author Joburgess
+     * @date 2021/8/18 0018
+     * @param organId: branch (organization) id
+     * @return int
+     */
+    int getOrganNewCloudStudyNum(@Param("organId") Integer organId);
+    int getOrgansTotalNewCloudStudyNum(@Param("organIds") List<Integer> organIds);
+    List<Map<Integer, Integer>> getOrgansNewCloudStudyNum(@Param("organIds") List<Integer> organIds);
+
+    /**
+     * @describe get the music-score id of the user's highest score this week
+     * @author Joburgess
+     * @date 2021/8/30 0030
+     * @param userId: id of the user
+     * @param monday: Monday of the week, as stored on the record
+     * @param heardLevel: difficulty level the score was achieved at
+     * @return java.lang.Integer
+     */
+    Integer getUserWeekMaxMusicScoreId(@Param("userId") Long userId,
+                                       @Param("monday") String monday,
+                                        @Param("heardLevel") HeardLevelEnum heardLevel);
+}

+ 109 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/dto/IndexBaseDto.java

@@ -0,0 +1,109 @@
+package com.yonge.cooleshow.biz.dal.dto;
+
+import java.math.BigDecimal;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.List;
+
+import org.springframework.util.CollectionUtils;
+
+import com.yonge.cooleshow.biz.dal.enums.IndexDataType;
+import com.yonge.cooleshow.common.constant.CommonConstants;
+
+/**
+ * Dashboard ("index") data for one statistic: its type, display title,
+ * headline value and the underlying per-month series.
+ *
+ * @Author Joburgess
+ * @Date 2021/1/7 0007
+ */
+public class IndexBaseDto {
+
+    /** Statistic type; drives how {@link #setIndexMonthData(List, Date)} aggregates. */
+    private IndexDataType dataType;
+
+    private String title;
+
+    /** Headline value: a percentage, a plain total, or the latest monthly percent, depending on dataType. */
+    private BigDecimal percent;
+
+    private List<IndexBaseMonthData> indexMonthData;
+
+    private List<IndexBaseMonthData> indexMonthDataDetail;
+
+    public List<IndexBaseMonthData> getIndexMonthDataDetail() {
+        return indexMonthDataDetail;
+    }
+
+    public void setIndexMonthDataDetail(List<IndexBaseMonthData> indexMonthDataDetail) {
+        this.indexMonthDataDetail = indexMonthDataDetail;
+    }
+
+    public IndexBaseDto() {
+    }
+
+    public IndexBaseDto(String title) {
+        this.title = title;
+    }
+
+    public IndexBaseDto(IndexDataType dataType, String title) {
+        this.dataType = dataType;
+        this.title = title;
+    }
+
+    public IndexBaseDto(IndexDataType dataType, List<IndexBaseMonthData> indexMonthData) {
+        this.dataType = dataType;
+        this.title = dataType.getMsg();
+        this.indexMonthData = indexMonthData;
+    }
+
+    public IndexBaseDto(String title, BigDecimal percent, List<IndexBaseMonthData> indexMonthData) {
+        this.title = title;
+        this.percent = percent;
+        this.indexMonthData = indexMonthData;
+    }
+
+    public IndexDataType getDataType() {
+        return dataType;
+    }
+
+    public void setDataType(IndexDataType dataType) {
+        this.dataType = dataType;
+    }
+
+    public String getTitle() {
+        return title;
+    }
+
+    public void setTitle(String title) {
+        this.title = title;
+    }
+
+    public BigDecimal getPercent() {
+        return percent;
+    }
+
+    public void setPercent(BigDecimal percent) {
+        this.percent = percent;
+    }
+
+    public List<IndexBaseMonthData> getIndexMonthData() {
+        return indexMonthData;
+    }
+
+    /**
+     * Stores the monthly series and derives the headline {@link #percent}:
+     * collecting types sum totals (optionally as activate/total percentage);
+     * non-collecting types take the percent of the latest month not after
+     * {@code currentMonth}.
+     *
+     * @param indexMonthData per-month data points (may be null/empty — percent is then left unchanged)
+     * @param currentMonth upper bound for the "latest month" lookup
+     */
+    public void setIndexMonthData(List<IndexBaseMonthData> indexMonthData, Date currentMonth) {
+        this.indexMonthData = indexMonthData;
+        // Guard: nothing to aggregate, or no aggregation mode configured
+        // (previously a null dataType — possible via the (String) constructor — caused an NPE here).
+        if(CollectionUtils.isEmpty(indexMonthData) || dataType == null){
+            return;
+        }
+        if(dataType.getCollect()){
+            BigDecimal total = indexMonthData.stream().map(IndexBaseMonthData::getTotalNum).reduce(BigDecimal.ZERO, BigDecimal::add);
+            if(dataType.getPercent()){
+                if(total.compareTo(BigDecimal.ZERO)==0){
+                    this.percent = BigDecimal.ZERO;
+                }else{
+                    BigDecimal activateNum = indexMonthData.stream().map(IndexBaseMonthData::getActivateNum).reduce(BigDecimal.ZERO, BigDecimal::add);
+                    this.percent = activateNum.divide(total, CommonConstants.DECIMAL_PLACE, BigDecimal.ROUND_DOWN).multiply(new BigDecimal(100)).setScale(CommonConstants.DECIMAL_FINAL_PLACE, BigDecimal.ROUND_DOWN);
+                }
+            }else{
+                this.percent = total;
+            }
+        }else{
+            // BUG FIX: the original called Optional.get() unconditionally and threw
+            // NoSuchElementException when no entry was on or before currentMonth;
+            // now the previous percent is kept in that case.
+            this.percent = indexMonthData.stream()
+                    .filter(i->currentMonth.compareTo(i.getMonth())>=0)
+                    .max(Comparator.comparing(IndexBaseMonthData::getMonth))
+                    .map(IndexBaseMonthData::getPercent)
+                    .orElse(this.percent);
+        }
+    }
+}

+ 154 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/dto/IndexBaseMonthData.java

@@ -0,0 +1,154 @@
+package com.yonge.cooleshow.biz.dal.dto;
+
+import java.math.BigDecimal;
+import java.util.Date;
+
+import com.yonge.cooleshow.biz.dal.enums.IndexDataType;
+import com.yonge.cooleshow.common.entity.BaseEntity;
+
+/**
+ * One month's data point for a dashboard statistic.
+ * Maps to database table (index_base_month_data).
+ */
+public class IndexBaseMonthData extends BaseEntity {
+
+	/** Primary key */
+	private Long id;
+	
+	/** Month this data point belongs to */
+	private java.util.Date month;
+
+	/** Month as text (format decided by the mapper — TODO confirm) */
+	private String monthStr;
+
+	private String title;
+	
+	/** Total count */
+	private BigDecimal totalNum = BigDecimal.ZERO;
+	
+	/** Activated (effective) count */
+	private BigDecimal activateNum = BigDecimal.ZERO;
+	
+	/** Final result (percentage or aggregated value) */
+	private BigDecimal percent = BigDecimal.ZERO;
+
+	private IndexDataType dataType;
+
+	private String extendInfo;
+
+	private String orderType;
+
+	private Date createTime;
+
+	private Date updateTime;
+
+	public String getOrderType() {
+		return orderType;
+	}
+
+	public void setOrderType(String orderType) {
+		this.orderType = orderType;
+	}
+
+	public String getMonthStr() {
+		return monthStr;
+	}
+
+	public void setMonthStr(String monthStr) {
+		this.monthStr = monthStr;
+	}
+
+	public IndexBaseMonthData() {
+	}
+
+	/** Zero-valued data point for the given month. */
+	public IndexBaseMonthData(Date month) {
+		this.month = month;
+		this.totalNum = BigDecimal.ZERO;
+		this.activateNum = BigDecimal.ZERO;
+		this.percent = BigDecimal.ZERO;
+	}
+
+	// NOTE(review): organId is accepted but never stored — confirm whether this is intentional
+	public IndexBaseMonthData(Date month, Integer organId, BigDecimal percent) {
+		this.month = month;
+		this.percent = percent;
+	}
+
+	public Long getId() {
+		return id;
+	}
+
+	public void setId(Long id) {
+		this.id = id;
+	}
+
+	public Date getMonth() {
+		return month;
+	}
+
+	public void setMonth(Date month) {
+		this.month = month;
+	}
+
+	public BigDecimal getTotalNum() {
+		return totalNum;
+	}
+
+	public void setTotalNum(BigDecimal totalNum) {
+		this.totalNum = totalNum;
+	}
+
+	public BigDecimal getActivateNum() {
+		return activateNum;
+	}
+
+	public void setActivateNum(BigDecimal activateNum) {
+		this.activateNum = activateNum;
+	}
+
+	public BigDecimal getPercent() {
+		return percent;
+	}
+
+	public void setPercent(BigDecimal percent) {
+		this.percent = percent;
+	}
+
+	public IndexDataType getDataType() {
+		return dataType;
+	}
+
+	public void setDataType(IndexDataType dataType) {
+		this.dataType = dataType;
+	}
+
+	public Date getCreateTime() {
+		return createTime;
+	}
+
+	public void setCreateTime(Date createTime) {
+		this.createTime = createTime;
+	}
+
+	public Date getUpdateTime() {
+		return updateTime;
+	}
+
+	public void setUpdateTime(Date updateTime) {
+		this.updateTime = updateTime;
+	}
+
+	public String getTitle() {
+		return title;
+	}
+
+	public void setTitle(String title) {
+		this.title = title;
+	}
+
+	public String getExtendInfo() {
+		return extendInfo;
+	}
+
+	public void setExtendInfo(String extendInfo) {
+		this.extendInfo = extendInfo;
+	}
+}

+ 54 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/dto/StudentTrainChartDto.java

@@ -0,0 +1,54 @@
+package com.yonge.cooleshow.biz.dal.dto;
+
+import io.swagger.annotations.ApiModelProperty;
+
+import java.time.LocalDate;
+
+/**
+ * One day's training statistics for the student training chart.
+ *
+ * @Author Joburgess
+ * @Date 2021/8/12 0012
+ */
+public class StudentTrainChartDto {
+
+    // training duration (unit not visible here — presumably seconds or minutes; confirm against the mapper)
+    @ApiModelProperty("训练时长")
+    private int trainTime = 0;
+
+    // number of training sessions on this day
+    @ApiModelProperty("训练次数")
+    private int trainNum = 0;
+
+    // calendar day the statistics belong to
+    @ApiModelProperty("训练日期")
+    private LocalDate trainDate;
+
+    public StudentTrainChartDto() {
+    }
+
+    public StudentTrainChartDto(int trainTime, int trainNum, LocalDate trainDate) {
+        this.trainTime = trainTime;
+        this.trainNum = trainNum;
+        this.trainDate = trainDate;
+    }
+
+    public int getTrainTime() {
+        return trainTime;
+    }
+
+    public void setTrainTime(int trainTime) {
+        this.trainTime = trainTime;
+    }
+
+    public int getTrainNum() {
+        return trainNum;
+    }
+
+    public void setTrainNum(int trainNum) {
+        this.trainNum = trainNum;
+    }
+
+    public LocalDate getTrainDate() {
+        return trainDate;
+    }
+
+    public void setTrainDate(LocalDate trainDate) {
+        this.trainDate = trainDate;
+    }
+}

+ 43 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/dto/StudentTrainOverviewDto.java

@@ -0,0 +1,43 @@
+package com.yonge.cooleshow.biz.dal.dto;
+
+import io.swagger.annotations.ApiModelProperty;
+
+/**
+ * Aggregate training overview for a student: total days, duration and count.
+ *
+ * @Author Joburgess
+ * @Date 2021/8/11 0011
+ */
+public class StudentTrainOverviewDto {
+
+    // distinct days with at least one training session
+    @ApiModelProperty("训练天数")
+    private int trainDays = 0;
+
+    // total training duration (unit not visible here — confirm against the mapper)
+    @ApiModelProperty("训练时长")
+    private int trainTime = 0;
+
+    // total number of training sessions
+    @ApiModelProperty("训练次数")
+    private int trainNum = 0;
+
+    public int getTrainDays() {
+        return trainDays;
+    }
+
+    public void setTrainDays(int trainDays) {
+        this.trainDays = trainDays;
+    }
+
+    public int getTrainTime() {
+        return trainTime;
+    }
+
+    public void setTrainTime(int trainTime) {
+        this.trainTime = trainTime;
+    }
+
+    public int getTrainNum() {
+        return trainNum;
+    }
+
+    public void setTrainNum(int trainNum) {
+        this.trainNum = trainNum;
+    }
+}

+ 1 - 1
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/entity/AppVersionInfo.java

@@ -16,7 +16,7 @@ public class AppVersionInfo {
 	/** 版本号(以V开头) */
 	private String version;
 	
-	/** 状态(newest/history) */
+	/** 状态(newest/history/auditing) */
 	private String status;
 	
 	/** 是否强制更新 */

+ 274 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/entity/SysMusicCompareRecord.java

@@ -0,0 +1,274 @@
+package com.yonge.cooleshow.biz.dal.entity;
+
+import java.math.BigDecimal;
+import java.time.DayOfWeek;
+import java.time.LocalDate;
+
+import org.apache.commons.lang3.builder.ToStringBuilder;
+
+import com.yonge.cooleshow.biz.dal.enums.DeviceTypeEnum;
+import com.yonge.cooleshow.biz.dal.enums.FeatureType;
+import com.yonge.cooleshow.biz.dal.enums.HeardLevelEnum;
+import com.yonge.cooleshow.common.entity.BaseEntity;
+import com.yonge.toolset.utils.date.DateUtil;
+
+/**
+ * One music comparison (evaluation) record for a user and a music sheet.
+ * Maps to database table (sys_music_compare_record).
+ */
+public class SysMusicCompareRecord extends BaseEntity {
+
+	/** Primary key */
+	private Long id;
+
+	/** Behavior id */
+	private String behaviorId;
+	
+	/** User id */
+	private Long userId;
+	
+	/** Music sheet id */
+	private Long musicSheetId;
+
+	private HeardLevelEnum heardLevel;
+
+	private String sysMusicScoreName;
+	
+	/** Raw score data */
+	private String scoreData;
+
+	private int notesDataIndex;
+
+	/** Total score */
+	private BigDecimal score;
+
+	/** Intonation score */
+	private BigDecimal intonation;
+
+	/** Cadence (rhythm) score */
+	private BigDecimal cadence;
+
+	/** Integrity (completion) score */
+	private BigDecimal integrity;
+
+	/** Recording file path */
+	private String recordFilePath;
+
+	private String videoFilePath;
+
+	/** Monday of the current week (ISO date string), used for weekly statistics */
+	private String monday = LocalDate.now().with(DateUtil.weekFields.dayOfWeek(), DayOfWeek.MONDAY.getValue()).toString();
+
+	private FeatureType feature;
+	
+	/** Create time */
+	private java.util.Date createTime;
+
+	private DeviceTypeEnum deviceType;
+
+	private float sourceTime = 0;
+
+	private float playTime = 0;
+
+	// playback speed; 90 is the default
+	private int speed = 90;
+
+	private String clientId;
+	
+	private String partIndex;
+
+	public SysMusicCompareRecord() {
+	}
+
+	public SysMusicCompareRecord(FeatureType feature) {
+		this.feature = feature;
+	}
+
+	public void setId(Long id){
+		this.id = id;
+	}
+	
+	public Long getId(){
+		return this.id;
+	}
+
+	public String getBehaviorId() {
+		return behaviorId;
+	}
+
+	public void setBehaviorId(String behaviorId) {
+		this.behaviorId = behaviorId;
+	}
+
+	public void setUserId(Long userId){
+		this.userId = userId;
+	}
+	
+	public Long getUserId(){
+		return this.userId;
+	}
+
+	public String getClientId() {
+		return clientId;
+	}
+
+	public void setClientId(String clientId) {
+		this.clientId = clientId;
+	}
+
+	public HeardLevelEnum getHeardLevel() {
+		return heardLevel;
+	}
+
+	public void setHeardLevel(HeardLevelEnum heardLevel) {
+		this.heardLevel = heardLevel;
+	}
+
+	public Long getMusicSheetId() {
+		return musicSheetId;
+	}
+
+	public void setMusicSheetId(Long musicSheetId) {
+		this.musicSheetId = musicSheetId;
+	}
+
+	public String getSysMusicScoreName() {
+		return sysMusicScoreName;
+	}
+
+	public void setSysMusicScoreName(String sysMusicScoreName) {
+		this.sysMusicScoreName = sysMusicScoreName;
+	}
+
+	public void setScoreData(String scoreData){
+		this.scoreData = scoreData;
+	}
+	
+	public String getScoreData(){
+		return this.scoreData;
+	}
+
+	public int getNotesDataIndex() {
+		return notesDataIndex;
+	}
+
+	public void setNotesDataIndex(int notesDataIndex) {
+		this.notesDataIndex = notesDataIndex;
+	}
+
+	public BigDecimal getScore() {
+		return score;
+	}
+
+	public void setScore(BigDecimal score) {
+		this.score = score;
+	}
+
+	public BigDecimal getIntonation() {
+		return intonation;
+	}
+
+	public void setIntonation(BigDecimal intonation) {
+		this.intonation = intonation;
+	}
+
+	public BigDecimal getCadence() {
+		return cadence;
+	}
+
+	public void setCadence(BigDecimal cadence) {
+		this.cadence = cadence;
+	}
+
+	public BigDecimal getIntegrity() {
+		return integrity;
+	}
+
+	public void setIntegrity(BigDecimal integrity) {
+		this.integrity = integrity;
+	}
+
+	public String getRecordFilePath() {
+		return recordFilePath;
+	}
+
+	public void setRecordFilePath(String recordFilePath) {
+		this.recordFilePath = recordFilePath;
+	}
+
+	public String getVideoFilePath() {
+		return videoFilePath;
+	}
+
+	public void setVideoFilePath(String videoFilePath) {
+		this.videoFilePath = videoFilePath;
+	}
+
+	public DeviceTypeEnum getDeviceType() {
+		return deviceType;
+	}
+
+	public void setDeviceType(DeviceTypeEnum deviceType) {
+		this.deviceType = deviceType;
+	}
+
+	public float getSourceTime() {
+		return sourceTime;
+	}
+
+	public void setSourceTime(float sourceTime) {
+		this.sourceTime = sourceTime;
+	}
+
+	public float getPlayTime() {
+		return playTime;
+	}
+
+	public void setPlayTime(float playTime) {
+		this.playTime = playTime;
+	}
+
+	public int getSpeed() {
+		return speed;
+	}
+
+	public void setSpeed(int speed) {
+		this.speed = speed;
+	}
+
+	public String getMonday() {
+		return monday;
+	}
+
+	public void setMonday(String monday) {
+		this.monday = monday;
+	}
+
+	public FeatureType getFeature() {
+		return feature;
+	}
+
+	public void setFeature(FeatureType feature) {
+		this.feature = feature;
+	}
+
+	public void setCreateTime(java.util.Date createTime){
+		this.createTime = createTime;
+	}
+	
+	public java.util.Date getCreateTime(){
+		return this.createTime;
+	}
+			
+	public String getPartIndex() {
+		return partIndex;
+	}
+
+	public void setPartIndex(String partIndex) {
+		this.partIndex = partIndex;
+	}
+
+	@Override
+	public String toString() {
+		return ToStringBuilder.reflectionToString(this);
+	}
+
+}

+ 35 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/enums/DeviceTypeEnum.java

@@ -0,0 +1,35 @@
+package com.yonge.cooleshow.biz.dal.enums;
+
+import com.yonge.toolset.base.enums.BaseEnum;
+
+/**
+ * Type of client device a record originated from.
+ *
+ * NOTE(review): the public setters mutate shared enum constants — any caller
+ * invoking setCode/setMsg changes the value process-wide; consider making the
+ * fields final and removing the setters if no caller depends on them.
+ */
+public enum DeviceTypeEnum implements BaseEnum<String, DeviceTypeEnum> {
+	IOS("IOS", "苹果"),
+	ANDROID("ANDROID", "安卓"),
+	WEB("WEB", "浏览器");
+
+	private String code;
+
+	private String msg;
+
+	DeviceTypeEnum(String code, String msg) {
+		this.code = code;
+		this.msg = msg;
+	}
+
+	public void setCode(String code) {
+		this.code = code;
+	}
+
+	public String getMsg() {
+		return msg;
+	}
+
+	public void setMsg(String msg) {
+		this.msg = msg;
+	}
+
+	@Override
+	public String getCode() {
+		return this.code;
+	}
+}

+ 27 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/enums/FeatureType.java

@@ -0,0 +1,27 @@
+package com.yonge.cooleshow.biz.dal.enums;
+
+import com.yonge.toolset.base.enums.BaseEnum;
+
+/**
+ * Feature type of a cloud-study usage record.
+ */
+public enum FeatureType implements BaseEnum<String, FeatureType> {
+
+	/** cloud-coach training */
+	CLOUD_STUDY_TRAIN("CLOUD_STUDY_TRAIN", "云教练训练"),
+	/** cloud-coach evaluation */
+	CLOUD_STUDY_EVALUATION("CLOUD_STUDY_EVALUATION", "云教练评测");
+
+	private final String code;
+
+	private final String desc;
+
+	FeatureType(String code, String desc) {
+		this.code = code;
+		this.desc = desc;
+	}
+
+	@Override
+	public String getCode() {
+		return code;
+	}
+
+	/** @return the human-readable description of this feature */
+	public String getDesc() {
+		return desc;
+	}
+
+}

+ 71 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/enums/HeardLevelEnum.java

@@ -0,0 +1,71 @@
+package com.yonge.cooleshow.biz.dal.enums;
+
+import com.yonge.toolset.base.enums.BaseEnum;
+
+/**
+ * Evaluation difficulty level. Each level carries the scoring tolerances
+ * applied during evaluation.
+ *
+ * NOTE(review): the public setters mutate shared enum constants process-wide;
+ * consider making the fields final and dropping the setters if unused.
+ */
+public enum HeardLevelEnum implements BaseEnum<String, HeardLevelEnum> {
+	BEGINNER("BEGINNER","入门级", 0.1f, 0.7f, 1.5f),
+    ADVANCED("ADVANCED","进阶级", 1, 0.9f, 1.3f),
+    PERFORMER("PERFORMER","大师级", 3, 0.93f, 1.1f);
+
+    private String code;
+
+    private String msg;
+
+    /** Allowed intonation deviation, in cents */
+    private float intonationCentsRange;
+
+    /** Time-ratio cutoff applied when scoring cadence (rhythm) */
+    private float cadenceRange;
+
+    /** Integrity (completion) multiplier */
+    private float integrityRange;
+
+    HeardLevelEnum(String code, String msg, float intonationCentsRange, float cadenceRange, float integrityRange) {
+        this.code = code;
+        this.msg = msg;
+        this.intonationCentsRange = intonationCentsRange;
+        this.cadenceRange = cadenceRange;
+        this.integrityRange = integrityRange;
+    }
+
+    public void setCode(String code) {
+        this.code = code;
+    }
+
+    public String getMsg() {
+        return msg;
+    }
+
+    public void setMsg(String msg) {
+        this.msg = msg;
+    }
+
+    @Override
+    public String getCode() {
+        return this.code;
+    }
+
+    public float getIntonationCentsRange() {
+        return intonationCentsRange;
+    }
+
+    public void setIntonationCentsRange(float intonationCentsRange) {
+        this.intonationCentsRange = intonationCentsRange;
+    }
+
+    public float getCadenceRange() {
+        return cadenceRange;
+    }
+
+    public void setCadenceRange(float cadenceRange) {
+        this.cadenceRange = cadenceRange;
+    }
+
+    public float getIntegrityRange() {
+        return integrityRange;
+    }
+
+    public void setIntegrityRange(float integrityRange) {
+        this.integrityRange = integrityRange;
+    }
+}

+ 139 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/enums/IndexDataType.java

@@ -0,0 +1,139 @@
+package com.yonge.cooleshow.biz.dal.enums;
+
+import com.yonge.toolset.base.enums.BaseEnum;
+
+/**
+ * Dashboard/statistics indicator types.
+ * {@code percent} marks indicators whose value is a percentage (e.g. the
+ * conversion/activation rates below); {@code collect} presumably marks
+ * indicators that are rolled up into summary views — TODO confirm against
+ * the consumers of this enum.
+ *
+ * @Author Joburgess
+ * @Date 2021/1/8 0008
+ **/
+public enum IndexDataType implements BaseEnum<String, IndexDataType> {
+    // Student data types
+    STUDENT_REGISTRATION_NUM("STUDENT_REGISTRATION_NUM", "注册学员", false, false),
+    CHARGE_STUDENT_CHANGE_RATE("CHARGE_STUDENT_CHANGE_RATE", "付费转化率", true, false),
+    ACTIVATION_RATE("ACTIVATION_RATE","激活率", true, false),
+
+    // Operations data types
+    SCHOOL("SCHOOL","合作单位", false, false),
+    PROGRESS_MUSIC_GROUP_NUM("PROGRESS_MUSIC_GROUP_NUM","进行中乐团", false, false),
+    TRANSACTION_TEACHER_NUM("TRANSACTION_TEACHER_NUM", "衔接老师人数", false, false),
+
+    // HR data
+    TEACHER_NUM("TEACHER_NUM","老师总数", false, false),
+    FULL_TIME_NUM("FULL_TIME_NUM","全职人数", false, false),
+    PART_TIME_NUM("PART_TIME_NUM","兼职人数", false, false),
+
+    DIMISSION_NUM("DIMISSION_NUM","离职人数", false, false),
+
+    // Remaining lesson hours
+    SURPLUS_COURSE_NUM("SURPLUS_COURSE_NUM", "剩余课时总数", false, false),
+    SURPLUS_MUSIC_COURSE_NUM("SURPLUS_MUSIC_COURSE_NUM", "剩余乐团课时", false, false),
+    SURPLUS_VIP_COURSE_NUM("SURPLUS_VIP_COURSE_NUM", "剩余VIP课时", false, false),
+    SURPLUS_PRACTICE_COURSE_NUM("SURPLUS_PRACTICE_COURSE_NUM", "剩余网管课时", false, false),
+
+    // Consumed lesson hours
+    OVER_COURSE_NUM("OVER_COURSE_NUM", "已消耗课时总数", false, false),
+    OVER_MUSIC_COURSE_NUM("OVER_MUSIC_COURSE_NUM", "已消耗乐团课时", false, false),
+    OVER_VIP_COURSE_NUM("OVER_VIP_COURSE_NUM", "已消耗VIP课时", false, false),
+    OVER_PRACTICE_COURSE_NUM("OVER_PRACTICE_COURSE_NUM", "已消耗网管课时", false, false),
+
+    // Business (revenue/expense) data
+    FINANCE_PAY("FINANCE_PAY","财务支出", false, false),
+    FINANCE_BALANCE_AMOUNT("FINANCE_BALANCE_AMOUNT","余额收入", false, false),
+    FINANCE_AMOUNT("FINANCE_AMOUNT","现金收入", false, false),
+    TOTAL_AMOUNT("TOTAL_AMOUNT","总收入", false, false),
+
+    // Revenue detail
+    APPLY_AMOUNT("APPLY_AMOUNT","报名缴费收入", false, false),
+    RENEW_AMOUNT("RENEW_AMOUNT","乐团续费收入", false, false),
+    VIP_AMOUNT("VIP_AMOUNT","VIP课收入", false, false),
+    PRACTICE_AMOUNT("PRACTICE_AMOUNT","网管课收入", false, false),
+    OTHER_AMOUNT("OTHER_AMOUNT","其他收入", false, false),
+    OTHER_AMOUNT_DETAIL("OTHER_AMOUNT_DETAIL","其他收入明细", false, false),
+
+    // Business-activity (homework) data
+    HOMEWORK_CREATE_RATE("HOMEWORK_CREATE_RATE","训练布置率", true, true),
+    HOMEWORK_SUBMIT_RATE("HOMEWORK_SUBMIT_RATE","训练提交率", true, true),
+    HOMEWORK_COMMENT_RATE("HOMEWORK_COMMENT_RATE","训练点评率", true, true),
+
+    // Course data
+    MUSIC_GROUP_COURSE("MUSIC_GROUP_COURSE","乐团课", false, true),
+    VIP_GROUP_COURSE("VIP_GROUP_COURSE","VIP课", false, true),
+    VIP_GROUP_ONLINE_COURSE("VIP_GROUP_ONLINE_COURSE","VIP线上课", false, true),
+    VIP_GROUP_OFFLINE_COURSE("VIP_GROUP_OFFLINE_COURSE","VIP线下课", false, true),
+    PRACTICE_GROUP_COURSE("PRACTICE_GROUP_COURSE","网管课", false, true),
+
+    // Student changes
+    ADD_STUDENT_REGISTRATION_NUM("ADD_STUDENT_REGISTRATION_NUM", "新增注册学员", false, true),
+    MUSIC_GROUP_STUDENT("MUSIC_GROUP_STUDENT","乐团在读学员", false, false),
+    NEWLY_STUDENT_NUM("NEWLY_STUDENT_NUM","乐团新增学员", false, true),
+    QUIT_MUSIC_GROUP_STUDENT_NUM("QUIT_MUSIC_GROUP_STUDENT_NUM","退学学员", false, true),
+    VIP_PRACTICE_STUDENT_NUM("VIP_PRACTICE_STUDENT_NUM", "VIP/网管在读", false, false),
+    VIP_PRACTICE_ADD_STUDENT_NUM("VIP_PRACTICE_ADD_STUDENT_NUM", "VIP/网管新增", false, true),
+
+    // Active-user statistics
+    ORGAN_TOTAL_STUDENT_NUM("ORGAN_TOTAL_STUDENT_NUM", "分部学员总数", false, false),
+    CLOUD_STUDY_LIVELY_STUDENT_NUM("CLOUD_STUDY_LIVELY_STUDENT_NUM", "活跃用户", false, false),
+    CLOUD_STUDY_DAY_USE_STUDENT_NUM("CLOUD_STUDY_DAY_USE_STUDENT_NUM", "团练宝使用用户", false, false),
+    CLOUD_STUDY_NEW_STUDENT_NUM("CLOUD_STUDY_NEW_STUDENT_NUM", "团练宝新增用户", false, false),
+    CLOUD_NEW_STUDENT_NUM("CLOUD_NEW_STUDENT_NUM", "团练宝新用户人数", false, false),
+    MEMBER_STUDENT_NUM("MEMBER_STUDENT_NUM", "会员数量", false, false),
+    EXPERIENCE_MEMBER_STUDENT_NUM("EXPERIENCE_MEMBER_STUDENT_NUM", "试用会员数量", false, false),
+    NEW_MEMBER_STUDENT_NUM("NEW_MEMBER_STUDENT_NUM", "新增会员数量", false, false),
+
+    CLOUD_STUDY_TRAIN_NUM("CLOUD_STUDY_TRAIN_NUM", "团练宝训练次数", false, false),
+    CLOUD_STUDY_TRAIN_TIME("CLOUD_STUDY_TRAIN_TIME", "团练宝训练时长", false, false),
+
+    // Small-class (VIP/theory/practice) order statistics
+    STUDENT_VIP_ORDER_NUM("STUDENT_VIP_ORDER_NUM", "学员vip课订单数量", false, false),
+    STUDENT_THEORY_ORDER_NUM("STUDENT_THEORY_ORDER_NUM", "学员乐理课订单数量", false, false),
+    STUDENT_PRACTICE_ORDER_NUM("STUDENT_PRACTICE_ORDER_NUM", "学员网管课订单数量", false, false),
+    ;
+
+    /** Stable string code; mirrors the enum name. */
+    private String code;
+
+    /** Human-readable (Chinese) label. */
+    private String msg;
+
+    /** True when the indicator value is a percentage. */
+    private Boolean percent;
+
+    /** Presumably: include this indicator in roll-up/summary views — TODO confirm. */
+    private Boolean collect;
+
+    IndexDataType(String code, String msg, Boolean percent, Boolean collect) {
+        this.code = code;
+        this.msg = msg;
+        this.percent = percent;
+        this.collect = collect;
+    }
+
+    @Override
+    public String getCode() {
+        return code;
+    }
+
+    public void setCode(String code) {
+        this.code = code;
+    }
+
+    public String getMsg() {
+        return msg;
+    }
+
+    public void setMsg(String msg) {
+        this.msg = msg;
+    }
+
+    public Boolean getCollect() {
+        return collect;
+    }
+
+    public void setCollect(Boolean collect) {
+        this.collect = collect;
+    }
+
+    public Boolean getPercent() {
+        return percent;
+    }
+
+    public void setPercent(Boolean percent) {
+        this.percent = percent;
+    }
+}

+ 125 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/queryInfo/SysMusicCompareRecordQueryInfo.java

@@ -0,0 +1,125 @@
+package com.yonge.cooleshow.biz.dal.queryInfo;
+
+import io.swagger.annotations.ApiModelProperty;
+
+import com.yonge.cooleshow.biz.dal.enums.FeatureType;
+import com.yonge.cooleshow.biz.dal.enums.HeardLevelEnum;
+import com.yonge.toolset.base.page.QueryInfo;
+
+/**
+ * Paged query parameters for a student's music-compare (training) records.
+ * Accessors are listed in field-declaration order.
+ *
+ * @Author Joburgess
+ * @Date 2021/8/11 0011
+ */
+public class SysMusicCompareRecordQueryInfo extends QueryInfo {
+
+    private Long userId;
+
+    private Long teacherId;
+
+    // Date strings formatted as yyyy-MM-dd (compared against create_time_ in SQL)
+    private String startTime;
+
+    private String endTime;
+
+    private String musicGroupId;
+
+    private int orderType;
+
+    @ApiModelProperty("排序字段:TRAINDAYS(训练天数);TRAINTIME(训练时长);TRAINNUM(训练次数)")
+    private String sortField = "TRAINDAYS";
+
+    private FeatureType featureType;
+
+    private HeardLevelEnum heardLevel;
+
+    private Boolean visitFlag;
+
+    private Integer organId;
+
+    public Long getUserId() {
+        return userId;
+    }
+
+    public void setUserId(Long userId) {
+        this.userId = userId;
+    }
+
+    public Long getTeacherId() {
+        return teacherId;
+    }
+
+    public void setTeacherId(Long teacherId) {
+        this.teacherId = teacherId;
+    }
+
+    public String getStartTime() {
+        return startTime;
+    }
+
+    public void setStartTime(String startTime) {
+        this.startTime = startTime;
+    }
+
+    public String getEndTime() {
+        return endTime;
+    }
+
+    public void setEndTime(String endTime) {
+        this.endTime = endTime;
+    }
+
+    public String getMusicGroupId() {
+        return musicGroupId;
+    }
+
+    public void setMusicGroupId(String musicGroupId) {
+        this.musicGroupId = musicGroupId;
+    }
+
+    public int getOrderType() {
+        return orderType;
+    }
+
+    public void setOrderType(int orderType) {
+        this.orderType = orderType;
+    }
+
+    public String getSortField() {
+        return sortField;
+    }
+
+    public void setSortField(String sortField) {
+        this.sortField = sortField;
+    }
+
+    public FeatureType getFeatureType() {
+        return featureType;
+    }
+
+    public void setFeatureType(FeatureType featureType) {
+        this.featureType = featureType;
+    }
+
+    public HeardLevelEnum getHeardLevel() {
+        return heardLevel;
+    }
+
+    public void setHeardLevel(HeardLevelEnum heardLevel) {
+        this.heardLevel = heardLevel;
+    }
+
+    public Boolean getVisitFlag() {
+        return visitFlag;
+    }
+
+    public void setVisitFlag(Boolean visitFlag) {
+        this.visitFlag = visitFlag;
+    }
+
+    public Integer getOrganId() {
+        return organId;
+    }
+
+    public void setOrganId(Integer organId) {
+        this.organId = organId;
+    }
+}

+ 46 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/service/SysMusicCompareRecordService.java

@@ -0,0 +1,46 @@
+package com.yonge.cooleshow.biz.dal.service;
+
+import java.util.Map;
+
+import com.yonge.cooleshow.biz.dal.entity.SysMusicCompareRecord;
+import com.yonge.cooleshow.biz.dal.queryInfo.SysMusicCompareRecordQueryInfo;
+import com.yonge.toolset.base.page.PageInfo;
+import com.yonge.toolset.mybatis.service.BaseService;
+
+public interface SysMusicCompareRecordService extends BaseService<Long, SysMusicCompareRecord> {
+
+    /**
+     * Persist a user's evaluation (music-compare) result.
+     * NOTE(review): the current implementation issues an UPDATE on an existing
+     * row rather than an insert — confirm callers create the row first.
+     * @param sysMusicCompareRecord record carrying the score data to store
+     * @return void
+     */
+    void saveMusicCompareData(SysMusicCompareRecord sysMusicCompareRecord);
+
+    /**
+     * Note-level play statistics of the given evaluation record, or of the
+     * user's most recent evaluation when {@code recordId} is null.
+     * @author Joburgess
+     * @date 2021/8/23 0023
+     * @param userId: owner of the records
+     * @param recordId: specific record to load; may be null
+     * @return a JSON object merging the stored score data with the record's
+     *         summary fields, or null when no usable record exists
+     */
+    Object getLastEvaluationMusicalNotesPlayStats(Long userId, Long recordId);
+
+    /**
+     * Student training statistics: overview, 7-day chart and paged detail.
+     * @author Joburgess
+     * @date 2021/8/12 0012
+     * @param queryInfo: paging plus user/date-range filters
+     * @return java.util.Map&lt;java.lang.String,java.lang.Object&gt; with keys
+     *         "userTrainOverView", "userTrainChartData" and "detail"
+     */
+    Map<String, Object> studentTrainData(SysMusicCompareRecordQueryInfo queryInfo);
+
+    /**
+     * Paged student training records.
+     * @author Joburgess
+     * @date 2021/8/12 0012
+     * @param queryInfo: paging plus filters
+     * @return com.yonge.toolset.base.page.PageInfo&lt;SysMusicCompareRecord&gt;
+     */
+    PageInfo<SysMusicCompareRecord> queryStudentTrainData(SysMusicCompareRecordQueryInfo queryInfo);
+
+}

+ 195 - 0
cooleshow-user/user-biz/src/main/java/com/yonge/cooleshow/biz/dal/service/impl/SysMusicCompareRecordServiceImpl.java

@@ -0,0 +1,195 @@
+package com.yonge.cooleshow.biz.dal.service.impl;
+
+import java.math.BigDecimal;
+import java.time.LocalDate;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
+import org.springframework.util.CollectionUtils;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.JSONObject;
+import com.yonge.cooleshow.auth.api.client.SysUserFeignService;
+import com.yonge.cooleshow.biz.dal.dao.MusicSheetDao;
+import com.yonge.cooleshow.biz.dal.dao.StudentDao;
+import com.yonge.cooleshow.biz.dal.dao.SysMusicCompareRecordDao;
+import com.yonge.cooleshow.biz.dal.dao.TeacherDao;
+import com.yonge.cooleshow.biz.dal.dto.IndexBaseDto;
+import com.yonge.cooleshow.biz.dal.dto.IndexBaseMonthData;
+import com.yonge.cooleshow.biz.dal.dto.StudentTrainChartDto;
+import com.yonge.cooleshow.biz.dal.dto.StudentTrainOverviewDto;
+import com.yonge.cooleshow.biz.dal.entity.MusicSheet;
+import com.yonge.cooleshow.biz.dal.entity.SysMusicCompareRecord;
+import com.yonge.cooleshow.biz.dal.enums.FeatureType;
+import com.yonge.cooleshow.biz.dal.enums.IndexDataType;
+import com.yonge.cooleshow.biz.dal.queryInfo.SysMusicCompareRecordQueryInfo;
+import com.yonge.cooleshow.biz.dal.service.SysMessageService;
+import com.yonge.cooleshow.biz.dal.service.SysMusicCompareRecordService;
+import com.yonge.toolset.base.page.PageInfo;
+import com.yonge.toolset.mybatis.dal.BaseDAO;
+import com.yonge.toolset.mybatis.service.impl.BaseServiceImpl;
+import com.yonge.toolset.utils.collection.MapUtil;
+import com.yonge.toolset.utils.date.DateUtil;
+
+/**
+ * Default {@link SysMusicCompareRecordService} implementation backed by
+ * {@link SysMusicCompareRecordDao}.
+ */
+@Service
+public class SysMusicCompareRecordServiceImpl extends BaseServiceImpl<Long, SysMusicCompareRecord>  implements SysMusicCompareRecordService {
+	
+	@Autowired
+	private SysMusicCompareRecordDao sysMusicCompareRecordDao;
+	@Autowired
+	private TeacherDao teacherDao;
+	@Autowired
+	private StudentDao studentDao;
+	@Autowired
+	private SysMessageService sysMessageService;
+	
+    @Autowired
+    private SysUserFeignService sysUserFeignService;
+
+	@Autowired
+	private MusicSheetDao musicSheetDao;
+
+	@Override
+	public BaseDAO<Long, SysMusicCompareRecord> getDAO() {
+		return sysMusicCompareRecordDao;
+	}
+
+	// NOTE(review): this override adds nothing over super.insert(bean);
+	// consider removing it once confirmed nothing depends on the override.
+	@Override
+	public long insert(SysMusicCompareRecord bean) {
+		long insert = super.insert(bean);
+		return insert;
+	}
+
+	/**
+	 * Stores the evaluation result.
+	 * NOTE(review): despite the "save" name this issues an UPDATE, so the row
+	 * must already exist (created earlier via insert) — confirm with callers.
+	 */
+	@Override
+	@Transactional
+	public void saveMusicCompareData(SysMusicCompareRecord sysMusicCompareRecord) {
+		
+		sysMusicCompareRecordDao.update(sysMusicCompareRecord);
+	}
+
+	/**
+	 * Returns the stored score JSON of the given record (or the user's most
+	 * recent evaluation when recordId is null), enriched with the record's
+	 * summary fields; null when no record or no score data exists.
+	 */
+	@Override
+	public Object getLastEvaluationMusicalNotesPlayStats(Long userId, Long recordId) {
+		SysMusicCompareRecord userLastEvaluationData;
+		if(Objects.nonNull(recordId)){
+			userLastEvaluationData = sysMusicCompareRecordDao.get(recordId);
+		}else{
+			userLastEvaluationData = sysMusicCompareRecordDao.getUserLastEvaluationData(userId);
+		}
+		if(Objects.isNull(userLastEvaluationData)){
+			return null;
+		}
+		if(StringUtils.isBlank(userLastEvaluationData.getScoreData())){
+			return null;
+		}
+
+		// merge the persisted score JSON with the record's summary columns
+		JSONObject jsonObject = JSON.parseObject(userLastEvaluationData.getScoreData());
+		jsonObject.put("recordId", userLastEvaluationData.getId());
+		jsonObject.put("score", userLastEvaluationData.getScore());
+		jsonObject.put("cadence", userLastEvaluationData.getCadence());
+		jsonObject.put("intonation", userLastEvaluationData.getIntonation());
+		jsonObject.put("integrity", userLastEvaluationData.getIntegrity());
+		jsonObject.put("heardLevel", userLastEvaluationData.getHeardLevel());
+		jsonObject.put("videoFilePath", userLastEvaluationData.getVideoFilePath());
+		jsonObject.put("partIndex", userLastEvaluationData.getPartIndex());
+
+		// attach the music-sheet name when the sheet still exists
+		MusicSheet sysMusicScore = musicSheetDao.selectById(userLastEvaluationData.getMusicSheetId());
+		if(Objects.nonNull(sysMusicScore)){
+			jsonObject.put("sysMusicScoreName", sysMusicScore.getMusicSheetName());
+		}
+		return jsonObject;
+	}
+
+	/**
+	 * Training statistics for one student: overview totals, a 7-day chart and
+	 * a paged list of evaluation records.
+	 */
+	@Override
+	public Map<String, Object> studentTrainData(SysMusicCompareRecordQueryInfo queryInfo) {
+		StudentTrainOverviewDto userTrainOverView = new StudentTrainOverviewDto();
+
+		// overview and chart data are only loaded for the first page
+		// NOTE(review): if either DAO call returns null here, the stream /
+		// setTrainTime calls below throw NPE — verify the mapper never does.
+		List<StudentTrainChartDto> userTrainChartData = new ArrayList<>();
+		if(queryInfo.getPage()==1){
+			userTrainOverView = sysMusicCompareRecordDao.getUserTrainOverView(queryInfo.getUserId());
+			userTrainChartData = sysMusicCompareRecordDao.getUserTrainChartData(queryInfo.getUserId(), queryInfo.getStartTime(), queryInfo.getEndTime());
+		}
+
+		// pad the chart with zero entries so it always covers 7 consecutive
+		// days starting at startTime
+		if(CollectionUtils.isEmpty(userTrainChartData)||userTrainChartData.size()!=7){
+			Set<LocalDate> trainDates = userTrainChartData.stream().map(StudentTrainChartDto::getTrainDate).collect(Collectors.toSet());
+			LocalDate startLocalDate = LocalDate.parse(queryInfo.getStartTime(), DateUtil.dateFormatter);
+			for (int i = 1; i <= 7; i++) {
+				if(!trainDates.contains(startLocalDate)){
+					userTrainChartData.add(new StudentTrainChartDto(0, 0, startLocalDate));
+				}
+				startLocalDate=startLocalDate.plusDays(1);
+			}
+		}
+
+		// paged detail list, restricted to evaluation-feature records
+		PageInfo<SysMusicCompareRecord> pageInfo = new PageInfo<>(queryInfo.getPage(), queryInfo.getRows());
+		Map<String, Object> params = new HashMap<>();
+		queryInfo.setFeatureType(FeatureType.CLOUD_STUDY_EVALUATION);
+		MapUtil.populateMap(params, queryInfo);
+
+		List<SysMusicCompareRecord> dataList = null;
+		int count = this.findCount(params);
+		if (count > 0) {
+			pageInfo.setTotal(count);
+			params.put("offset", pageInfo.getOffset());
+			dataList = this.getDAO().queryPage(params);
+		}
+		if (count == 0) {
+			dataList = new ArrayList<>();
+		}
+		pageInfo.setRows(dataList);
+		Map<String, Object> result = new HashMap<>();
+		// trainTime presumably stored in seconds; integer division converts to
+		// minutes (truncating) — TODO confirm the unit
+		userTrainOverView.setTrainTime(userTrainOverView.getTrainTime()/60);
+		result.put("userTrainOverView",userTrainOverView);
+
+		// reshape chart rows into the two indicator series consumed by the UI
+		List<IndexBaseDto> userTrainChartResult = new ArrayList<>();
+		List<IndexBaseMonthData> trainNumData = new ArrayList<>();
+		List<IndexBaseMonthData> trainTimeData = new ArrayList<>();
+
+		for (StudentTrainChartDto data : userTrainChartData) {
+			Date trainDate = Date.from(data.getTrainDate().atStartOfDay(DateUtil.zoneId).toInstant());
+			trainTimeData.add(new IndexBaseMonthData(trainDate, null, new BigDecimal(data.getTrainTime()/60)));
+			trainNumData.add(new IndexBaseMonthData(trainDate, null, new BigDecimal(data.getTrainNum())));
+		}
+
+		trainNumData.sort(Comparator.comparing(IndexBaseMonthData::getMonth));
+		trainTimeData.sort(Comparator.comparing(IndexBaseMonthData::getMonth));
+
+		userTrainChartResult.add(new IndexBaseDto(IndexDataType.CLOUD_STUDY_TRAIN_TIME, trainTimeData));
+		userTrainChartResult.add(new IndexBaseDto(IndexDataType.CLOUD_STUDY_TRAIN_NUM, trainNumData));
+
+		result.put("userTrainChartData", userTrainChartResult);
+		result.put("detail",pageInfo);
+		return result;
+	}
+
+	/**
+	 * Standard count + page query over training records using the filters
+	 * carried by {@code queryInfo}.
+	 */
+	@Override
+	public PageInfo<SysMusicCompareRecord> queryStudentTrainData(SysMusicCompareRecordQueryInfo queryInfo) {
+		PageInfo<SysMusicCompareRecord> pageInfo = new PageInfo<>(queryInfo.getPage(), queryInfo.getRows());
+		Map<String, Object> params = new HashMap<>();
+		MapUtil.populateMap(params, queryInfo);
+
+		List<SysMusicCompareRecord> dataList = null;
+		int count = this.findCount(params);
+		if (count > 0) {
+			pageInfo.setTotal(count);
+			params.put("offset", pageInfo.getOffset());
+			dataList = this.getDAO().queryPage(params);
+		}
+		if (count == 0) {
+			dataList = new ArrayList<>();
+		}
+		pageInfo.setRows(dataList);
+		return pageInfo;
+	}
+
+}

+ 429 - 0
cooleshow-user/user-biz/src/main/resources/config/mybatis/SysMusicCompareRecordMapper.xml

@@ -0,0 +1,429 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<!--
+这个文件是自动生成的。
+不要修改此文件。所有改动将在下次重新自动生成时丢失。
+-->
+<mapper namespace="com.yonge.cooleshow.biz.dal.dao.SysMusicCompareRecordDao">
+
+	<!-- Column-to-property mapping for SysMusicCompareRecord. Enum columns go
+	     through CustomEnumTypeHandler; notes_data_index_ and
+	     sys_music_score_name_ are computed/joined columns from queryPage. -->
+	<resultMap type="com.yonge.cooleshow.biz.dal.entity.SysMusicCompareRecord" id="SysMusicCompareRecord">
+		<result column="id_" property="id" />
+		<result column="user_id_" property="userId" />
+		<result column="music_sheet_id_" property="musicSheetId" />
+		<result column="heard_level_" property="heardLevel" typeHandler="com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler" />
+		<result column="sys_music_score_name_" property="sysMusicScoreName" />
+		<result column="behavior_id_" property="behaviorId"/>
+		<result column="score_data_" property="scoreData" />
+		<result column="notes_data_index_" property="notesDataIndex"/>
+		<result column="score_" property="score" />
+		<result column="intonation_" property="intonation" />
+		<result column="cadence_" property="cadence" />
+		<result column="integrity_" property="integrity" />
+		<result column="record_file_path_" property="recordFilePath" />
+		<result column="video_file_path_" property="videoFilePath" />
+		<result column="device_type_" property="deviceType" typeHandler="com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler" />
+		<result column="client_id_" property="clientId" />
+		<result column="source_time_" property="sourceTime"/>
+		<result column="play_time_" property="playTime"/>
+		<result column="speed_" property="speed"/>
+		<result column="feature_" property="feature" typeHandler="com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler" />
+		<result column="monday_" property="monday"/>
+		<result column="create_time_" property="createTime" />
+		<result column="part_index_" property="partIndex" />
+	</resultMap>
+
+	<!-- Fetch one record by primary key -->
+	<select id="get" resultMap="SysMusicCompareRecord" >
+		SELECT * FROM sys_music_compare_record WHERE id_ = #{id}
+	</select>
+
+	<!-- Fetch all records of a tenant, ordered by id -->
+	<select id="findAll" resultMap="SysMusicCompareRecord">
+		SELECT * FROM sys_music_compare_record where tenant_id_ = #{tenantId} ORDER BY id_
+	</select>
+
+	<!-- Insert a new record; create_time_/update_time_ are set to NOW().
+	     keyColumn must name the actual DB column (id_, not id) so that
+	     useGeneratedKeys can write the generated key back into the bean. -->
+	<insert id="insert" parameterType="com.yonge.cooleshow.biz.dal.entity.SysMusicCompareRecord" useGeneratedKeys="true" keyColumn="id_" keyProperty="id">
+		INSERT INTO sys_music_compare_record (id_,user_id_,music_sheet_id_,heard_level_,behavior_id_,score_data_,score_,intonation_,cadence_,integrity_,
+		                                      record_file_path_,video_file_path_,device_type_,client_id_,play_time_,speed_,monday_,
+												source_time_,feature_,create_time_,update_time_,part_index_)
+		VALUES(#{id},#{userId},#{musicSheetId},#{heardLevel,typeHandler=com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler},#{behaviorId},#{scoreData},
+		       #{score},#{intonation},#{cadence},#{integrity},
+		       #{recordFilePath},#{videoFilePath},#{deviceType,typeHandler=com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler},#{clientId},#{playTime},#{speed},#{monday},
+		       #{sourceTime},#{feature,typeHandler=com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler}, NOW(), NOW(),#{partIndex})
+	</insert>
+
+	<!-- Update a record by primary key; only non-null properties are written
+	     (dynamic SET), update_time_ is always refreshed -->
+	<update id="update" parameterType="com.yonge.cooleshow.biz.dal.entity.SysMusicCompareRecord">
+		UPDATE sys_music_compare_record <set>
+		<if test="userId != null">
+			user_id_ = #{userId},
+		</if>
+		<if test="musicSheetId != null">
+			music_sheet_id_ = #{musicSheetId},
+		</if>
+		<if test="behaviorId!=null">
+			behavior_id_ = #{behaviorId},
+		</if>
+		<if test="scoreData != null">
+			score_data_ = #{scoreData},
+		</if>
+		<if test="score != null">
+			score_ = #{score},
+		</if>
+		<if test="intonation!=null">
+			intonation_ = #{intonation},
+		</if>
+		<if test="integrity != null">
+			integrity_ = #{integrity},
+		</if>
+		<if test="cadence != null">
+			cadence_ = #{cadence},
+		</if>
+		<if test="recordFilePath != null">
+			record_file_path_ = #{recordFilePath},
+		</if>
+		<if test="videoFilePath != null">
+			video_file_path_ = #{videoFilePath},
+		</if>
+		<if test="clientId != null">
+			client_id_ = #{clientId},
+		</if>
+		<if test="deviceType!=null">
+			device_type_ = #{deviceType, typeHandler=com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler},
+		</if>
+		<if test="sourceTime != null">
+			source_time_ = #{sourceTime},
+		</if>
+		<if test="playTime != null">
+			play_time_ = #{playTime},
+		</if>
+		<if test="speed != null">
+			speed_ = #{speed},
+		</if>
+		<if test="monday != null">
+			monday_ = #{monday},
+		</if>
+		<if test="partIndex != null">
+			part_index_ = #{partIndex},
+		</if>
+		<if test="feature!=null">
+			feature_ = #{feature,typeHandler=com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler},
+		</if>
+		<if test="heardLevel!=null">
+			heard_level_ = #{heardLevel,typeHandler=com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler},
+		</if>
+		update_time_ = NOW()
+	</set> WHERE id_ = #{id} 
+	</update>
+
+	<!-- Delete a record by primary key -->
+	<delete id="delete" >
+		DELETE FROM sys_music_compare_record WHERE id_ = #{id}
+	</delete>
+
+	<!-- Shared WHERE fragment for queryPage/queryCount:
+	     filters by user, feature type and create-date range -->
+	<sql id="queryCondition">
+		<where>
+			<if test="userId!=null">
+				AND smcr.user_id_=#{userId}
+			</if>
+			<if test="featureType != null">
+				AND smcr.feature_ = #{featureType,typeHandler=com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler}
+			</if>
+			<if test="startTime != null and startTime != ''">
+				AND DATE_FORMAT(smcr.create_time_, '%Y-%m-%d') >= #{startTime}
+			</if>
+			<if test="endTime != null and endTime != ''">
+				AND DATE_FORMAT(smcr.create_time_, '%Y-%m-%d') &lt;= #{endTime}
+			</if>
+		</where>
+	</sql>
+
+	<!-- Paged query. notes_data_index_ is the LOCATE(...) position of the
+	     note-level stats inside score_data_; sys_music_score_name_ comes from
+	     the joined sys_music_score row -->
+	<select id="queryPage" resultMap="SysMusicCompareRecord" parameterType="map">
+		SELECT
+			smcr.id_, smcr.user_id_, smcr.music_sheet_id_, smcr.heard_level_,
+			LOCATE('musicalNotesPlayStats', smcr.score_data_) notes_data_index_,
+		       smcr.score_, smcr.intonation_, smcr.cadence_,
+		       smcr.integrity_, smcr.record_file_path_, smcr.video_file_path_, smcr.client_id_, smcr.device_type_, smcr.play_time_,
+		       smcr.monday_, smcr.create_time_,
+			sms.name_ sys_music_score_name_
+		FROM sys_music_compare_record smcr
+		LEFT JOIN sys_music_score sms on smcr.music_sheet_id_ = sms.id_
+		<include refid="queryCondition"/>
+		ORDER BY id_ DESC
+		<include refid="global.limit"/>
+	</select>
+
+	<!-- Total record count under queryCondition -->
+	<select id="queryCount" resultType="int">
+		SELECT COUNT(*) FROM sys_music_compare_record smcr
+		<include refid="queryCondition"/>
+	</select>
+
+	<!-- Per-student training ranking within a date range, students only.
+	     NOTE(review): su.*/sms.name_ are selected without aggregation while
+	     grouping by user_id_ only — relies on MySQL running without
+	     ONLY_FULL_GROUP_BY; values for those columns are arbitrary rows. -->
+    <select id="getUserTrainStat" resultType="com.yonge.cooleshow.biz.dal.dto.MusicCompareRankingDto">
+		SELECT
+			smcr.user_id_ userId,
+			su.avatar_ avatar,
+		    su.username_ studentName,
+		    sms.name_ musicScoreName,
+			COUNT(smcr.id_) trainNum,
+		    MAX(smcr.score_) score,
+			COUNT(DISTINCT DATE(smcr.create_time_)) trainDays,
+			SUM(smcr.play_time_) trainTime
+		FROM sys_music_compare_record smcr
+			LEFT JOIN sys_user su ON smcr.user_id_=su.id_
+			LEFT JOIN sys_music_score sms ON smcr.music_sheet_id_ = sms.id_
+		WHERE EXISTS (SELECT user_id_ FROM student WHERE user_id_=smcr.user_id_)
+		AND DATE_FORMAT(smcr.create_time_, '%Y-%m-%d') BETWEEN #{startTime} AND #{endTime}
+		<if test="heardLevel!=null">
+			AND smcr.heard_level_ = #{heardLevel, typeHandler=com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler}
+		</if>
+		GROUP BY smcr.user_id_
+	</select>
+
+	<!-- Lifetime training totals for one user (distinct sessions via
+	     behavior_id_, distinct days, summed play time) -->
+	<select id="getUserTrainOverView" resultType="com.yonge.cooleshow.biz.dal.dto.StudentTrainOverviewDto">
+		SELECT
+			COUNT(DISTINCT smcr.behavior_id_) trainNum,
+			COUNT(DISTINCT DATE(smcr.create_time_)) trainDays,
+			SUM(smcr.play_time_) trainTime
+		FROM sys_music_compare_record smcr
+		WHERE smcr.user_id_=#{userId}
+	</select>
+
+	<!-- Per-day training chart rows for one user in a date range -->
+	<select id="getUserTrainChartData" resultType="com.yonge.cooleshow.biz.dal.dto.StudentTrainChartDto">
+		SELECT
+			DATE_FORMAT(smcr.create_time_, '%Y-%m-%d') trainDate,
+			COUNT(DISTINCT smcr.behavior_id_) trainNum,
+			SUM(smcr.play_time_) trainTime
+		FROM sys_music_compare_record smcr
+		WHERE smcr.user_id_=#{userId}
+		  AND DATE_FORMAT(smcr.create_time_, '%Y-%m-%d') BETWEEN #{startTime} AND #{endTime}
+		GROUP BY DATE_FORMAT(smcr.create_time_, '%Y-%m-%d')
+		ORDER BY trainDate;
+	</select>
+
+	<!-- Shared WHERE fragment for the music-group student training queries:
+	     normal registrations with an existing student row, optional date
+	     range, group and username search -->
+	<sql id="queryMusicGroupStudentTrainDataConditon">
+		<where>
+			sr.music_group_status_='NORMAL' AND stu.user_id_ IS NOT NULL
+			<if test="startTime!=null and endTime!=null">
+				AND DATE_FORMAT( smcr.create_time_, '%Y-%m-%d' ) BETWEEN #{startTime} AND #{endTime}
+			</if>
+			<if test="musicGroupId!=null">
+				AND sr.music_group_id_ = #{musicGroupId}
+			</if>
+			<if test="search!=null">
+				AND (su.username_ LIKE CONCAT('%', #{search}, '%'))
+			</if>
+		</where>
+	</sql>
+
+	<!-- Paged per-student training totals within a music group; vipUser flags
+	     students with a member rank. sortField is matched against fixed
+	     literals, so no injection risk here (direction defaults to ASC). -->
+	<select id="queryMusicGroupStudentTrainData" resultType="com.yonge.cooleshow.biz.dal.dto.MusicCompareRankingDto">
+		SELECT
+			smcr.user_id_ userId,
+			su.avatar_ avatar,
+			su.username_ studentName,
+			COUNT( smcr.id_ ) trainNum,
+			COUNT( DISTINCT DAY ( smcr.create_time_ )) trainDays,
+			SUM( smcr.play_time_ ) trainTime,
+			CASE WHEN stu.member_rank_setting_id_ IS NULL THEN 0 ELSE 1 END vipUser
+		FROM
+			sys_music_compare_record smcr
+				LEFT JOIN sys_user su ON smcr.user_id_ = su.id_
+				LEFT JOIN student stu ON stu.user_id_ = smcr.user_id_
+				LEFT JOIN student_registration sr ON sr.user_id_=smcr.user_id_
+		<include refid="queryMusicGroupStudentTrainDataConditon"></include>
+		GROUP BY smcr.user_id_
+		<if test="sortField!=null and sortField!=''">
+			<if test="sortField == 'TRAINDAYS'">
+				ORDER BY trainDays
+			</if>
+			<if test="sortField == 'TRAINTIME'">
+				ORDER BY trainTime
+			</if>
+			<if test="sortField == 'TRAINNUM'">
+				ORDER BY trainNum
+			</if>
+		</if>
+		<include refid="global.limit"></include>
+	</select>
+
+	<!-- Distinct-student count for the paged query above -->
+	<select id="countMusicGroupStudentTrainData" resultType="int">
+		SELECT
+			COUNT(DISTINCT smcr.user_id_)
+		FROM
+			sys_music_compare_record smcr
+		LEFT JOIN sys_user su ON smcr.user_id_ = su.id_
+		LEFT JOIN student stu ON stu.user_id_ = smcr.user_id_
+		LEFT JOIN student_registration sr ON sr.user_id_=smcr.user_id_
+		<include refid="queryMusicGroupStudentTrainDataConditon"/>
+	</select>
+
+	<!-- Per-student lifetime cloud-study usage (distinct sessions and total
+	     play time) across all records -->
+	<select id="queryStudentTrainData" resultType="com.yonge.cooleshow.biz.dal.dto.EduOrganStudentListDto">
+		SELECT
+			user_id_ studentId,
+			COUNT(DISTINCT behavior_id_) cloudStudyUseNum,
+			SUM(play_time_) cloudStudyUseTime
+		FROM
+			sys_music_compare_record
+		GROUP BY
+			user_id_
+	</select>
+	<!-- Column mapping for the queryStudentTrain aggregation below -->
+	<resultMap id="CountStudentTrainDataDto" type="com.yonge.cooleshow.biz.dal.dto.CountStudentTrainDataDto">
+		<result property="trainNum" column="train_num_"/>
+		<result property="recordNum" column="record_num_"/>
+		<result property="avatar" column="avatar_"/>
+		<result property="userId" column="user_id_"/>
+		<result property="username" column="username_"/>
+		<result property="phone" column="phone_"/>
+		<result property="totalPlayTime" column="total_play_time_"/>
+		<result property="trainDay" column="train_day_"/>
+	</resultMap>
+	<!-- Paged training summary for all students of one teacher; play time is
+	     rounded to minutes, record_num_ counts evaluation-feature sessions.
+	     NOTE(review): ${sort}/${order}/${totalTime}/${trainNum} are spliced in
+	     unescaped — callers MUST whitelist these values or this select is an
+	     SQL-injection vector; prefer #{} or fixed-literal <if> branches. -->
+	<select id="queryStudentTrain" resultMap="CountStudentTrainDataDto">
+		SELECT s.user_id_,su.avatar_,su.phone_,su.username_,ROUND(SUM(mcr.play_time_) / 60) total_play_time_,
+			   COUNT(DISTINCT DATE_FORMAT(mcr.create_time_,"%Y-%m-%d")) train_day_,
+			   COUNT(DISTINCT mcr.behavior_id_) train_num_,
+		COUNT(CASE WHEN mcr.feature_ = 'CLOUD_STUDY_EVALUATION' THEN mcr.behavior_id_ ELSE NULL END) record_num_
+		FROM teacher t
+				 LEFT JOIN student s ON t.id_ = s.teacher_id_
+				 LEFT JOIN sys_user su ON su.id_ = s.user_id_
+				 LEFT JOIN sys_music_compare_record mcr ON mcr.user_id_ = s.user_id_
+		<if test="startTime != null and startTime != ''">
+			AND (DATE_FORMAT(mcr.create_time_, '%Y-%m-%d') >= #{startTime} OR mcr.id_ IS NULL)
+		</if>
+		<if test="endTime != null and endTime != ''">
+			AND (DATE_FORMAT(mcr.create_time_, '%Y-%m-%d') &lt;= #{endTime} OR mcr.id_ IS NULL)
+		</if>
+		WHERE s.teacher_id_ = #{teacherId}
+		<if test="search != null and search != ''">
+			AND (su.phone_ LIKE CONCAT('%',#{search},'%') OR su.username_ LIKE CONCAT('%',#{search},'%'))
+		</if>
+		<if test="visitStudents != null and visitStudents.size > 0">
+			AND s.user_id_ NOT IN
+			<foreach collection="visitStudents" open="(" close=")" item="userId" separator=",">
+				#{userId}
+			</foreach>
+		</if>
+		<if test="hasCourseUserIds != null and hasCourseUserIds.size > 0">
+			AND s.user_id_ IN
+			<foreach collection="hasCourseUserIds" open="(" close=")" item="userId" separator=",">
+				#{userId}
+			</foreach>
+		</if>
+		GROUP BY s.user_id_
+		<if test="having == 1">
+			HAVING total_play_time_ &lt; ${totalTime} OR train_num_ &lt; ${trainNum}
+		</if>
+		ORDER BY
+		<if test="sort != null and sort != ''">
+			${sort}
+			<if test="order != null and order != ''">
+				${order}
+			</if>
+		</if>
+		<if test="sort == null or sort == ''">
+			total_play_time_ DESC
+		</if>
+		,s.user_id_
+		<include refid="global.limit"/>
+	</select>
+
+	<!-- Users of one organ whose very first (and only) training day is today:
+	     exactly one distinct training date, and that date is CURDATE() -->
+	<select id="getOrganNewCloudStudyNum" resultType="int">
+		SELECT
+			COUNT( DISTINCT user_id_ )
+		FROM
+			(
+				SELECT
+					smcr.user_id_
+				FROM
+					sys_music_compare_record smcr
+						LEFT JOIN sys_user su ON smcr.user_id_ = su.id_
+				WHERE
+					su.del_flag_ = 0
+					AND su.organ_id_ = #{organId}
+				GROUP BY
+					user_id_
+				HAVING
+						COUNT(
+								DISTINCT DATE( smcr.create_time_ ))= 1
+				   AND MAX(
+							   DATE( smcr.create_time_ ))= CURDATE()) t
+	</select>
+
+	<!-- Same first-time-today count, totalled across a set of organs -->
+	<select id="getOrgansTotalNewCloudStudyNum" resultType="int">
+		SELECT
+			COUNT( DISTINCT user_id_ )
+		FROM
+			(
+				SELECT
+					smcr.user_id_
+				FROM
+					sys_music_compare_record smcr
+						LEFT JOIN sys_user su ON smcr.user_id_ = su.id_
+				WHERE
+					su.del_flag_ = 0
+					<if test="organIds!=null and organIds.size()>0">
+						AND su.organ_id_ IN
+						<foreach collection="organIds" item="organId" open="(" close=")" separator=",">
+							#{organId}
+						</foreach>
+					</if>
+				GROUP BY
+					user_id_
+				HAVING
+						COUNT(DISTINCT DATE( smcr.create_time_ ))= 1
+				   AND MAX(DATE( smcr.create_time_ ))= CURDATE()) t
+	</select>
+
+	<!-- Same first-time-today count, broken down per organ as key/value rows -->
+	<select id="getOrgansNewCloudStudyNum" resultType="map">
+		SELECT
+			organ_id_ 'key',
+			COUNT( DISTINCT user_id_ ) 'value'
+		FROM
+			(
+				SELECT
+					su.organ_id_,
+					smcr.user_id_
+				FROM
+					sys_music_compare_record smcr
+						LEFT JOIN sys_user su ON smcr.user_id_ = su.id_
+				WHERE
+					su.del_flag_ = 0
+					<if test="organIds!=null and organIds.size()>0">
+						AND su.organ_id_ IN
+						<foreach collection="organIds" item="organId" open="(" close=")" separator=",">
+							#{organId}
+						</foreach>
+					</if>
+				GROUP BY
+					user_id_
+				HAVING
+						COUNT(
+								DISTINCT DATE( smcr.create_time_ ))= 1
+				   AND MAX(
+							   DATE( smcr.create_time_ ))= CURDATE()) t
+		GROUP BY t.organ_id_
+	</select>
+
+	<!-- Most recent evaluation-feature record of a user -->
+    <select id="getUserLastEvaluationData" resultMap="SysMusicCompareRecord">
+		SELECT * FROM sys_music_compare_record WHERE user_id_=#{userId} AND feature_ = 'CLOUD_STUDY_EVALUATION' ORDER BY create_time_ DESC LIMIT 1
+	</select>
+
+	<!-- Weekly totals and per-level best scores (only recordings played to at
+	     least the source length count).
+	     NOTE(review): smcr.tenant_id_ is selected without aggregation in an
+	     aggregate query — relies on MySQL loose group-by; value is arbitrary
+	     if a user ever spans tenants. -->
+	<select id="getUserWeekCompareData" resultType="com.yonge.cooleshow.biz.dal.entity.SysMusicCompareWeekData">
+		SELECT
+			COUNT(smcr.id_) trainNum,
+			COUNT(DISTINCT DATE(smcr.create_time_)) trainDays,
+			SUM(smcr.play_time_) trainTime,
+			MAX(IF(heard_level_='BEGINNER' AND source_time_ &gt;= play_time_, smcr.score_, 0)) beginnerMaxScore,
+			MAX(IF(heard_level_='ADVANCED' AND source_time_ &gt;= play_time_, smcr.score_, 0)) advancedMaxScore,
+			MAX(IF(heard_level_='PERFORMER' AND source_time_ &gt;= play_time_, smcr.score_, 0)) performerMaxScore,smcr.tenant_id_ tenantId
+		FROM sys_music_compare_record smcr
+		WHERE user_id_=#{userId} AND monday_ = #{monday}
+	</select>
+
+	<!-- Music sheet with the user's highest score in the given week/level -->
+	<select id="getUserWeekMaxMusicScoreId" resultType="java.lang.Integer">
+		SELECT music_sheet_id_ FROM sys_music_compare_record
+		WHERE user_id_=#{userId} AND monday_ = #{monday} AND heard_level_ = #{heardLevel, typeHandler=com.yonge.toolset.mybatis.dal.CustomEnumTypeHandler}
+		ORDER BY score_ DESC LIMIT 1
+	</select>
+</mapper>

+ 73 - 0
cooleshow-user/user-student/src/main/java/com/yonge/cooleshow/student/controller/SysMusicCompareRecordController.java

@@ -0,0 +1,73 @@
+package com.yonge.cooleshow.student.controller;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+
+import java.util.Objects;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import com.yonge.cooleshow.auth.api.client.SysUserFeignService;
+import com.yonge.cooleshow.auth.api.entity.SysUser;
+import com.yonge.cooleshow.biz.dal.entity.SysMusicCompareRecord;
+import com.yonge.cooleshow.biz.dal.queryInfo.SysMusicCompareRecordQueryInfo;
+import com.yonge.cooleshow.biz.dal.service.SysMusicCompareRecordService;
+import com.yonge.cooleshow.common.controller.BaseController;
+import com.yonge.cooleshow.common.entity.HttpResponseResult;
+import com.yonge.toolset.base.exception.BizException;
+
+/**
+ * REST endpoints for the student client's cloud-practice (music compare)
+ * records: create a practice/evaluation record, fetch the note-play stats of
+ * the user's last evaluation, and aggregate a student's training statistics.
+ * All endpoints resolve the current user through the auth feign service and
+ * reject unauthenticated calls.
+ *
+ * @Author Joburgess
+ * @Date 2021/8/13 0013
+ */
+@Api(tags = "云教练记录")
+@RequestMapping("sysMusicRecord")
+@RestController
+public class SysMusicCompareRecordController extends BaseController {
+
+    // Remote lookup of the currently logged-in user (auth service).
+    @Autowired
+    private SysUserFeignService sysUserFeignService;
+    // Persistence / statistics service for music compare records.
+    @Autowired
+    private SysMusicCompareRecordService sysMusicCompareRecordService;
+
+    /**
+     * Persists a new music compare record for the current user.
+     * The user id and client id are always set server-side from the
+     * authenticated session, overriding anything supplied by the caller.
+     */
+    @ApiOperation(value = "添加记录")
+    @PostMapping("add")
+    public HttpResponseResult add(SysMusicCompareRecord record){
+        SysUser sysUser = sysUserFeignService.queryUserInfo();
+        if(sysUser == null){
+            throw new BizException("请登录");
+        }
+        // A feature tag is mandatory; reject the record without one.
+        // NOTE(review): this returns failed(...) while the login check above
+        // throws BizException - two different error channels in one method.
+        if(Objects.isNull(record.getFeature())){
+            return failed("请设置功能点");
+        }
+        record.setUserId(sysUser.getId());
+        record.setClientId("student");
+        return succeed(sysMusicCompareRecordService.insert(record));
+    }
+
+    /**
+     * Returns the per-note play statistics of the current user's last
+     * evaluation; {@code recordId} optionally pins a specific record
+     * (semantics delegated to the service).
+     */
+    @ApiOperation(value = "用户最后一次评测数据")
+    @GetMapping("getLastEvaluationMusicalNotesPlayStats")
+    public HttpResponseResult getLastEvaluationMusicalNotesPlayStats(Long recordId){
+        SysUser sysUser = sysUserFeignService.queryUserInfo();
+        if(sysUser == null){
+            throw new BizException("请登录");
+        }
+        return succeed(sysMusicCompareRecordService.getLastEvaluationMusicalNotesPlayStats(sysUser.getId(), recordId));
+    }
+
+    /**
+     * Aggregated training statistics for the current student; the query is
+     * forcibly scoped to the authenticated user's id before delegation.
+     */
+    @ApiOperation("学员训练数据统计")
+    @GetMapping("studentTrainData")
+    public HttpResponseResult studentTrainData(SysMusicCompareRecordQueryInfo queryInfo){
+        SysUser sysUser = sysUserFeignService.queryUserInfo();
+        if (sysUser == null) {
+            return failed("获取用户信息失败");
+        }
+        queryInfo.setUserId(sysUser.getId());
+        return succeed(sysMusicCompareRecordService.studentTrainData(queryInfo));
+    }
+
+}