
Merge branch 'online1' of http://git.dayaedu.com/yonge/mec into saas

# Conflicts:
#	mec-biz/src/main/java/com/ym/mec/biz/dal/dao/SysMusicCompareWeekDataDao.java
#	mec-biz/src/main/java/com/ym/mec/biz/dal/entity/SysMusicScore.java
#	mec-biz/src/main/java/com/ym/mec/biz/service/impl/DegreeRegistrationServiceImpl.java
#	mec-biz/src/main/java/com/ym/mec/biz/service/impl/StudentServeServiceImpl.java
#	mec-biz/src/main/java/com/ym/mec/biz/service/impl/SysMusicCompareRecordServiceImpl.java
#	mec-biz/src/main/resources/config/mybatis/SysMusicCompareWeekDataMapper.xml
#	mec-biz/src/main/resources/config/mybatis/SysMusicScoreMapper.xml
#	pom.xml
yonge · 3 years ago · commit 426c4389b1
70 changed files with 7722 additions and 191 deletions
  1. +81 -0  audio-analysis/pom.xml
  2. +34 -0  audio-analysis/src/main/java/com/yonge/audio/AudioAnalysisServerApplication.java
  3. +1084 -0  audio-analysis/src/main/java/com/yonge/audio/analysis/AudioFloatConverter.java
  4. +158 -0  audio-analysis/src/main/java/com/yonge/audio/analysis/Complex.java
  5. +167 -0  audio-analysis/src/main/java/com/yonge/audio/analysis/FFT.java
  6. +141 -0  audio-analysis/src/main/java/com/yonge/audio/analysis/Signals.java
  7. +52 -0  audio-analysis/src/main/java/com/yonge/audio/analysis/detector/FrequencyDetector.java
  8. +223 -0  audio-analysis/src/main/java/com/yonge/audio/analysis/detector/YINPitchDetector.java
  9. +44 -0  audio-analysis/src/main/java/com/yonge/audio/config/ResourceServerConfig.java
  10. +36 -0  audio-analysis/src/main/java/com/yonge/audio/config/WebMvcConfig.java
  11. +98 -0  audio-analysis/src/main/java/com/yonge/audio/utils/ArrayUtil.java
  12. +42 -0  audio-analysis/src/main/java/com/yonge/netty/common/message/Message.java
  13. +34 -0  audio-analysis/src/main/java/com/yonge/netty/common/message/MessageDispatcher.java
  14. +20 -0  audio-analysis/src/main/java/com/yonge/netty/common/message/MessageHandler.java
  15. +89 -0  audio-analysis/src/main/java/com/yonge/netty/common/message/MessageHandlerContainer.java
  16. +100 -0  audio-analysis/src/main/java/com/yonge/netty/dto/ChunkAnalysis.java
  17. +134 -0  audio-analysis/src/main/java/com/yonge/netty/dto/HardLevelEnum.java
  18. +180 -0  audio-analysis/src/main/java/com/yonge/netty/dto/NoteAnalysis.java
  19. +78 -0  audio-analysis/src/main/java/com/yonge/netty/dto/NoteFrequencyRange.java
  20. +28 -0  audio-analysis/src/main/java/com/yonge/netty/dto/NotePlayResult.java
  21. +78 -0  audio-analysis/src/main/java/com/yonge/netty/dto/SectionAnalysis.java
  22. +940 -0  audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java
  23. +842 -0  audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext2.java
  24. +841 -0  audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext3.java
  25. +67 -0  audio-analysis/src/main/java/com/yonge/netty/dto/WebSocketResponse.java
  26. +161 -0  audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlBasicInfo.java
  27. +106 -0  audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlNote.java
  28. +50 -0  audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlSection.java
  29. +155 -0  audio-analysis/src/main/java/com/yonge/netty/server/NettyServer.java
  30. +9 -0  audio-analysis/src/main/java/com/yonge/netty/server/handler/ChannelContextConstants.java
  31. +135 -0  audio-analysis/src/main/java/com/yonge/netty/server/handler/NettyChannelManager.java
  32. +77 -0  audio-analysis/src/main/java/com/yonge/netty/server/handler/NettyServerHandler.java
  33. +78 -0  audio-analysis/src/main/java/com/yonge/netty/server/handler/message/BinaryWebSocketFrameHandler.java
  34. +12 -0  audio-analysis/src/main/java/com/yonge/netty/server/handler/message/MessageHandler.java
  35. +75 -0  audio-analysis/src/main/java/com/yonge/netty/server/handler/message/TextWebSocketHandler.java
  36. +112 -0  audio-analysis/src/main/java/com/yonge/netty/server/processor/WaveformWriter.java
  37. +357 -0  audio-analysis/src/main/java/com/yonge/netty/server/service/AudioCompareHandler.java
  38. +90 -0  audio-analysis/src/main/java/com/yonge/netty/server/service/PitchDetectionHandler.java
  39. +32 -0  audio-analysis/src/main/java/com/yonge/netty/server/service/UserChannelContextService.java
  40. +128 -0  audio-analysis/src/main/resources/application-template.yml
  41. +16 -0  audio-analysis/src/main/resources/bootstrap-dev.properties
  42. +16 -0  audio-analysis/src/main/resources/bootstrap-prod.properties
  43. +16 -0  audio-analysis/src/main/resources/bootstrap-test.properties
  44. +55 -0  audio-analysis/src/main/resources/logback-spring.xml
  45. +2 -1  mec-biz/src/main/java/com/ym/mec/biz/dal/dao/SysMusicCompareWeekDataDao.java
  46. +54 -4  mec-biz/src/main/java/com/ym/mec/biz/dal/entity/SysMusicScore.java
  47. +31 -0  mec-biz/src/main/java/com/ym/mec/biz/dal/entity/SysMusicScoreAccompaniment.java
  48. +10 -0  mec-biz/src/main/java/com/ym/mec/biz/dal/page/SysMusicCompareRecordQueryInfo.java
  49. +7 -0  mec-biz/src/main/java/com/ym/mec/biz/service/SysMusicCompareRecordService.java
  50. +45 -16  mec-biz/src/main/java/com/ym/mec/biz/service/impl/DegreeRegistrationServiceImpl.java
  51. +3 -3  mec-biz/src/main/java/com/ym/mec/biz/service/impl/PayServiceImpl.java
  52. +2 -2  mec-biz/src/main/java/com/ym/mec/biz/service/impl/StudentServeServiceImpl.java
  53. +14 -1  mec-biz/src/main/java/com/ym/mec/biz/service/impl/SysMusicCompareRecordServiceImpl.java
  54. +152 -141  mec-biz/src/main/java/com/ym/mec/biz/service/impl/VipGroupServiceImpl.java
  55. +1 -1  mec-biz/src/main/resources/config/mybatis/CourseScheduleMapper.xml
  56. +8 -8  mec-biz/src/main/resources/config/mybatis/FinancialExpenditureMapper.xml
  57. +1 -1  mec-biz/src/main/resources/config/mybatis/StudentManageDao.xml
  58. +2 -1  mec-biz/src/main/resources/config/mybatis/StudentRegistrationMapper.xml
  59. +4 -2  mec-biz/src/main/resources/config/mybatis/SysMusicCompareWeekDataMapper.xml
  60. +7 -2  mec-biz/src/main/resources/config/mybatis/SysMusicScoreAccompanimentMapper.xml
  61. +14 -2  mec-biz/src/main/resources/config/mybatis/SysMusicScoreMapper.xml
  62. +3 -0  mec-common/common-core/src/main/java/com/ym/mec/common/dal/CustomEnumTypeHandler.java
  63. +0 -1  mec-im/pom.xml
  64. +1 -0  mec-student/src/main/java/com/ym/mec/student/controller/CloudStudyController.java
  65. +2 -2  mec-thirdparty/src/main/java/com/ym/mec/thirdparty/eseal/provider/TsignPlugin.java
  66. +9 -0  mec-thirdparty/src/main/java/com/ym/mec/thirdparty/storage/StoragePlugin.java
  67. +5 -0  mec-thirdparty/src/main/java/com/ym/mec/thirdparty/storage/StoragePluginContext.java
  68. +28 -0  mec-thirdparty/src/main/java/com/ym/mec/thirdparty/storage/provider/AliyunOssStoragePlugin.java
  69. +31 -0  mec-thirdparty/src/main/java/com/ym/mec/thirdparty/storage/provider/KS3StoragePlugin.java
  70. +15 -3  pom.xml

+ 81 - 0
audio-analysis/pom.xml

@@ -0,0 +1,81 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>com.ym</groupId>
+		<artifactId>mec</artifactId>
+		<version>1.0</version>
+	</parent>
+	<groupId>com.yonge.audio</groupId>
+	<artifactId>audio-analysis</artifactId>
+	<name>audio-analysis</name>
+	<url>http://maven.apache.org</url>
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<dependencies>
+		<dependency>
+			<groupId>org.springframework.cloud</groupId>
+			<artifactId>spring-cloud-starter-netflix-eureka-client</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>org.springframework.cloud</groupId>
+			<artifactId>spring-cloud-starter-alibaba-nacos-config</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>de.codecentric</groupId>
+			<artifactId>spring-boot-admin-starter-client</artifactId>
+		</dependency>
+
+		<!-- swagger-spring-boot -->
+		<dependency>
+			<groupId>com.spring4all</groupId>
+			<artifactId>swagger-spring-boot-starter</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.github.xiaoymin</groupId>
+			<artifactId>swagger-bootstrap-ui</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.alibaba</groupId>
+			<artifactId>druid-spring-boot-starter</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>mysql</groupId>
+			<artifactId>mysql-connector-java</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.ym</groupId>
+			<artifactId>mec-auth-api</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.ym</groupId>
+			<artifactId>mec-biz</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>io.netty</groupId>
+			<artifactId>netty-all</artifactId>
+			<version>4.1.68.Final</version>
+		</dependency>
+	</dependencies>
+	
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.springframework.boot</groupId>
+				<artifactId>spring-boot-maven-plugin</artifactId>
+			</plugin>
+		</plugins>
+	</build>
+</project>

+ 34 - 0
audio-analysis/src/main/java/com/yonge/audio/AudioAnalysisServerApplication.java

@@ -0,0 +1,34 @@
+package com.yonge.audio;
+
+import org.mybatis.spring.annotation.MapperScan;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
+import org.springframework.cloud.client.loadbalancer.LoadBalanced;
+import org.springframework.cloud.openfeign.EnableFeignClients;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.web.client.RestTemplate;
+
+import com.spring4all.swagger.EnableSwagger2Doc;
+
+@SpringBootApplication
+@EnableDiscoveryClient
+@EnableFeignClients("com.ym.mec")
+@MapperScan("com.ym.mec.biz.dal.dao")
+@ComponentScan(basePackages = { "com.yonge.netty", "com.ym.mec", "com.yonge.log" })
+@Configuration
+@EnableSwagger2Doc
+public class AudioAnalysisServerApplication {
+
+	public static void main(String[] args) {
+		SpringApplication.run(AudioAnalysisServerApplication.class, args);
+	}
+
+	@Bean
+	@LoadBalanced
+	public RestTemplate restTemplate() {
+		return new RestTemplate();
+	}
+}

+ 1084 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/AudioFloatConverter.java

@@ -0,0 +1,1084 @@
+/*
+*      _______                       _____   _____ _____  
+*     |__   __|                     |  __ \ / ____|  __ \ 
+*        | | __ _ _ __ ___  ___  ___| |  | | (___ | |__) |
+*        | |/ _` | '__/ __|/ _ \/ __| |  | |\___ \|  ___/ 
+*        | | (_| | |  \__ \ (_) \__ \ |__| |____) | |     
+*        |_|\__,_|_|  |___/\___/|___/_____/|_____/|_|     
+*                                                         
+* -------------------------------------------------------------
+*
+* TarsosDSP is developed by Joren Six at IPEM, University Ghent
+*  
+* -------------------------------------------------------------
+*
+*  Info: http://0110.be/tag/TarsosDSP
+*  Github: https://github.com/JorenSix/TarsosDSP
+*  Releases: http://0110.be/releases/TarsosDSP/
+*  
+*  TarsosDSP includes modified source code by various authors,
+*  for credits and info, see README.
+* 
+*/
+
+
+/*
+ * Copyright 2007 Sun Microsystems, Inc.  All Rights Reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Sun designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Sun in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
+ * CA 95054 USA or visit www.sun.com if you need additional information or
+ * have any questions.
+ */
+package com.yonge.audio.analysis;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.DoubleBuffer;
+import java.nio.FloatBuffer;
+
+import javax.sound.sampled.AudioFormat;
+import javax.sound.sampled.AudioFormat.Encoding;
+
+
+/**
+ * This class is used to convert between 8, 16, 24, 32, and 32+ bit signed/unsigned
+ * big/little endian fixed/floating point byte buffers and float buffers.
+ * 
+ * @author Karl Helgason
+ */
+public abstract class AudioFloatConverter {
+
+    public static final Encoding PCM_FLOAT = new Encoding("PCM_FLOAT");
+    
+    /***************************************************************************
+     * 
+     * LSB Filter, used to filter the least significant byte in sample arrays.
+     * 
+     * It is used to filter out data in the LSB byte when SampleSizeInBits is not
+     * divisible by 8.
+     * 
+     **************************************************************************/
+
+    private static class AudioFloatLSBFilter extends AudioFloatConverter {
+
+        private AudioFloatConverter converter;
+
+        final private int offset;
+
+        final private int stepsize;
+
+        final private byte mask;
+
+        private byte[] mask_buffer;
+
+        public AudioFloatLSBFilter(AudioFloatConverter converter,
+        		AudioFormat format) {
+            int bits = format.getSampleSizeInBits();
+            boolean bigEndian = format.isBigEndian();
+            this.converter = converter;
+            stepsize = (bits + 7) / 8;
+            offset = bigEndian ? (stepsize - 1) : 0;
+            int lsb_bits = bits % 8;
+            if (lsb_bits == 0)
+                mask = (byte) 0x00;
+            else if (lsb_bits == 1)
+                mask = (byte) 0x80;
+            else if (lsb_bits == 2)
+                mask = (byte) 0xC0;
+            else if (lsb_bits == 3)
+                mask = (byte) 0xE0;
+            else if (lsb_bits == 4)
+                mask = (byte) 0xF0;
+            else if (lsb_bits == 5)
+                mask = (byte) 0xF8;
+            else if (lsb_bits == 6)
+                mask = (byte) 0xFC;
+            else if (lsb_bits == 7)
+                mask = (byte) 0xFE;
+            else
+                mask = (byte) 0xFF;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            byte[] ret = converter.toByteArray(in_buff, in_offset, in_len,
+                    out_buff, out_offset);
+
+            int out_offset_end = in_len * stepsize;
+            for (int i = out_offset + offset; i < out_offset_end; i += stepsize) {
+                out_buff[i] = (byte) (out_buff[i] & mask);
+            }
+
+            return ret;
+        }
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            if (mask_buffer == null || mask_buffer.length < in_buff.length)
+                mask_buffer = new byte[in_buff.length];
+            System.arraycopy(in_buff, 0, mask_buffer, 0, in_buff.length);
+            int in_offset_end = out_len * stepsize;
+            for (int i = in_offset + offset; i < in_offset_end; i += stepsize) {
+                mask_buffer[i] = (byte) (mask_buffer[i] & mask);
+            }
+            float[] ret = converter.toFloatArray(mask_buffer, in_offset,
+                    out_buff, out_offset, out_len);
+            return ret;
+        }
+
+    }
+
+    /***************************************************************************
+     * 
+     * 64 bit float, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 64 bit float, little-endian
+    private static class AudioFloatConversion64L extends AudioFloatConverter {
+        ByteBuffer bytebuffer = null;
+
+        DoubleBuffer floatbuffer = null;
+
+        double[] double_buff = null;
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int in_len = out_len * 8;
+            if (bytebuffer == null || bytebuffer.capacity() < in_len) {
+                bytebuffer = ByteBuffer.allocate(in_len).order(
+                        ByteOrder.LITTLE_ENDIAN);
+                floatbuffer = bytebuffer.asDoubleBuffer();
+            }
+            bytebuffer.position(0);
+            floatbuffer.position(0);
+            bytebuffer.put(in_buff, in_offset, in_len);
+            if (double_buff == null
+                    || double_buff.length < out_len + out_offset)
+                double_buff = new double[out_len + out_offset];
+            floatbuffer.get(double_buff, out_offset, out_len);
+            int out_offset_end = out_offset + out_len;
+            for (int i = out_offset; i < out_offset_end; i++) {
+                out_buff[i] = (float) double_buff[i];
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int out_len = in_len * 8;
+            if (bytebuffer == null || bytebuffer.capacity() < out_len) {
+                bytebuffer = ByteBuffer.allocate(out_len).order(
+                        ByteOrder.LITTLE_ENDIAN);
+                floatbuffer = bytebuffer.asDoubleBuffer();
+            }
+            floatbuffer.position(0);
+            bytebuffer.position(0);
+            if (double_buff == null || double_buff.length < in_offset + in_len)
+                double_buff = new double[in_offset + in_len];
+            int in_offset_end = in_offset + in_len;
+            for (int i = in_offset; i < in_offset_end; i++) {
+                double_buff[i] = in_buff[i];
+            }
+            floatbuffer.put(double_buff, in_offset, in_len);
+            bytebuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+    }
+
+    // PCM 64 bit float, big-endian
+    private static class AudioFloatConversion64B extends AudioFloatConverter {
+        ByteBuffer bytebuffer = null;
+
+        DoubleBuffer floatbuffer = null;
+
+        double[] double_buff = null;
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int in_len = out_len * 8;
+            if (bytebuffer == null || bytebuffer.capacity() < in_len) {
+                bytebuffer = ByteBuffer.allocate(in_len).order(
+                        ByteOrder.BIG_ENDIAN);
+                floatbuffer = bytebuffer.asDoubleBuffer();
+            }
+            bytebuffer.position(0);
+            floatbuffer.position(0);
+            bytebuffer.put(in_buff, in_offset, in_len);
+            if (double_buff == null
+                    || double_buff.length < out_len + out_offset)
+                double_buff = new double[out_len + out_offset];
+            floatbuffer.get(double_buff, out_offset, out_len);
+            int out_offset_end = out_offset + out_len;
+            for (int i = out_offset; i < out_offset_end; i++) {
+                out_buff[i] = (float) double_buff[i];
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int out_len = in_len * 8;
+            if (bytebuffer == null || bytebuffer.capacity() < out_len) {
+                bytebuffer = ByteBuffer.allocate(out_len).order(
+                        ByteOrder.BIG_ENDIAN);
+                floatbuffer = bytebuffer.asDoubleBuffer();
+            }
+            floatbuffer.position(0);
+            bytebuffer.position(0);
+            if (double_buff == null || double_buff.length < in_offset + in_len)
+                double_buff = new double[in_offset + in_len];
+            int in_offset_end = in_offset + in_len;
+            for (int i = in_offset; i < in_offset_end; i++) {
+                double_buff[i] = in_buff[i];
+            }
+            floatbuffer.put(double_buff, in_offset, in_len);
+            bytebuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 32 bit float, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 32 bit float, little-endian
+    private static class AudioFloatConversion32L extends AudioFloatConverter {
+        ByteBuffer bytebuffer = null;
+
+        FloatBuffer floatbuffer = null;
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int in_len = out_len * 4;
+            if (bytebuffer == null || bytebuffer.capacity() < in_len) {
+                bytebuffer = ByteBuffer.allocate(in_len).order(
+                        ByteOrder.LITTLE_ENDIAN);
+                floatbuffer = bytebuffer.asFloatBuffer();
+            }
+            bytebuffer.position(0);
+            floatbuffer.position(0);
+            bytebuffer.put(in_buff, in_offset, in_len);
+            floatbuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int out_len = in_len * 4;
+            if (bytebuffer == null || bytebuffer.capacity() < out_len) {
+                bytebuffer = ByteBuffer.allocate(out_len).order(
+                        ByteOrder.LITTLE_ENDIAN);
+                floatbuffer = bytebuffer.asFloatBuffer();
+            }
+            floatbuffer.position(0);
+            bytebuffer.position(0);
+            floatbuffer.put(in_buff, in_offset, in_len);
+            bytebuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+    }
+
+    // PCM 32 bit float, big-endian
+    private static class AudioFloatConversion32B extends AudioFloatConverter {
+        ByteBuffer bytebuffer = null;
+
+        FloatBuffer floatbuffer = null;
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int in_len = out_len * 4;
+            if (bytebuffer == null || bytebuffer.capacity() < in_len) {
+                bytebuffer = ByteBuffer.allocate(in_len).order(
+                        ByteOrder.BIG_ENDIAN);
+                floatbuffer = bytebuffer.asFloatBuffer();
+            }
+            bytebuffer.position(0);
+            floatbuffer.position(0);
+            bytebuffer.put(in_buff, in_offset, in_len);
+            floatbuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int out_len = in_len * 4;
+            if (bytebuffer == null || bytebuffer.capacity() < out_len) {
+                bytebuffer = ByteBuffer.allocate(out_len).order(
+                        ByteOrder.BIG_ENDIAN);
+                floatbuffer = bytebuffer.asFloatBuffer();
+            }
+            floatbuffer.position(0);
+            bytebuffer.position(0);
+            floatbuffer.put(in_buff, in_offset, in_len);
+            bytebuffer.get(out_buff, out_offset, out_len);
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 8 bit signed/unsigned
+     * 
+     **************************************************************************/
+
+    // PCM 8 bit, signed
+    private static class AudioFloatConversion8S extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++)
+                out_buff[ox++] = in_buff[ix++] * (1.0f / 127.0f);
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++)
+                out_buff[ox++] = (byte) (in_buff[ix++] * 127.0f);
+            return out_buff;
+        }
+    }
+
+    // PCM 8 bit, unsigned
+    private static class AudioFloatConversion8U extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++)
+                out_buff[ox++] = ((in_buff[ix++] & 0xFF) - 127)
+                        * (1.0f / 127.0f);
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++)
+                out_buff[ox++] = (byte) (127 + in_buff[ix++] * 127.0f);
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 16 bit signed/unsigned, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 16 bit, signed, little-endian
+    private static class AudioFloatConversion16SL extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int len = out_offset + out_len;
+            for (int ox = out_offset; ox < len; ox++) {
+                out_buff[ox] = ((short) ((in_buff[ix++] & 0xFF) | 
+                           (in_buff[ix++] << 8))) * (1.0f / 32767.0f);
+            }
+
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ox = out_offset;
+            int len = in_offset + in_len;
+            for (int ix = in_offset; ix < len; ix++) {
+                int x = (int) (in_buff[ix] * 32767.0);
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 16 bit, signed, big-endian
+    private static class AudioFloatConversion16SB extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                out_buff[ox++] = ((short) ((in_buff[ix++] << 8) | 
+                        (in_buff[ix++] & 0xFF))) * (1.0f / 32767.0f);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * 32767.0);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 16 bit, unsigned, little-endian
+    private static class AudioFloatConversion16UL extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8);
+                out_buff[ox++] = (x - 32767) * (1.0f / 32767.0f);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = 32767 + (int) (in_buff[ix++] * 32767.0);
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 16 bit, unsigned, big-endian
+    private static class AudioFloatConversion16UB extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                out_buff[ox++] = (x - 32767) * (1.0f / 32767.0f);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = 32767 + (int) (in_buff[ix++] * 32767.0);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 24 bit signed/unsigned, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 24 bit, signed, little-endian
+    private static class AudioFloatConversion24SL extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8)
+                        | ((in_buff[ix++] & 0xFF) << 16);
+                if (x > 0x7FFFFF)
+                    x -= 0x1000000;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFF);
+                if (x < 0)
+                    x += 0x1000000;
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 24 bit, signed, big-endian
+    private static class AudioFloatConversion24SB extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 16)
+                        | ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                if (x > 0x7FFFFF)
+                    x -= 0x1000000;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFF);
+                if (x < 0)
+                    x += 0x1000000;
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 24 bit, unsigned, little-endian
+    private static class AudioFloatConversion24UL extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8)
+                        | ((in_buff[ix++] & 0xFF) << 16);
+                x -= 0x7FFFFF;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFF);
+                x += 0x7FFFFF;
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 24 bit, unsigned, big-endian
+    private static class AudioFloatConversion24UB extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 16)
+                        | ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                x -= 0x7FFFFF;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFF);
+                x += 0x7FFFFF;
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 32 bit signed/unsigned, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 32 bit, signed, little-endian
+    private static class AudioFloatConversion32SL extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8) |
+                        ((in_buff[ix++] & 0xFF) << 16) |
+                        ((in_buff[ix++] & 0xFF) << 24);
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 24);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32 bit, signed, big-endian
+    private static class AudioFloatConversion32SB extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 24) |
+                        ((in_buff[ix++] & 0xFF) << 16) |
+                        ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                out_buff[ox++] = (byte) (x >>> 24);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32 bit, unsigned, little-endian
+    private static class AudioFloatConversion32UL extends AudioFloatConverter {
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8) |
+                        ((in_buff[ix++] & 0xFF) << 16) | 
+                        ((in_buff[ix++] & 0xFF) << 24);
+                x -= 0x7FFFFFFF;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                x += 0x7FFFFFFF;
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 24);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32 bit, unsigned, big-endian
+    private static class AudioFloatConversion32UB extends AudioFloatConverter {
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 24) |
+                        ((in_buff[ix++] & 0xFF) << 16) |
+                        ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                x -= 0x7FFFFFFF;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                x += 0x7FFFFFFF;
+                out_buff[ox++] = (byte) (x >>> 24);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+            }
+            return out_buff;
+        }
+    }
+
+    /***************************************************************************
+     * 
+     * 32+ bit signed/unsigned, little/big-endian
+     * 
+     **************************************************************************/
+
+    // PCM 32+ bit, signed, little-endian
+    private static class AudioFloatConversion32xSL extends AudioFloatConverter {
+
+        final int xbytes;
+
+        public AudioFloatConversion32xSL(int xbytes) {
+            this.xbytes = xbytes;
+        }
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                ix += xbytes;
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8)
+                        | ((in_buff[ix++] & 0xFF) << 16)
+                        | ((in_buff[ix++] & 0xFF) << 24);
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                for (int j = 0; j < xbytes; j++) {
+                    out_buff[ox++] = 0;
+                }
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 24);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32+ bit, signed, big-endian
+    private static class AudioFloatConversion32xSB extends AudioFloatConverter {
+
+        final int xbytes;
+
+        public AudioFloatConversion32xSB(int xbytes) {
+            this.xbytes = xbytes;
+        }
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 24)
+                        | ((in_buff[ix++] & 0xFF) << 16)
+                        | ((in_buff[ix++] & 0xFF) << 8)
+                        | (in_buff[ix++] & 0xFF);
+                ix += xbytes;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                out_buff[ox++] = (byte) (x >>> 24);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+                for (int j = 0; j < xbytes; j++) {
+                    out_buff[ox++] = 0;
+                }
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32+ bit, unsigned, little-endian
+    private static class AudioFloatConversion32xUL extends AudioFloatConverter {
+
+        final int xbytes;
+
+        public AudioFloatConversion32xUL(int xbytes) {
+            this.xbytes = xbytes;
+        }
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                ix += xbytes;
+                int x = (in_buff[ix++] & 0xFF) | ((in_buff[ix++] & 0xFF) << 8)
+                        | ((in_buff[ix++] & 0xFF) << 16)
+                        | ((in_buff[ix++] & 0xFF) << 24);
+                x -= 0x7FFFFFFF;
+                out_buff[ox++] = x * (1.0f / (float)0x7FFFFFFF);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * (float)0x7FFFFFFF);
+                x += 0x7FFFFFFF;
+                for (int j = 0; j < xbytes; j++) {
+                    out_buff[ox++] = 0;
+                }
+                out_buff[ox++] = (byte) x;
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 24);
+            }
+            return out_buff;
+        }
+    }
+
+    // PCM 32+ bit, unsigned, big-endian
+    private static class AudioFloatConversion32xUB extends AudioFloatConverter {
+
+        final int xbytes;
+
+        public AudioFloatConversion32xUB(int xbytes) {
+            this.xbytes = xbytes;
+        }
+
+        public float[] toFloatArray(byte[] in_buff, int in_offset,
+                float[] out_buff, int out_offset, int out_len) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < out_len; i++) {
+                int x = ((in_buff[ix++] & 0xFF) << 24) |
+                        ((in_buff[ix++] & 0xFF) << 16) |
+                        ((in_buff[ix++] & 0xFF) << 8) | (in_buff[ix++] & 0xFF);
+                ix += xbytes;
+                x -= 2147483647;
+                out_buff[ox++] = x * (1.0f / 2147483647.0f);
+            }
+            return out_buff;
+        }
+
+        public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+                byte[] out_buff, int out_offset) {
+            int ix = in_offset;
+            int ox = out_offset;
+            for (int i = 0; i < in_len; i++) {
+                int x = (int) (in_buff[ix++] * 2147483647.0);
+                x += 2147483647;
+                out_buff[ox++] = (byte) (x >>> 24);
+                out_buff[ox++] = (byte) (x >>> 16);
+                out_buff[ox++] = (byte) (x >>> 8);
+                out_buff[ox++] = (byte) x;
+                for (int j = 0; j < xbytes; j++) {
+                    out_buff[ox++] = 0;
+                }
+            }
+            return out_buff;
+        }
+    }
+
+    public static AudioFloatConverter getConverter(AudioFormat format) {
+    	AudioFloatConverter conv = null;
+        if (format.getFrameSize() == 0)
+            return null;
+        if (format.getFrameSize() != 
+                ((format.getSampleSizeInBits() + 7) / 8) * format.getChannels()) {
+            return null;
+        }
+        if (format.getEncoding().equals(Encoding.PCM_SIGNED)) {
+            if (format.isBigEndian()) {
+                if (format.getSampleSizeInBits() <= 8) {
+                    conv = new AudioFloatConversion8S();
+                } else if (format.getSampleSizeInBits() > 8 &&
+                      format.getSampleSizeInBits() <= 16) {
+                    conv = new AudioFloatConversion16SB();
+                } else if (format.getSampleSizeInBits() > 16 &&
+                      format.getSampleSizeInBits() <= 24) {
+                    conv = new AudioFloatConversion24SB();
+                } else if (format.getSampleSizeInBits() > 24 &&
+                      format.getSampleSizeInBits() <= 32) {
+                    conv = new AudioFloatConversion32SB();
+                } else if (format.getSampleSizeInBits() > 32) {
+                    conv = new AudioFloatConversion32xSB(((format
+                            .getSampleSizeInBits() + 7) / 8) - 4);
+                } 
+            } else {
+                if (format.getSampleSizeInBits() <= 8) {
+                    conv = new AudioFloatConversion8S();
+                } else if (format.getSampleSizeInBits() > 8 &&
+                         format.getSampleSizeInBits() <= 16) {
+                    conv = new AudioFloatConversion16SL();
+                } else if (format.getSampleSizeInBits() > 16 &&
+                         format.getSampleSizeInBits() <= 24) {
+                    conv = new AudioFloatConversion24SL();
+                } else if (format.getSampleSizeInBits() > 24 &&
+                         format.getSampleSizeInBits() <= 32) {
+                    conv = new AudioFloatConversion32SL();
+                } else if (format.getSampleSizeInBits() > 32) {
+                    conv = new AudioFloatConversion32xSL(((format
+                            .getSampleSizeInBits() + 7) / 8) - 4);
+                }
+            }
+        } else if (format.getEncoding().equals(Encoding.PCM_UNSIGNED)) {
+            if (format.isBigEndian()) {
+                if (format.getSampleSizeInBits() <= 8) {
+                    conv = new AudioFloatConversion8U();
+                } else if (format.getSampleSizeInBits() > 8 &&
+                        format.getSampleSizeInBits() <= 16) {
+                    conv = new AudioFloatConversion16UB();
+                } else if (format.getSampleSizeInBits() > 16 &&
+                        format.getSampleSizeInBits() <= 24) {
+                    conv = new AudioFloatConversion24UB();
+                } else if (format.getSampleSizeInBits() > 24 &&
+                        format.getSampleSizeInBits() <= 32) {
+                    conv = new AudioFloatConversion32UB();
+                } else if (format.getSampleSizeInBits() > 32) {
+                    conv = new AudioFloatConversion32xUB(((
+                            format.getSampleSizeInBits() + 7) / 8) - 4);
+                }
+            } else {
+                if (format.getSampleSizeInBits() <= 8) {
+                    conv = new AudioFloatConversion8U();
+                } else if (format.getSampleSizeInBits() > 8 &&
+                        format.getSampleSizeInBits() <= 16) {
+                    conv = new AudioFloatConversion16UL();
+                } else if (format.getSampleSizeInBits() > 16 &&
+                        format.getSampleSizeInBits() <= 24) {
+                    conv = new AudioFloatConversion24UL();
+                } else if (format.getSampleSizeInBits() > 24 &&
+                        format.getSampleSizeInBits() <= 32) {
+                    conv = new AudioFloatConversion32UL();
+                } else if (format.getSampleSizeInBits() > 32) {
+                    conv = new AudioFloatConversion32xUL(((
+                            format.getSampleSizeInBits() + 7) / 8) - 4);
+                }
+            }
+        } else if (format.getEncoding().equals(PCM_FLOAT)) {
+            if (format.getSampleSizeInBits() == 32) {
+                if (format.isBigEndian())
+                    conv = new AudioFloatConversion32B();
+                else
+                    conv = new AudioFloatConversion32L();
+            } else if (format.getSampleSizeInBits() == 64) {
+                if (format.isBigEndian()) 
+                    conv = new AudioFloatConversion64B();
+                else 
+                    conv = new AudioFloatConversion64L();                
+            }
+
+        }
+
+        if ((format.getEncoding().equals(Encoding.PCM_SIGNED) || 
+                format.getEncoding().equals(Encoding.PCM_UNSIGNED)) && 
+                (format.getSampleSizeInBits() % 8 != 0)) {
+            conv = new AudioFloatLSBFilter(conv, format);
+        }
+
+        if (conv != null)
+            conv.format = format;
+        return conv;
+    }
+
+    private AudioFormat format;
+
+    public AudioFormat getFormat() {
+        return format;
+    }
+
+    public abstract float[] toFloatArray(byte[] in_buff, int in_offset,
+            float[] out_buff, int out_offset, int out_len);
+
+    public float[] toFloatArray(byte[] in_buff, float[] out_buff,
+            int out_offset, int out_len) {
+        return toFloatArray(in_buff, 0, out_buff, out_offset, out_len);
+    }
+
+    public float[] toFloatArray(byte[] in_buff, int in_offset,
+            float[] out_buff, int out_len) {
+        return toFloatArray(in_buff, in_offset, out_buff, 0, out_len);
+    }
+
+    public float[] toFloatArray(byte[] in_buff, float[] out_buff, int out_len) {
+        return toFloatArray(in_buff, 0, out_buff, 0, out_len);
+    }
+
+    public float[] toFloatArray(byte[] in_buff, float[] out_buff) {
+        return toFloatArray(in_buff, 0, out_buff, 0, out_buff.length);
+    }
+
+    public abstract byte[] toByteArray(float[] in_buff, int in_offset,
+            int in_len, byte[] out_buff, int out_offset);
+
+    public byte[] toByteArray(float[] in_buff, int in_len, byte[] out_buff,
+            int out_offset) {
+        return toByteArray(in_buff, 0, in_len, out_buff, out_offset);
+    }
+
+    public byte[] toByteArray(float[] in_buff, int in_offset, int in_len,
+            byte[] out_buff) {
+        return toByteArray(in_buff, in_offset, in_len, out_buff, 0);
+    }
+
+    public byte[] toByteArray(float[] in_buff, int in_len, byte[] out_buff) {
+        return toByteArray(in_buff, 0, in_len, out_buff, 0);
+    }
+
+    public byte[] toByteArray(float[] in_buff, byte[] out_buff) {
+        return toByteArray(in_buff, 0, in_buff.length, out_buff, 0);
+    }
+
+}

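For context, here is a minimal usage sketch of the AudioFloatConverter class added above. It is not part of this commit; the class name ConverterUsageSketch and the 44.1 kHz, 16-bit, mono, signed, little-endian PCM format are assumptions for illustration, chosen so that getConverter returns the 16-bit signed little-endian implementation.

import javax.sound.sampled.AudioFormat;

import com.yonge.audio.analysis.AudioFloatConverter;

public class ConverterUsageSketch {

	public static void main(String[] args) {
		// Hypothetical input format: 44.1 kHz, 16-bit, mono, signed PCM, little-endian
		AudioFormat format = new AudioFormat(44100, 16, 1, true, false);

		// The factory inspects encoding, sample size and endianness and
		// returns the matching converter (here: 16-bit signed little-endian).
		AudioFloatConverter converter = AudioFloatConverter.getConverter(format);

		byte[] pcm = new byte[1024]; // raw bytes, e.g. one frame of captured audio
		float[] samples = new float[pcm.length / format.getFrameSize()];

		// Each 16-bit sample is scaled to roughly [-1.0, 1.0]
		converter.toFloatArray(pcm, samples);
	}
}
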
+ 158 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/Complex.java

@@ -0,0 +1,158 @@
+package com.yonge.audio.analysis;
+
+/*************************************************************************
+ * Compilation: javac Complex.java
+ * Execution:   java Complex
+ * 
+ * Data type for complex numbers.
+ * 
+ * The data type is "immutable" so once you create and initialize a Complex
+ * object, you cannot change it. The "final" keyword when declaring re and im
+ * enforces this rule, making it a compile-time error to change the .re or .im
+ * fields after they've been initialized.
+ * 
+ * % java Complex
+ *   a = 5.0 + 6.0i
+ *   b = -3.0 + 4.0i
+ *   Re(a) = 5.0
+ *   Im(a) = 6.0
+ *   b + a = 2.0 + 10.0i
+ *   a - b = 8.0 + 2.0i
+ *   a * b = -39.0 + 2.0i
+ *   b * a = -39.0 + 2.0i
+ *   a / b = 0.36 - 1.52i
+ *   (a / b) * b = 5.0 + 6.0i
+ *   conj(a) = 5.0 - 6.0i
+ *   |a| = 7.810249675906654
+ *   tan(a) = -6.685231390246571E-6 + 1.0000103108981198i
+ * 
+ *************************************************************************/
+
+public class Complex {
+	private final double re; // the real part
+	private final double im; // the imaginary part
+
+	// create a new object with the given real and imaginary parts
+	public Complex(double real, double imag) {
+		re = real;
+		im = imag;
+	}
+
+	// return a string representation of the invoking Complex object
+	public String toString() {
+		if (im == 0)
+			return re + "";
+		if (re == 0)
+			return im + "i";
+		if (im < 0)
+			return re + " - " + (-im) + "i";
+		return re + " + " + im + "i";
+	}
+
+	// return abs/modulus/magnitude and angle/phase/argument
+	public double abs() {
+		return Math.hypot(re, im);
+	} // Math.sqrt(re*re + im*im)
+
+	public double phase() {
+		return Math.atan2(im, re);
+	} // between -pi and pi
+
+	// return a new Complex object whose value is (this + b)
+	public Complex plus(Complex b) {
+		Complex a = this; // invoking object
+		double real = a.re + b.re;
+		double imag = a.im + b.im;
+		return new Complex(real, imag);
+	}
+
+	// return a new Complex object whose value is (this - b)
+	public Complex minus(Complex b) {
+		Complex a = this;
+		double real = a.re - b.re;
+		double imag = a.im - b.im;
+		return new Complex(real, imag);
+	}
+
+	// return a new Complex object whose value is (this * b)
+	public Complex times(Complex b) {
+		Complex a = this;
+		double real = a.re * b.re - a.im * b.im;
+		double imag = a.re * b.im + a.im * b.re;
+		return new Complex(real, imag);
+	}
+
+	// scalar multiplication
+	// return a new object whose value is (this * alpha)
+	public Complex times(double alpha) {
+		return new Complex(alpha * re, alpha * im);
+	}
+
+	// return a new Complex object whose value is the conjugate of this
+	public Complex conjugate() {
+		return new Complex(re, -im);
+	}
+
+	// return a new Complex object whose value is the reciprocal of this
+	public Complex reciprocal() {
+		double scale = re * re + im * im;
+		return new Complex(re / scale, -im / scale);
+	}
+
+	// return the real or imaginary part
+	public double re() {
+		return re;
+	}
+
+	public double im() {
+		return im;
+	}
+
+	// return a / b
+	public Complex divides(Complex b) {
+		Complex a = this;
+		return a.times(b.reciprocal());
+	}
+
+	// return a new Complex object whose value is the complex exponential of
+	// this
+	public Complex exp() {
+		return new Complex(Math.exp(re) * Math.cos(im), Math.exp(re)
+				* Math.sin(im));
+	}
+
+	// return a new Complex object whose value is the complex sine of this
+	public Complex sin() {
+		return new Complex(Math.sin(re) * Math.cosh(im), Math.cos(re)
+				* Math.sinh(im));
+	}
+
+	// return a new Complex object whose value is the complex cosine of this
+	public Complex cos() {
+		return new Complex(Math.cos(re) * Math.cosh(im), -Math.sin(re)
+				* Math.sinh(im));
+	}
+
+	// return a new Complex object whose value is the complex tangent of this
+	public Complex tan() {
+		return sin().divides(cos());
+	}
+
+	// a static version of plus
+	public static Complex plus(Complex a, Complex b) {
+		double real = a.re + b.re;
+		double imag = a.im + b.im;
+		Complex sum = new Complex(real, imag);
+		return sum;
+	}
+
+	
+	public static void main(String[] args) {
+		Complex a = new Complex(5.0, 0.0);
+		Complex b = new Complex(-3.0, 4.0);
+
+		System.out.println("a            = " + a);
+		System.out.println("b            = " + b);
+		System.out.println("Re(a)        = " + a.re());
+		System.out.println("Im(a)        = " + a.im());
+		System.out.println("b + a        = " + b.plus(a));
+		System.out.println("a - b        = " + a.minus(b));
+		System.out.println("a * b        = " + a.times(b));
+		System.out.println("b * a        = " + b.times(a));
+		System.out.println("a / b        = " + a.divides(b));
+		System.out.println("(a / b) * b  = " + a.divides(b).times(b));
+		System.out.println("conj(a)      = " + a.conjugate());
+		System.out.println("|a|          = " + a.abs());
+		System.out.println("tan(a)       = " + a.tan());
+	}
+
+}

+ 167 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/FFT.java

@@ -0,0 +1,167 @@
+package com.yonge.audio.analysis;
+
+
+/*************************************************************************
+ * Compilation:  javac FFT.java
+ * Execution:    java FFT N
+ * Dependencies: Complex.java
+ * 
+ * Compute the FFT and inverse FFT of a length N complex sequence. Bare bones
+ * implementation that runs in O(N log N) time. Our goal is to optimize the
+ * clarity of the code, rather than performance.
+ * 
+ * Limitations
+ * -----------
+ * - assumes N is a power of 2
+ * 
+ * - not the most memory efficient algorithm (because it uses an object type for
+ * representing complex numbers and because it re-allocates memory for the
+ * subarray, instead of doing in-place or reusing a single temporary array)
+ * 
+ *************************************************************************/
+
+public class FFT {
+
+	// compute the FFT of x[], assuming its length is a power of 2
+	public static Complex[] fft(Complex[] x) {
+		int N = x.length;
+
+		// base case
+		if (N == 1)
+			return new Complex[] { x[0] };
+
+		// radix 2 Cooley-Tukey FFT
+		if (N % 2 != 0) {
+			throw new RuntimeException("N is not a power of 2");
+		}
+
+		// fft of even terms
+		Complex[] even = new Complex[N / 2];
+		for (int k = 0; k < N / 2; k++) {
+			even[k] = x[2 * k];
+		}
+		Complex[] q = fft(even);
+
+		// fft of odd terms
+		Complex[] odd = even; // reuse the array
+		for (int k = 0; k < N / 2; k++) {
+			odd[k] = x[2 * k + 1];
+		}
+		Complex[] r = fft(odd);
+
+		// combine
+		Complex[] y = new Complex[N];
+		for (int k = 0; k < N / 2; k++) {
+			double kth = -2 * k * Math.PI / N;
+			Complex wk = new Complex(Math.cos(kth), Math.sin(kth));
+			y[k] = q[k].plus(wk.times(r[k]));
+			y[k + N / 2] = q[k].minus(wk.times(r[k]));
+		}
+		return y;
+	}
+
+	// compute the inverse FFT of x[], assuming its length is a power of 2
+	public static Complex[] ifft(Complex[] x) {
+		int N = x.length;
+		Complex[] y = new Complex[N];
+
+		// take conjugate
+		for (int i = 0; i < N; i++) {
+			y[i] = x[i].conjugate();
+		}
+
+		// compute forward FFT
+		y = fft(y);
+
+		// take conjugate again
+		for (int i = 0; i < N; i++) {
+			y[i] = y[i].conjugate();
+		}
+
+		// divide by N
+		for (int i = 0; i < N; i++) {
+			y[i] = y[i].times(1.0 / N);
+		}
+
+		return y;
+
+	}
+
+	// compute the circular convolution of x and y
+	public static Complex[] cconvolve(Complex[] x, Complex[] y) {
+
+		// should probably pad x and y with 0s so that they have same length
+		// and are powers of 2
+		if (x.length != y.length) {
+			throw new RuntimeException("Dimensions don't agree");
+		}
+
+		int N = x.length;
+
+		// compute FFT of each sequence
+		Complex[] a = fft(x);
+		Complex[] b = fft(y);
+
+		// point-wise multiply
+		Complex[] c = new Complex[N];
+		for (int i = 0; i < N; i++) {
+			c[i] = a[i].times(b[i]);
+		}
+
+		// compute inverse FFT
+		return ifft(c);
+	}
+
+	// compute the linear convolution of x and y
+	public static Complex[] convolve(Complex[] x, Complex[] y) {
+		Complex ZERO = new Complex(0, 0);
+
+		Complex[] a = new Complex[2 * x.length];
+		for (int i = 0; i < x.length; i++)
+			a[i] = x[i];
+		for (int i = x.length; i < 2 * x.length; i++)
+			a[i] = ZERO;
+
+		Complex[] b = new Complex[2 * y.length];
+		for (int i = 0; i < y.length; i++)
+			b[i] = y[i];
+		for (int i = y.length; i < 2 * y.length; i++)
+			b[i] = ZERO;
+
+		return cconvolve(a, b);
+	}
+
+	// display an array of Complex numbers to standard output
+	public static void show(Complex[] x, String title) {
+		System.out.println(title);
+		System.out.println("-------------------");
+		for (int i = 0; i < x.length; i++) {
+			System.out.println(x[i]);
+		}
+		System.out.println();
+	}
+
+	/*********************************************************************
+	 * Test client and sample execution
+	 * 
+	 * % java FFT 4
+	 * x
+	 * -------------------
+	 * -0.03480425839330703
+	 * 0.07910192950176387
+	 * 0.7233322451735928
+	 * 0.1659819820667019
+	 * 
+	 * y = fft(x)
+	 * -------------------
+	 * 0.9336118983487516
+	 * -0.7581365035668999 + 0.08688005256493803i
+	 * 0.44344407521182005
+	 * -0.7581365035668999 - 0.08688005256493803i
+	 * 
+	 * z = ifft(y)
+	 * -------------------
+	 * -0.03480425839330703
+	 * 0.07910192950176387 + 2.6599344570851287E-18i
+	 * 0.7233322451735928
+	 * 0.1659819820667019 - 2.6599344570851287E-18i
+	 * 
+	 * c = cconvolve(x, x)
+	 * -------------------
+	 * 0.5506798633981853
+	 * 0.23461407150576394 - 4.033186818023279E-18i
+	 * -0.016542951108772352
+	 * 0.10288019294318276 + 4.033186818023279E-18i
+	 * 
+	 * d = convolve(x, x)
+	 * -------------------
+	 * 0.001211336402308083 - 3.122502256758253E-17i
+	 * -0.005506167987577068 - 5.058885073636224E-17i
+	 * -0.044092969479563274 + 2.1934338938072244E-18i
+	 * 0.10288019294318276 - 3.6147323062478115E-17i
+	 * 0.5494685269958772 + 3.122502256758253E-17i
+	 * 0.240120239493341 + 4.655566391833896E-17i
+	 * 0.02755001837079092 - 2.1934338938072244E-18i
+	 * 4.01805098805014E-17i
+	 * 
+	 *********************************************************************/
+
+}
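
A short usage sketch (not part of the commit) of the forward/inverse round trip described in the test-client comment above; the four input values are arbitrary, and the length must be a power of 2 as the limitations note:

package com.yonge.audio.analysis;

public class FFTExample {
    public static void main(String[] args) {
        // Length 4 is a power of 2, as required by fft()
        Complex[] x = new Complex[] {
                new Complex(-0.03480425839330703, 0),
                new Complex(0.07910192950176387, 0),
                new Complex(0.7233322451735928, 0),
                new Complex(0.1659819820667019, 0)
        };

        Complex[] y = FFT.fft(x);   // forward transform
        Complex[] z = FFT.ifft(y);  // inverse transform; z reproduces x up to rounding error

        FFT.show(y, "y = fft(x)");
        FFT.show(z, "z = ifft(y)");
    }
}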

+ 141 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/Signals.java

@@ -0,0 +1,141 @@
+package com.yonge.audio.analysis;
+
+import javax.sound.sampled.AudioFormat;
+import javax.sound.sampled.AudioSystem;
+import javax.sound.sampled.DataLine;
+import javax.sound.sampled.LineUnavailableException;
+import javax.sound.sampled.TargetDataLine;
+
+public class Signals {
+	public static float mean(float[] signal) {
+		float mean = 0;
+		for (int i = 0; i < signal.length; i++)
+			mean += signal[i];
+		mean /= signal.length;
+		return mean;
+	}
+
+	public static float energy(float[] signal) {
+		float totalEnergy = 0;
+		for (int i = 0; i < signal.length; i++)
+			totalEnergy += Math.pow(signal[i], 2);
+		return totalEnergy;
+	}
+
+	public static float power(float[] signal) {
+		return energy(signal) / signal.length;
+	}
+
+	public static float norm(float[] signal) {
+		return (float) Math.sqrt(energy(signal));
+	}
+
+	public static float minimum(float[] signal) {
+		float min = Float.POSITIVE_INFINITY;
+		for (int i = 0; i < signal.length; i++)
+			min = Math.min(min, signal[i]);
+		return min;
+	}
+
+	public static float maximum(float[] signal) {
+		float max = Float.NEGATIVE_INFINITY;
+		for (int i = 0; i < signal.length; i++)
+			max = Math.max(max, signal[i]);
+		return max;
+	}
+
+	public static void scale(float[] signal, float scale) {
+		for (int i = 0; i < signal.length; i++) {
+			signal[i] *= scale;
+			if (signal[i] > 32767) {
+				signal[i] = 32767;
+			} else if (signal[i] < -32768) {
+				signal[i] = -32768;
+			}
+		}
+	}
+
+	public static float rms(float[] samples) {
+		// Root mean square (RMS) power
+		return (float) Math.sqrt(power(samples));
+
+	}
+
+	public static double soundPressureLevel(float[] samples) {
+
+		double rms = rms(samples);
+		// Compute the sound pressure level (SPL) in dB
+		return (20.0 * Math.log10(rms));
+	}
+
+	public static int decibels(float[] samples) {
+		// Decibel range of the sound
+		double minDecibels = 0, db = 0, maxDecibels = 127;
+
+		double rms = rms(samples);
+
+		if (rms > 0) {
+			db = 20 * Math.log10(rms / 0.00002);// The conventional "zero" reference sound pressure in air is 20 uPa RMS, commonly taken as the threshold of human hearing
+
+			if (db > maxDecibels) {
+				db = maxDecibels;
+			} else if (db < minDecibels) {
+				db = minDecibels;
+			}
+		}
+
+		return (int) db;
+	}
+
+	public static void main(String[] args) throws LineUnavailableException {
+
+		float sampleRate = 44100;
+
+		AudioFormat audioFormat = new AudioFormat(sampleRate, 16, 1, true, false);
+
+		DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
+
+		TargetDataLine targetDataLine = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
+
+		targetDataLine.open(audioFormat);
+		targetDataLine.start();
+
+		AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
+
+		byte[] buffer = new byte[1024 * 8];
+
+		while (true) {
+			targetDataLine.read(buffer, 0, buffer.length);
+
+			float[] sampleFloats = new float[buffer.length / 2];
+			converter.toFloatArray(buffer, sampleFloats);
+
+			// Compute the sound pressure level (SPL)
+			double splDb = soundPressureLevel(sampleFloats);
+
+			int db = decibels(sampleFloats);
+
+			Complex[] complex = new Complex[sampleFloats.length];
+
+			for (int i = 0; i < sampleFloats.length; i++) {
+				complex[i] = new Complex(sampleFloats[i], 0);
+			}
+			Complex[] result = FFT.fft(complex);
+
+			double maxMagnitude = result[0].abs();
+			int maxIndex = 0;
+
+			for (int i = 1; i < result.length / 2; i++) {
+				if (maxMagnitude < result[i].abs()) {
+					maxMagnitude = result[i].abs();
+					maxIndex = i;
+				}
+			}
+
+			double f = maxIndex * sampleRate / result.length;
+
+			System.out.println("db:" + db + "  energy:" + energy(sampleFloats) + "	power:" + power(sampleFloats) + "  rms:" + rms(sampleFloats) + "	splDb: "
+					+ splDb + "	frequency: " + f);
+		}
+	}
+}
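
To make the level helpers above concrete (independently of the microphone loop in main), a minimal sketch, not part of the commit, that feeds a constant-amplitude buffer through rms, soundPressureLevel and decibels; the 0.1 amplitude and buffer length are arbitrary:

package com.yonge.audio.analysis;

import java.util.Arrays;

public class SignalsExample {
    public static void main(String[] args) {
        // A constant 0.1-amplitude "signal", as AudioFloatConverter would produce
        float[] samples = new float[1024];
        Arrays.fill(samples, 0.1f);

        // rms == 0.1, so soundPressureLevel() returns 20 * log10(0.1) = -20,
        // and decibels() computes 20 * log10(0.1 / 0.00002) ≈ 73.98, truncated to 73
        System.out.println("rms = " + Signals.rms(samples));
        System.out.println("spl = " + Signals.soundPressureLevel(samples));
        System.out.println("db  = " + Signals.decibels(samples));
    }
}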

+ 52 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/detector/FrequencyDetector.java

@@ -0,0 +1,52 @@
+package com.yonge.audio.analysis.detector;
+
+import com.yonge.audio.analysis.Complex;
+import com.yonge.audio.analysis.FFT;
+
+public class FrequencyDetector {
+
+	private float[] samples;
+
+	private boolean isUseHammingWindow;
+
+	private float sampleRate;
+
+	public FrequencyDetector(float[] samples, float sampleRate, boolean isUseHammingWindow) {
+		this.samples = samples;
+		this.sampleRate = sampleRate;
+		this.isUseHammingWindow = isUseHammingWindow;
+	}
+
+	public double getFrequency() {
+
+		if (isUseHammingWindow) {
+			// Apply a Hamming window
+			hamming(samples);
+		}
+
+		Complex[] complex = new Complex[samples.length];
+
+		for (int i = 0; i < samples.length; i++) {
+			complex[i] = new Complex(samples[i], 0);
+		}
+		Complex[] result = FFT.fft(complex);
+
+		double maxMagnitude = result[0].abs();
+		int maxIndex = 0;
+
+		for (int i = 1; i < result.length / 2; i++) {
+			if (maxMagnitude < result[i].abs()) {
+				maxMagnitude = result[i].abs();
+				maxIndex = i;
+			}
+		}
+
+		return maxIndex * sampleRate / result.length;
+	}
+
+	private void hamming(float[] samples) {
+		for (int i = 0; i < samples.length; i++) {
+			samples[i] *= (0.54f - 0.46f * Math.cos((2 * Math.PI) * i / (samples.length - 1)));
+		}
+	}
+}
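
A quick sanity-check sketch (not part of the commit) that runs the detector against a synthesized sine wave; the 440 Hz tone and 4096-sample buffer are arbitrary choices:

package com.yonge.audio.analysis.detector;

public class FrequencyDetectorExample {
    public static void main(String[] args) {
        float sampleRate = 44100f;
        int n = 4096;                    // power of 2, required by the underlying FFT
        double targetFrequency = 440.0;  // arbitrary test tone

        float[] samples = new float[n];
        for (int i = 0; i < n; i++) {
            samples[i] = (float) Math.sin(2 * Math.PI * targetFrequency * i / sampleRate);
        }

        FrequencyDetector detector = new FrequencyDetector(samples, sampleRate, true);

        // FFT bin spacing is sampleRate / n ≈ 10.8 Hz, so expect a value near 440
        System.out.println("detected ≈ " + detector.getFrequency() + " Hz");
    }
}

The detector resolves only to the nearest FFT bin; the pitch-tracking code elsewhere in this commit uses a YIN-based detector instead.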

+ 223 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/detector/YINPitchDetector.java

@@ -0,0 +1,223 @@
+package com.yonge.audio.analysis.detector;
+
+/**
+ * A pitch detector implementation that uses the YIN algorithm to determine the frequency of
+ * the provided waveform data. The YIN algorithm is similar to the Auto-correlation Function used
+ * for pitch detection but adds additional steps to better the accuracy of the results. Each step
+ * lowers the error rate further. The following implementation was inspired by
+ * <a href="https://github.com/JorenSix/TarsosDSP/blob/master/src/core/be/tarsos/dsp/pitch/Yin.java">TarsosDsp</a>
+ * and
+ * <a href="http://recherche.ircam.fr/equipes/pcm/cheveign/ps/2002_JASA_YIN_proof.pdf">this YIN paper</a>.
+ * The six steps in the YIN algorithm are (according to the YIN paper):
+ * <p>
+ * <ol>
+ * <li>Auto-correlation Method</li>
+ * <li>Difference Function</li>
+ * <li>Cumulative Mean Normalized Difference Function</li>
+ * <li>Absolute Threshold</li>
+ * <li>Parabolic Interpolation</li>
+ * <li>Best Local Estimate</li>
+ * </ol>
+ * </p>
+ * The first two steps, the Auto-correlation Method and the Difference Function, can seemingly be
+ * combined into a single difference function step according to the YIN paper.
+ */
+public class YINPitchDetector {
+    // According to the YIN Paper, the threshold should be between 0.10 and 0.15
+    private static final float ABSOLUTE_THRESHOLD = 0.125f;
+
+	/**
+	 * C-1 = 16.35 / 2 Hz.
+	 */
+	private static final double REF_FREQ = 8.17579892;
+
+	/**
+	 * Cache LOG 2 calculation.
+	 */
+	private static final double LOG_TWO = Math.log(2.0);
+
+    private final double sampleRate;
+    private final float[] resultBuffer;
+
+    public YINPitchDetector(int bufferSize, float sampleRate) {
+        this.sampleRate = sampleRate;
+        this.resultBuffer = new float[bufferSize/2];
+    }
+    
+    /**
+	 * The reference frequency is configured. The default reference frequency is
+	 * 16.35Hz. This is C0 on a piano keyboard with A4 tuned to 440 Hz. This
+	 * means that 0 cents is C0; 1200 is C1; 2400 is C2; ... also -1200 cents is
+	 * C-1
+	 * 
+	 * @param hertzValue
+	 *            The pitch in Hertz.
+	 * @return The value in absolute cents using the configured reference
+	 *         frequency
+	 */
+	public static double hertzToAbsoluteCent(final double hertzValue) {
+		double pitchInAbsCent = 0.0;
+		if (hertzValue > 0) {
+			pitchInAbsCent = 1200 * Math.log(hertzValue / REF_FREQ) / LOG_TWO;
+		}
+		return pitchInAbsCent;
+	}
+
+    public double getFrequency(float[] wave) {
+        int tau;
+
+        // First, perform the functions to normalize the wave data
+
+        // The first and second steps in the YIN algorithm
+        autoCorrelationDifference(wave);
+
+        // The third step in the YIN algorithm
+        cumulativeMeanNormalizedDifference();
+
+        // Then perform the functions to retrieve the tau (the approximate period)
+
+        // The fourth step in the YIN algorithm
+        tau = absoluteThreshold();
+
+        // The fifth step in the YIN algorithm
+        float betterTau = parabolicInterpolation(tau);
+
+        // TODO implement the sixth and final step of the YIN algorithm
+        // (it isn't implemented in the Tarsos DSP project but is briefly explained in the YIN
+        // paper).
+
+        // The fundamental frequency (note frequency) is the sampling rate divided by the tau (index
+        // within the resulting buffer array that marks the period).
+        // The period is the duration (or index here) of one cycle.
+        // Frequency = 1 / Period, with respect to the sampling rate, Frequency = Sample Rate / Period
+        return sampleRate / betterTau;
+    }
+
+    /**
+     * Performs the first and second step of the YIN Algorithm on the provided array buffer values.
+     * This is a "combination" of the AutoCorrelation Method and the Difference Function. The
+     * AutoCorrelation Method multiplies the array value at the specified index with the array value
+     * at the specified index plus the "tau" (greek letter used in the formula). Whereas the
+     * Difference Function takes the square of the difference of the two values. This is supposed to
+     * provide a more accurate result (from about 10% to about 1.95% error rate). Note that this
+     * formula is a riemann sum, meaning the operation specified above is performed and accumulated
+     * for every value in the array. The result of this function is stored in a global array,
+     * {@link #resultBuffer}, which the subsequent steps of the algorithm should use.
+     *
+     * @param wave The waveform data to perform the AutoCorrelation Difference function on.
+     */
+    private void autoCorrelationDifference(final float[] wave) {
+        // Note this algorithm is currently slow (O(n^2)). Should look for any possible optimizations.
+        int length = resultBuffer.length;
+        int i, j;
+
+        for (j = 1; j < length; j++) {
+            for (i = 0; i < length; i++) {
+                // d(tau) += (x(i) - x(i + tau))^2, accumulated over the result buffer length
+                resultBuffer[j] += Math.pow((wave[i] - wave[i + j]), 2);
+            }
+        }
+    }
+
+    /**
+     * Performs the third step in the YIN Algorithm on the {@link #resultBuffer}. The result of this
+     * function yields an even lower error rate (about 1.69% from 1.95%). The {@link #resultBuffer}
+     * is updated when this function is performed.
+     */
+    private void cumulativeMeanNormalizedDifference() {
+        // newValue = oldValue / (runningSum / tau)
+        // == (oldValue / 1) * (tau / runningSum)
+        // == oldValue * (tau / runningSum)
+
+        // Here we're using index i as the "tau" in the equation
+        int i;
+        int length = resultBuffer.length;
+        float runningSum = 0;
+
+        // Set the first value in the result buffer to the value of one
+        resultBuffer[0] = 1;
+
+        for (i = 1; i < length; i++) {
+            // The sum of this value plus all the previous values in the buffer array
+            runningSum += resultBuffer[i];
+
+            // The current value is updated to be the current value multiplied by the index divided by the running sum value
+            resultBuffer[i] *= i / runningSum;
+        }
+    }
+
+    /**
+     * Performs step four of the YIN Algorithm on the {@link #resultBuffer}. This is the first step
+     * in the algorithm to attempt finding the period of the wave data. When attempting to determine
+     * the period of a wave, it's common to search for the high or low peaks or dips of the wave.
+     * This will allow you to determine the length of a cycle or its period. However, especially
+     * with a natural sound sample, it is possible to have false dips. This makes determining the
+     * period more difficult. This function attempts to resolve this issue by introducing a
+     * threshold. The result of this function yields an even lower rate (about 0.78% from about
+     * 1.69%).
+     *
+     * @return The tau indicating the approximate period.
+     */
+    private int absoluteThreshold() {
+        int tau;
+        int length = resultBuffer.length;
+
+        // The first two values in the result buffer should be 1, so start at the third value
+        for (tau = 2; tau < length; tau++) {
+            // If we are less than the threshold, continue on until we find the lowest value
+            // indicating the lowest dip in the wave since we first crossed the threshold.
+            if (resultBuffer[tau] < ABSOLUTE_THRESHOLD) {
+                while (tau + 1 < length && resultBuffer[tau + 1] < resultBuffer[tau]) {
+                    tau++;
+                }
+
+                // We have the approximate tau value, so break the loop
+                break;
+            }
+        }
+
+        // Some implementations of this algorithm set the tau value to -1 to indicate no correct tau
+        // value was found. This implementation will just return the last tau.
+        tau = tau >= length ? length - 1 : tau;
+
+        return tau;
+    }
+
+    /**
+     * Further lowers the error rate by using parabolas to smooth the wave between the minimum and
+     * maximum points. Especially helps to detect higher frequencies more precisely. The result of
+     * this function results in only a small error rate decline from about 0.78% to about 0.77%.
+     */
+    private float parabolicInterpolation(final int currentTau) {
+        // Finds the points to fit the parabola between
+        int x0 = currentTau < 1 ? currentTau : currentTau - 1;
+        int x2 = currentTau + 1 < resultBuffer.length ? currentTau + 1 : currentTau;
+
+        // Finds the better tau estimate
+        float betterTau;
+
+        if (x0 == currentTau) {
+            if (resultBuffer[currentTau] <= resultBuffer[x2]) {
+                betterTau = currentTau;
+            } else {
+                betterTau = x2;
+            }
+        } else if (x2 == currentTau) {
+            if (resultBuffer[currentTau] <= resultBuffer[x0]) {
+                betterTau = currentTau;
+            } else {
+                betterTau = x0;
+            }
+        } else {
+            // Fit the parabola between the first point, current tau, and the last point to find a
+            // better tau estimate.
+            float s0 = resultBuffer[x0];
+            float s1 = resultBuffer[currentTau];
+            float s2 = resultBuffer[x2];
+
+            betterTau = currentTau + (s2 - s0) / (2 * (2 * s1 - s2 - s0));
+        }
+
+        return betterTau;
+    }
+}
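
A sketch (not part of the commit) that exercises the detector on a synthesized tone and converts the result to absolute cents; the buffer size and the roughly-C4 test frequency are arbitrary:

package com.yonge.audio.analysis.detector;

public class YINPitchDetectorExample {
    public static void main(String[] args) {
        float sampleRate = 44100f;
        int bufferSize = 2048;            // resultBuffer will be half this size
        double targetFrequency = 261.63;  // approximately C4, arbitrary test tone

        float[] wave = new float[bufferSize];
        for (int i = 0; i < bufferSize; i++) {
            wave[i] = (float) Math.sin(2 * Math.PI * targetFrequency * i / sampleRate);
        }

        YINPitchDetector detector = new YINPitchDetector(bufferSize, sampleRate);
        double frequency = detector.getFrequency(wave);

        System.out.println("frequency ≈ " + frequency + " Hz");
        System.out.println("absolute cents ≈ " + YINPitchDetector.hertzToAbsoluteCent(frequency));
    }
}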

+ 44 - 0
audio-analysis/src/main/java/com/yonge/audio/config/ResourceServerConfig.java

@@ -0,0 +1,44 @@
+package com.yonge.audio.config;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
+import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter;
+import org.springframework.security.oauth2.config.annotation.web.configurers.ResourceServerSecurityConfigurer;
+
+import com.ym.mec.common.security.BaseAccessDeniedHandler;
+import com.ym.mec.common.security.BaseAuthenticationEntryPoint;
+
+@Configuration
+@EnableResourceServer
+@EnableGlobalMethodSecurity(prePostEnabled = true)
+public class ResourceServerConfig extends ResourceServerConfigurerAdapter {
+
+	@Autowired
+	private BaseAccessDeniedHandler baseAccessDeniedHandler;
+
+	@Autowired
+	private BaseAuthenticationEntryPoint baseAuthenticationEntryPoint;
+
+	@Override
+	public void configure(HttpSecurity http) throws Exception {
+		http.authorizeRequests()
+				.antMatchers("/task/**")
+				.hasIpAddress("0.0.0.0/0")
+				.antMatchers("/v2/api-docs")
+				.permitAll()
+				// Resources that can be fetched by anyone without logging in
+				// .antMatchers("/ipController/**").hasIpAddress("127.0.0.1") // a specific IP may fetch resources without logging in
+				// .antMatchers("/ipControll/**").access("isAuthenticated() and hasIpAddress('127.0.0.1')")// a specific IP must be logged in to fetch
+				.anyRequest().authenticated().and().csrf().disable().exceptionHandling().accessDeniedHandler(baseAccessDeniedHandler)
+				.authenticationEntryPoint(baseAuthenticationEntryPoint).and();
+	}
+
+	@Override
+	public void configure(ResourceServerSecurityConfigurer resources) throws Exception {
+		resources.authenticationEntryPoint(baseAuthenticationEntryPoint).accessDeniedHandler(baseAccessDeniedHandler);
+	}
+
+}

+ 36 - 0
audio-analysis/src/main/java/com/yonge/audio/config/WebMvcConfig.java

@@ -0,0 +1,36 @@
+package com.yonge.audio.config;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.springframework.boot.autoconfigure.http.HttpMessageConverters;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.format.FormatterRegistry;
+import org.springframework.http.MediaType;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
+
+import com.ym.mec.common.config.EnumConverterFactory;
+import com.ym.mec.common.config.LocalFastJsonHttpMessageConverter;
+
+@Configuration
+public class WebMvcConfig implements WebMvcConfigurer {
+	
+	/**
+	 * Converter factory for enum types, registered via addConverterFactory
+	 */
+	@Override
+	public void addFormatters(FormatterRegistry registry) {
+		registry.addConverterFactory(new EnumConverterFactory());
+	}
+	
+	@Bean
+    public HttpMessageConverters fastJsonHttpMessageConverters(){
+		LocalFastJsonHttpMessageConverter converter = new LocalFastJsonHttpMessageConverter();
+        List<MediaType> fastMediaTypes =  new ArrayList<MediaType>();
+        fastMediaTypes.add(MediaType.APPLICATION_JSON_UTF8);
+        converter.setSupportedMediaTypes(fastMediaTypes);
+        return new HttpMessageConverters(converter);
+    }
+
+}

+ 98 - 0
audio-analysis/src/main/java/com/yonge/audio/utils/ArrayUtil.java

@@ -0,0 +1,98 @@
+package com.yonge.audio.utils;
+
+public class ArrayUtil {
+
+	/**
+	 * Merge two byte arrays into one
+	 * @param bt1
+	 * @param bt2
+	 * @return bt1 + bt2
+	 */
+	public static byte[] mergeByte(byte[] bt1, byte[] bt2) {
+		if (bt2.length == 0) {
+			return bt1;
+		}
+
+		byte[] bt3 = new byte[bt1.length + bt2.length];
+
+		if (bt1.length > 0) {
+			System.arraycopy(bt1, 0, bt3, 0, bt1.length);
+		}
+
+		if (bt2.length > 0) {
+			System.arraycopy(bt2, 0, bt3, bt1.length, bt2.length);
+		}
+
+		return bt3;
+	}
+
+	/**
+	 * Extract the elements between the specified start and end indexes (both inclusive) and return them as a new array
+	 * @param src
+	 * @param startIndex
+	 * @param endIndex
+	 * @return
+	 */
+	public static byte[] extractByte(byte[] src, int startIndex, int endIndex) {
+
+		if (startIndex > endIndex) {
+			throw new RuntimeException("结束索引[" + endIndex + "]不能小于起始索引[" + startIndex + "]");
+		}
+
+		byte[] target = new byte[endIndex - startIndex + 1];
+		System.arraycopy(src, startIndex, target, 0, target.length);
+
+		return target;
+	}
+
+	/**
+	 * Merge two float arrays into one
+	 * @param bt1
+	 * @param bt2
+	 * @return bt1 + bt2
+	 */
+	public static float[] mergeFloat(float[] bt1, float[] bt2) {
+		if (bt2.length == 0) {
+			return bt1;
+		}
+
+		float[] bt3 = new float[bt1.length + bt2.length];
+
+		if (bt1.length > 0) {
+			System.arraycopy(bt1, 0, bt3, 0, bt1.length);
+		}
+
+		if (bt2.length > 0) {
+			System.arraycopy(bt2, 0, bt3, bt1.length, bt2.length);
+		}
+
+		return bt3;
+	}
+
+	/**
+	 * Extract the elements between the specified start and end indexes (both inclusive) and return them as a new array
+	 * @param src
+	 * @param startIndex
+	 * @param endIndex
+	 * @return
+	 */
+	public static float[] extractFloat(float[] src, int startIndex, int endIndex) {
+		if (startIndex > endIndex) {
+			throw new RuntimeException("结束索引[" + endIndex + "]不能小于起始索引[" + startIndex + "]");
+		}
+
+		float[] target = new float[endIndex - startIndex + 1];
+		System.arraycopy(src, startIndex, target, 0, target.length);
+
+		return target;
+	}
+
+	public static void main(String[] args) {
+		byte[] b1 = { 1, 2, 3, 4, 5 };
+		//byte[] b2 = { 3, 2, 1 };
+		byte[] r = extractByte(b1, 0, 4);
+		for (int i = 0; i < r.length; i++) {
+			System.out.println(r[i]);
+		}
+	}
+}
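
A small sketch (not part of the commit) of the merge/extract pair, in the same spirit as the main method above; the sample values are arbitrary:

package com.yonge.audio.utils;

import java.util.Arrays;

public class ArrayUtilExample {
    public static void main(String[] args) {
        byte[] first = { 1, 2, 3 };
        byte[] second = { 4, 5 };

        // Concatenation: {1, 2, 3, 4, 5}
        byte[] merged = ArrayUtil.mergeByte(first, second);

        // Inclusive slice of indexes 1..3: {2, 3, 4}
        byte[] slice = ArrayUtil.extractByte(merged, 1, 3);

        System.out.println(Arrays.toString(merged));
        System.out.println(Arrays.toString(slice));
    }
}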

+ 42 - 0
audio-analysis/src/main/java/com/yonge/netty/common/message/Message.java

@@ -0,0 +1,42 @@
+package com.yonge.netty.common.message;
+
+/**
+ * Message body of the communication protocol
+ */
+public class Message<T> {
+
+	/**
+	 * Message type
+	 */
+	private String type;
+
+	/**
+	 * Message payload
+	 */
+	private T data;
+
+	// No-arg constructor
+	public Message() {
+	}
+
+	public Message(String type, T data) {
+		this.type = type;
+		this.data = data;
+	}
+
+	public String getType() {
+		return type;
+	}
+
+	public void setType(String type) {
+		this.type = type;
+	}
+
+	public T getData() {
+		return data;
+	}
+
+	public void setData(T data) {
+		this.data = data;
+	}
+}

+ 34 - 0
audio-analysis/src/main/java/com/yonge/netty/common/message/MessageDispatcher.java

@@ -0,0 +1,34 @@
+package com.yonge.netty.common.message;
+
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+public class MessageDispatcher extends SimpleChannelInboundHandler<Message<?>> {
+
+	@Autowired
+	private MessageHandlerContainer messageHandlerContainer;
+
+	private final ExecutorService executor = Executors.newFixedThreadPool(200);
+
+	@Override
+	protected void channelRead0(ChannelHandlerContext ctx, Message<?> message) {
+		// Look up the MessageHandler registered for this message type
+		MessageHandler messageHandler = messageHandlerContainer.getMessageHandler(message.getType());
+		// Obtain the message class handled by the MessageHandler
+		// Class<? extends Message> messageClass = MessageHandlerContainer.getMessageClass(messageHandler);
+		// Execute the handler logic on the worker pool
+		executor.submit(new Runnable() {
+
+			@Override
+			public void run() {
+				messageHandler.execute(ctx.channel(), message.getData());
+			}
+
+		});
+	}
+}

+ 20 - 0
audio-analysis/src/main/java/com/yonge/netty/common/message/MessageHandler.java

@@ -0,0 +1,20 @@
+package com.yonge.netty.common.message;
+
+import io.netty.channel.Channel;
+
+public interface MessageHandler<T> {
+
+	/**
+	 * Handle an incoming message
+	 *
+	 * @param channel the Netty channel the message arrived on
+	 * @param message the decoded message payload
+	 */
+	void execute(Channel channel, T message);
+
+	/**
+	 * @return the message type, i.e. the TYPE constant declared on each Message implementation
+	 */
+	String getType();
+
+}
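
A hypothetical handler showing how this interface plugs into MessageDispatcher and MessageHandlerContainer above: the "PING" type, the PingPayload class and the @Component registration are illustrative assumptions, not part of the commit, and the sketch assumes the channel pipeline can encode a Message:

package com.yonge.netty.common.message;

import io.netty.channel.Channel;
import org.springframework.stereotype.Component;

// Hypothetical payload type, for illustration only
class PingPayload {
    private long timestamp;
    public long getTimestamp() { return timestamp; }
    public void setTimestamp(long timestamp) { this.timestamp = timestamp; }
}

// Registered as a Spring bean so MessageHandlerContainer.afterPropertiesSet() indexes it by type
@Component
public class PingMessageHandler implements MessageHandler<PingPayload> {

    @Override
    public void execute(Channel channel, PingPayload message) {
        // Invoked by MessageDispatcher on its worker pool; echo the payload back
        channel.writeAndFlush(new Message<>(getType(), message));
    }

    @Override
    public String getType() {
        return "PING";
    }
}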

+ 89 - 0
audio-analysis/src/main/java/com/yonge/netty/common/message/MessageHandlerContainer.java

@@ -0,0 +1,89 @@
+package com.yonge.netty.common.message;
+
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.aop.framework.AopProxyUtils;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.stereotype.Component;
+
+//@Component
+public class MessageHandlerContainer implements InitializingBean {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(MessageHandlerContainer.class);
+
+	/**
+	 * Mapping from message type to its MessageHandler
+	 */
+	private final Map<String, MessageHandler<?>> handlers = new HashMap<String, MessageHandler<?>>();
+
+	@Autowired
+	private ApplicationContext applicationContext;
+
+	@Override
+	public void afterPropertiesSet() throws Exception {
+		// Collect every MessageHandler bean from the ApplicationContext
+		applicationContext.getBeansOfType(MessageHandler.class).values() // all MessageHandler beans
+				.forEach(messageHandler -> handlers.put(messageHandler.getType(), messageHandler)); // register them by type
+		LOGGER.info("[afterPropertiesSet][消息处理器数量:{}]", handlers.size());
+	}
+
+	/**
+	 * Get the MessageHandler registered for the given message type
+	 *
+	 * @param type the message type
+	 * @return MessageHandler
+	 */
+	MessageHandler<?> getMessageHandler(String type) {
+		MessageHandler<?> handler = handlers.get(type);
+		if (handler == null) {
+			throw new IllegalArgumentException(String.format("类型(%s) 找不到匹配的 MessageHandler 处理器", type));
+		}
+		return handler;
+	}
+
+	/**
+	 * Resolve the message class handled by a MessageHandler
+	 *
+	 * @param handler the handler
+	 * @return the message class
+	 */
+	static Class<? extends Message> getMessageClass(MessageHandler<?> handler) {
+		// Resolve the bean's target class, since it may have been proxied by AOP.
+		Class<?> targetClass = AopProxyUtils.ultimateTargetClass(handler);
+		// Get the generic interface types
+		Type[] interfaces = targetClass.getGenericInterfaces();
+		Class<?> superclass = targetClass.getSuperclass();
+		while ((Objects.isNull(interfaces) || 0 == interfaces.length) && Objects.nonNull(superclass)) { // fall back to the superclass's interfaces
+			interfaces = superclass.getGenericInterfaces();
+			superclass = superclass.getSuperclass();
+		}
+		if (Objects.nonNull(interfaces)) {
+			// Iterate over the interfaces
+			for (Type type : interfaces) {
+				// The type must be a parameterized type
+				if (type instanceof ParameterizedType) {
+					ParameterizedType parameterizedType = (ParameterizedType) type;
+					// It must be the MessageHandler interface
+					if (Objects.equals(parameterizedType.getRawType(), MessageHandler.class)) {
+						Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();
+						// Take the first type argument
+						if (Objects.nonNull(actualTypeArguments) && actualTypeArguments.length > 0) {
+							return (Class<Message>) actualTypeArguments[0];
+						} else {
+							throw new IllegalStateException(String.format("类型(%s) 获得不到消息类型", handler));
+						}
+					}
+				}
+			}
+		}
+		throw new IllegalStateException(String.format("类型(%s) 获得不到消息类型", handler));
+	}
+}

+ 100 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/ChunkAnalysis.java

@@ -0,0 +1,100 @@
+package com.yonge.netty.dto;
+
+public class ChunkAnalysis {
+
+	private double startTime;
+
+	private double endTime;
+
+	private double durationTime;
+
+	private int frequency;
+
+	private int splDb;
+
+	private int power;
+	
+	private int amplitude;
+	
+	private boolean isPeak;
+
+	public ChunkAnalysis(double startTime, double endTime, int frequency, int splDb, int power, int amplitude) {
+		this.startTime = startTime;
+		this.endTime = endTime;
+		this.frequency = frequency;
+		this.splDb = splDb;
+		this.power = power;
+		this.amplitude = amplitude;
+		this.durationTime = endTime - startTime;
+	}
+
+	public ChunkAnalysis(int frequency, int splDb, int power) {
+		this.frequency = frequency;
+		this.splDb = splDb;
+		this.power = power;
+	}
+
+	public double getStartTime() {
+		return startTime;
+	}
+
+	public void setStartTime(double startTime) {
+		this.startTime = startTime;
+	}
+
+	public double getEndTime() {
+		return endTime;
+	}
+
+	public void setEndTime(double endTime) {
+		this.endTime = endTime;
+	}
+
+	public double getDurationTime() {
+		return durationTime;
+	}
+
+	public void setDurationTime(double durationTime) {
+		this.durationTime = durationTime;
+	}
+
+	public int getFrequency() {
+		return frequency;
+	}
+
+	public void setFrequency(int frequency) {
+		this.frequency = frequency;
+	}
+
+	public int getSplDb() {
+		return splDb;
+	}
+
+	public void setSplDb(int splDb) {
+		this.splDb = splDb;
+	}
+
+	public int getPower() {
+		return power;
+	}
+
+	public void setPower(int power) {
+		this.power = power;
+	}
+
+	public int getAmplitude() {
+		return amplitude;
+	}
+
+	public void setAmplitude(int amplitude) {
+		this.amplitude = amplitude;
+	}
+
+	public boolean isPeak() {
+		return isPeak;
+	}
+
+	public void setPeak(boolean isPeak) {
+		this.isPeak = isPeak;
+	}
+}

+ 134 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/HardLevelEnum.java

@@ -0,0 +1,134 @@
+package com.yonge.netty.dto;
+
+import com.ym.mec.common.enums.BaseEnum;
+
+public enum HardLevelEnum implements BaseEnum<String, HardLevelEnum> {
+	/**
+	 * Beginner level: amplitude threshold, frequency threshold <br>
+	 * valid tempo range for whole, half, quarter, eighth, 16th and 32nd notes<br>
+	 * completion range, not-played range
+	 */
+	//BEGINNER("入门级", 3, 5, 5, 5, 10, 10, 13, 15, 60, 10), 
+	BEGINNER("入门级", 3, 5, 10, 10, 15, 15, 22, 22, 75, 25), 
+	/**
+	 * Advanced level: amplitude threshold, frequency threshold <br>
+	 * valid tempo range for whole, half, quarter, eighth, 16th and 32nd notes<br>
+	 * completion range, not-played range
+	 */
+	ADVANCED("进阶级", 3, 5, 8, 8, 12, 12, 20, 20, 85, 15),
+	//ADVANCED("进阶级", 3, 5, 50, 50, 50, 50, 50, 5, 80, 10),
+	/**
+	 * Master level: amplitude threshold, frequency threshold <br>
+	 * valid tempo range for whole, half, quarter, eighth, 16th and 32nd notes<br>
+	 * completion range, not-played range
+	 */
+	PERFORMER("大师级", 3, 3, 5, 5, 10, 10, 13, 15, 95, 10);
+
+	private String msg;
+
+	private int amplitudeThreshold;
+
+	private int frequencyThreshold;
+
+	private int tempoEffectiveRangeOf1;
+
+	private int tempoEffectiveRangeOf2;
+
+	private int tempoEffectiveRangeOf4;
+
+	private int tempoEffectiveRangeOf8;
+
+	private int tempoEffectiveRangeOf16;
+
+	private int tempoEffectiveRangeOf32;
+
+	private int integrityRange;
+
+	private int notPlayRange;
+
+	/**
+	 * 
+	 * @param msg
+	 * @param amplitudeThreshold amplitude threshold
+	 * @param frequencyThreshold frequency threshold
+	 * @param tempoEffectiveRangeOf1 tempo offset percentage for whole notes (the rhythm only counts as correct within this range)
+	 * @param tempoEffectiveRangeOf2 tempo offset percentage for half notes (the rhythm only counts as correct within this range)
+	 * @param tempoEffectiveRangeOf4 tempo offset percentage for quarter notes (the rhythm only counts as correct within this range)
+	 * @param tempoEffectiveRangeOf8 tempo offset percentage for eighth notes (the rhythm only counts as correct within this range)
+	 * @param tempoEffectiveRangeOf16 tempo offset percentage for 16th notes (the rhythm only counts as correct within this range)
+	 * @param tempoEffectiveRangeOf32 tempo offset percentage for 32nd notes (the rhythm only counts as correct within this range)
+	 * @param integrityRange completion range
+	 * @param notPlayRange not-played range
+	 */
+	HardLevelEnum(String msg, int amplitudeThreshold, int frequencyThreshold, int tempoEffectiveRangeOf1, int tempoEffectiveRangeOf2,
+			int tempoEffectiveRangeOf4, int tempoEffectiveRangeOf8, int tempoEffectiveRangeOf16, int tempoEffectiveRangeOf32, int integrityRange,
+			int notPlayRange) {
+		this.msg = msg;
+		this.amplitudeThreshold = amplitudeThreshold;
+		this.frequencyThreshold = frequencyThreshold;
+		this.tempoEffectiveRangeOf1 = tempoEffectiveRangeOf1;
+		this.tempoEffectiveRangeOf2 = tempoEffectiveRangeOf2;
+		this.tempoEffectiveRangeOf4 = tempoEffectiveRangeOf4;
+		this.tempoEffectiveRangeOf8 = tempoEffectiveRangeOf8;
+		this.tempoEffectiveRangeOf16 = tempoEffectiveRangeOf16;
+		this.tempoEffectiveRangeOf32 = tempoEffectiveRangeOf32;
+		this.integrityRange = integrityRange;
+		this.notPlayRange = notPlayRange;
+	}
+
+	public String getMsg() {
+		return msg;
+	}
+
+	public int getAmplitudeThreshold() {
+		return amplitudeThreshold;
+	}
+
+	public int getFrequencyThreshold() {
+		return frequencyThreshold;
+	}
+
+	public int getTempoEffectiveRange(int denominator) {
+		
+		int tempoEffectiveRange = 0;
+		
+		switch (denominator) {
+		case 1:
+			tempoEffectiveRange = tempoEffectiveRangeOf1;
+			break;
+		case 2:
+			tempoEffectiveRange = tempoEffectiveRangeOf2;
+			break;
+		case 4:
+			tempoEffectiveRange = tempoEffectiveRangeOf4;
+			break;
+		case 8:
+			tempoEffectiveRange = tempoEffectiveRangeOf8;
+			break;
+		case 16:
+			tempoEffectiveRange = tempoEffectiveRangeOf16;
+			break;
+		case 32:
+			tempoEffectiveRange = tempoEffectiveRangeOf32;
+			break;
+
+		default:
+			break;
+		}
+		return tempoEffectiveRange;
+	}
+
+	public int getIntegrityRange() {
+		return integrityRange;
+	}
+
+	public int getNotPlayRange() {
+		return notPlayRange;
+	}
+
+	@Override
+	public String getCode() {
+		return this.name();
+	}
+
+}
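
A quick sketch (not part of the commit) of how the per-denominator lookup resolves against the constants declared above (ADVANCED is defined with tempo ranges 8, 8, 12, 12, 20, 20):

package com.yonge.netty.dto;

public class HardLevelEnumExample {
    public static void main(String[] args) {
        HardLevelEnum level = HardLevelEnum.valueOf("ADVANCED");

        // Quarter and eighth notes share the 12% window for ADVANCED
        System.out.println(level.getTempoEffectiveRange(4));  // 12
        System.out.println(level.getTempoEffectiveRange(8));  // 12
        // 16th notes get the wider 20% window
        System.out.println(level.getTempoEffectiveRange(16)); // 20
        // Unsupported denominators fall through to 0
        System.out.println(level.getTempoEffectiveRange(3));  // 0
    }
}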

+ 180 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/NoteAnalysis.java

@@ -0,0 +1,180 @@
+package com.yonge.netty.dto;
+
+import com.ym.mec.common.enums.BaseEnum;
+
+public class NoteAnalysis {
+
+	public enum NoteErrorType implements BaseEnum<String, NoteErrorType> {
+		RIGHT("演奏正确"), CADENCE_WRONG("节奏错误"), INTONATION_WRONG("音准错误"), INTEGRITY_WRONG("完整度不足"), NOT_PLAY("未演奏");
+
+		private String msg;
+
+		NoteErrorType(String msg) {
+			this.msg = msg;
+		}
+
+		public String getMsg() {
+			return msg;
+		}
+
+		@Override
+		public String getCode() {
+			return this.name();
+		}
+	}
+
+	private int index;
+
+	private int sectionIndex;
+
+	private double startTime;
+
+	private double endTime;
+	
+	private double durationTime;
+
+	private int frequency;
+
+	private int playFrequency = -1;
+
+	private boolean tempo = true;
+
+	private NoteErrorType noteErrorType = NoteErrorType.RIGHT;
+
+	private int score;
+	
+	private int intonationScore;
+	
+	private int tempoScore;
+	
+	private int integrityScore;
+
+	private boolean ignore;
+	
+	public NoteAnalysis(int index, int sectionIndex, int frequency, double durationTime) {
+		this.durationTime = durationTime;
+		this.index = index;
+		this.sectionIndex = sectionIndex;
+		this.frequency = frequency;
+	}
+
+	public NoteAnalysis(double startTime, double endTime, int playFrequency) {
+		this.startTime = startTime;
+		this.endTime = endTime;
+		this.durationTime = endTime - startTime;
+		this.playFrequency = playFrequency;
+	}
+
+	public int getMusicalNotesIndex() {
+		return index;
+	}
+
+	public void setMusicalNotesIndex(int index) {
+		this.index = index;
+	}
+
+	public double getStartTime() {
+		return startTime;
+	}
+
+	public void setStartTime(double startTime) {
+		this.startTime = startTime;
+	}
+
+	public double getEndTime() {
+		return endTime;
+	}
+
+	public void setEndTime(double endTime) {
+		this.endTime = endTime;
+	}
+
+	public double getDurationTime() {
+		return durationTime;
+	}
+
+	public void setDurationTime(double durationTime) {
+		this.durationTime = durationTime;
+	}
+
+	public double getPlayFrequency() {
+		return playFrequency;
+	}
+
+	public void setPlayFrequency(int playFrequency) {
+		this.playFrequency = playFrequency;
+	}
+
+	public int getFrequency() {
+		return frequency;
+	}
+
+	public void setFrequency(int frequency) {
+		this.frequency = frequency;
+	}
+
+	public boolean isTempo() {
+		return tempo;
+	}
+
+	public void setTempo(boolean tempo) {
+		this.tempo = tempo;
+	}
+
+	public int getSectionIndex() {
+		return sectionIndex;
+	}
+
+	public void setSectionIndex(int sectionIndex) {
+		this.sectionIndex = sectionIndex;
+	}
+
+	public boolean isIgnore() {
+		return ignore;
+	}
+
+	public void setIgnore(boolean ignore) {
+		this.ignore = ignore;
+	}
+
+	public NoteErrorType getMusicalErrorType() {
+		return noteErrorType;
+	}
+
+	public void setMusicalErrorType(NoteErrorType noteErrorType) {
+		this.noteErrorType = noteErrorType;
+	}
+
+	public int getScore() {
+		return score;
+	}
+
+	public void setScore(int score) {
+		this.score = score;
+	}
+
+	public int getIntonationScore() {
+		return intonationScore;
+	}
+
+	public void setIntonationScore(int intonationScore) {
+		this.intonationScore = intonationScore;
+	}
+
+	public int getTempoScore() {
+		return tempoScore;
+	}
+
+	public void setTempoScore(int tempoScore) {
+		this.tempoScore = tempoScore;
+	}
+
+	public int getIntegrityScore() {
+		return integrityScore;
+	}
+
+	public void setIntegrityScore(int integrityScore) {
+		this.integrityScore = integrityScore;
+	}
+
+}

+ 78 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/NoteFrequencyRange.java

@@ -0,0 +1,78 @@
+package com.yonge.netty.dto;
+
+import java.math.BigDecimal;
+
+/**
+ * The frequency range of a single note, bounded by a minimum and a maximum frequency
+ */
+public class NoteFrequencyRange {
+
+	private double minFrequency;
+
+	private double maxFrequency;
+
+	public NoteFrequencyRange(double standardFrequecy, double frequency) {
+		int midiNoteSize = 128;
+		double[] midiNoteFrequencies = new double[midiNoteSize];
+
+		for (int x = 0; x < midiNoteSize; ++x) {
+			midiNoteFrequencies[x] = new BigDecimal(standardFrequecy).multiply(
+					new BigDecimal(Math.pow(2, new BigDecimal(x - 69).divide(new BigDecimal(12), 6, BigDecimal.ROUND_HALF_UP).doubleValue()))).doubleValue();
+
+			if(frequency <= 0){
+				continue;
+			}
+			
+			if (midiNoteFrequencies[x] >= frequency) {
+				if (midiNoteFrequencies[x] - frequency > frequency - midiNoteFrequencies[x - 1]) {
+					// the played frequency corresponds to the previous note
+					maxFrequency = midiNoteFrequencies[x - 1] + (midiNoteFrequencies[x] - midiNoteFrequencies[x - 1]) / 2;
+					minFrequency = midiNoteFrequencies[x - 1] - (midiNoteFrequencies[x - 1] - midiNoteFrequencies[x - 2]) / 2;
+				} else {
+					// the played frequency corresponds to the current note
+					midiNoteFrequencies[x + 1] = new BigDecimal(standardFrequecy).multiply(
+							new BigDecimal(Math.pow(2, new BigDecimal((x + 1) - 69).divide(new BigDecimal(12), 6, BigDecimal.ROUND_HALF_UP).doubleValue())))
+							.doubleValue();
+					maxFrequency = midiNoteFrequencies[x] + (midiNoteFrequencies[x + 1] - midiNoteFrequencies[x]) / 2;
+					minFrequency = midiNoteFrequencies[x] - (midiNoteFrequencies[x] - midiNoteFrequencies[x - 1]) / 2;
+				}
+				break;
+			}
+		}
+	}
+
+	public NoteFrequencyRange(double frequency) {
+		this(442, frequency); // delegate so the computed range is stored on this instance
+	}
+
+	public double getMinFrequency() {
+		return minFrequency;
+	}
+
+	public void setMinFrequency(double minFrequency) {
+		this.minFrequency = minFrequency;
+	}
+
+	public double getMaxFrequency() {
+		return maxFrequency;
+	}
+
+	public void setMaxFrequency(double maxFrequency) {
+		this.maxFrequency = maxFrequency;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (obj instanceof NoteFrequencyRange) {
+			NoteFrequencyRange nfr = (NoteFrequencyRange) obj;
+			return this.minFrequency == nfr.minFrequency && this.maxFrequency == nfr.maxFrequency;
+		}
+		return false;
+	}
+	
+	public static void main(String[] args) {
+		NoteFrequencyRange nfr = new NoteFrequencyRange(442,442);
+		System.out.println(nfr.getMinFrequency() + "-"+ nfr.getMaxFrequency());
+	}
+
+}
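
Roughly what the main method above prints, worked out by hand: for A4 at 442 Hz the neighbouring equal-tempered notes sit near 417.2 Hz and 468.3 Hz, and the constructor takes the half-way points, so the range comes out around 429.6 to 455.1 Hz (approximate figures, not checked against the exact BigDecimal rounding). A small sketch, not part of the commit, of the typical in-range check, similar to how UserChannelContext.queryNoteFrequency uses this class:

package com.yonge.netty.dto;

public class NoteFrequencyRangeExample {
    public static void main(String[] args) {
        // A4 tuned to 442 Hz; half-way boundaries land near 429.6 Hz and 455.1 Hz (approximate)
        NoteFrequencyRange range = new NoteFrequencyRange(442, 442);
        System.out.println(range.getMinFrequency() + " - " + range.getMaxFrequency());

        // A played pitch counts as this note when it falls inside the window
        double played = 440;
        boolean inRange = played >= range.getMinFrequency() && played <= range.getMaxFrequency();
        System.out.println("440 Hz in range: " + inRange);
    }
}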

+ 28 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/NotePlayResult.java

@@ -0,0 +1,28 @@
+package com.yonge.netty.dto;
+
+public class NotePlayResult {
+
+	private boolean status;
+	
+	private double migrationRate;
+	
+	public NotePlayResult() {
+		// TODO Auto-generated constructor stub
+	}
+
+	public boolean getStatus() {
+		return status;
+	}
+
+	public void setStatus(boolean status) {
+		this.status = status;
+	}
+
+	public double getMigrationRate() {
+		return migrationRate;
+	}
+
+	public void setMigrationRate(double migrationRate) {
+		this.migrationRate = migrationRate;
+	}
+}

+ 78 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/SectionAnalysis.java

@@ -0,0 +1,78 @@
+package com.yonge.netty.dto;
+
+import org.apache.commons.lang3.builder.ToStringBuilder;
+
+public class SectionAnalysis {
+
+	// Measure (section) index
+	private int measureIndex;
+
+	// Number of notes
+	private int noteNum;
+
+	// Duration
+	private double durationTime;
+
+	// Score
+	private float score;
+	
+	private boolean isIngore;
+	
+	public SectionAnalysis() {
+		// TODO Auto-generated constructor stub
+	}
+
+	public SectionAnalysis(int index, int noteNum, float durationTime, float score, boolean isIngore) {
+		this.measureIndex = index;
+		this.noteNum = noteNum;
+		this.durationTime = durationTime;
+		this.score = score;
+		this.isIngore = isIngore;
+	}
+
+	public int getIndex() {
+		return measureIndex;
+	}
+
+	public void setIndex(int measureIndex) {
+		this.measureIndex = measureIndex;
+	}
+
+	public int getNoteNum() {
+		return noteNum;
+	}
+
+	public void setNoteNum(int noteNum) {
+		this.noteNum = noteNum;
+	}
+
+	public double getDurationTime() {
+		return durationTime;
+	}
+
+	public void setDurationTime(double durationTime) {
+		this.durationTime = durationTime;
+	}
+
+	public float getScore() {
+		return score;
+	}
+
+	public void setScore(float score) {
+		this.score = score;
+	}
+	
+	public boolean isIngore() {
+		return isIngore;
+	}
+
+	public void setIsIngore(boolean isIngore) {
+		this.isIngore = isIngore;
+	}
+
+	@Override
+	public String toString() {
+		return ToStringBuilder.reflectionToString(this);
+	}
+
+}

+ 940 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java

@@ -0,0 +1,940 @@
+package com.yonge.netty.dto;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import be.tarsos.dsp.pitch.FastYin;
+
+import com.yonge.audio.analysis.Signals;
+import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.netty.dto.NoteAnalysis.NoteErrorType;
+import com.yonge.netty.entity.MusicXmlBasicInfo;
+import com.yonge.netty.entity.MusicXmlNote;
+import com.yonge.netty.entity.MusicXmlSection;
+import com.yonge.netty.server.processor.WaveformWriter;
+
+/**
+ * User channel context
+ */
+public class UserChannelContext {
+	
+	private final static Logger LOGGER = LoggerFactory.getLogger(UserChannelContext.class);
+	
+	// Percussion instruments (subject ids)
+	private final static List<Integer> percussionList = Arrays.asList(23, 113);
+	
+	private final static int MIN_FREQUECY = 100;
+	
+	private final static int MAX_FREQUECY = 2000;
+	
+	private FastYin detector;
+	
+	private String user;
+	
+	private double standardFrequecy = 442;
+	
+	private float offsetMS;
+	
+	private double dynamicOffset;
+	
+	private String platform;
+	
+	private Long recordId;
+	
+	private Integer subjectId;
+	
+	private float beatDuration;
+	
+	private boolean delayProcessed;
+	
+	// Mapping between song id and its MusicXML info
+	private ConcurrentHashMap<Integer, MusicXmlBasicInfo> songMusicXmlMap = new ConcurrentHashMap<Integer, MusicXmlBasicInfo>();
+
+	private WaveformWriter waveFileProcessor;
+
+	private NoteAnalysis processingNote = new NoteAnalysis(0, 0, -1);
+	
+	private AtomicInteger evaluatingSectionIndex = new AtomicInteger(0);
+	
+	private List<NoteAnalysis> doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+	
+	private List<SectionAnalysis> doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+	
+	private List<ChunkAnalysis> totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+	
+	private byte[] channelBufferBytes = new byte[0];
+	
+	private double playTime;
+	
+	private HardLevelEnum hardLevel = HardLevelEnum.ADVANCED;
+	
+	private boolean handlerSwitch;
+	
+	private NotePlayResult queryNoteFrequency(MusicXmlNote xmlNote, double playFrequency) {
+
+		NotePlayResult result = new NotePlayResult();
+
+		boolean status = false;
+		double migrationRate = 0;
+
+		if (Math.round(xmlNote.getFrequency()) == Math.round(playFrequency)) {
+			status = true;
+			migrationRate = 0;
+		} else {
+			NoteFrequencyRange noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, xmlNote.getFrequency());
+
+			if (noteFrequencyRange.getMinFrequency() > playFrequency || playFrequency > noteFrequencyRange.getMaxFrequency()) {
+				status = false;
+			} else {
+
+				status = true;
+
+				if (Math.round(playFrequency) < Math.round(xmlNote.getFrequency())) {
+					double min = Math.abs(xmlNote.getFrequency() - noteFrequencyRange.getMinFrequency()) / 2;
+					migrationRate = Math.abs(playFrequency - xmlNote.getFrequency()) / min;
+				} else {
+					double max = Math.abs(xmlNote.getFrequency() - noteFrequencyRange.getMaxFrequency()) / 2;
+					migrationRate = Math.abs(playFrequency - xmlNote.getFrequency()) / max;
+				}
+			}
+		}
+
+		result.setStatus(status);
+		result.setMigrationRate(migrationRate);
+
+		return result;
+	}
+	
+	public void init(String platform, String heardLevel, int subjectId, float beatDuration,float sampleRate, int bufferSize) {
+		this.platform = platform;
+		this.subjectId = subjectId;
+		this.beatDuration = beatDuration;
+		hardLevel = HardLevelEnum.valueOf(heardLevel);
+		if(detector == null){
+			detector = new FastYin(sampleRate, bufferSize);
+		}
+	}
+	
+	public void setUser(String user) {
+		this.user = user;
+	}
+
+	public Long getRecordId() {
+		return recordId;
+	}
+
+	public void setRecordId(Long recordId) {
+		this.recordId = recordId;
+	}
+
+	public boolean getHandlerSwitch() {
+		return handlerSwitch;
+	}
+
+	public void setHandlerSwitch(boolean handlerSwitch) {
+		this.handlerSwitch = handlerSwitch;
+	}
+
+	public float getOffsetMS() {
+		return offsetMS;
+	}
+
+	public void setOffsetMS(float offsetMS) {
+		this.offsetMS = offsetMS;
+	}
+
+	public float getBeatDuration() {
+		return beatDuration;
+	}
+
+	public void setBeatDuration(float beatDuration) {
+		this.beatDuration = beatDuration;
+	}
+
+	public HardLevelEnum getHardLevel() {
+		return hardLevel;
+	}
+
+	public ConcurrentHashMap<Integer, MusicXmlBasicInfo> getSongMusicXmlMap() {
+		return songMusicXmlMap;
+	}
+
+	public WaveformWriter getWaveFileProcessor() {
+		return waveFileProcessor;
+	}
+
+	public void setWaveFileProcessor(WaveformWriter waveFileProcessor) {
+		this.waveFileProcessor = waveFileProcessor;
+	}
+
+	public NoteAnalysis getProcessingNote() {
+		return processingNote;
+	}
+
+	public void setProcessingNote(NoteAnalysis processingNote) {
+		this.processingNote = processingNote;
+	}
+	
+	public List<SectionAnalysis> getDoneSectionAnalysisList() {
+		return doneSectionAnalysisList;
+	}
+
+	public List<NoteAnalysis> getDoneNoteAnalysisList() {
+		return doneNoteAnalysisList;
+	}
+
+	public void resetUserInfo() {
+		waveFileProcessor = null;
+		processingNote = new NoteAnalysis(0,0,-1);
+		evaluatingSectionIndex = new AtomicInteger(0);
+		channelBufferBytes = new byte[0];
+		doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+		doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+		totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+		recordId = null;
+		playTime = 0;
+		delayProcessed = false;
+		offsetMS = 0;
+		dynamicOffset = 0;
+		handlerSwitch = false;
+	}
+	
+	public MusicXmlBasicInfo getMusicXmlBasicInfo(Integer songId){
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+		if (songId == null) {
+			musicXmlBasicInfo = songMusicXmlMap.values().stream().findFirst().get();
+		} else {
+			musicXmlBasicInfo = songMusicXmlMap.get(songId);
+		}
+		return musicXmlBasicInfo;
+	}
+	
+	public MusicXmlSection getCurrentMusicSection(Integer songId, int sectionIndex){
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+		return musicXmlBasicInfo.getMusicXmlSectionMap().get(sectionIndex);
+	}
+
+	public MusicXmlNote getCurrentMusicNote(Integer songId, Integer noteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(noteIndex == null){
+			noteIndex = processingNote.getMusicalNotesIndex();
+		}
+		final int index = noteIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		int totalNoteIndex = getTotalMusicNoteIndex(null);
+		if (musicXmlBasicInfo != null && index <= totalNoteIndex) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == index).findFirst().get();
+		}
+
+		return null;
+	}
+
+	public int getTotalMusicNoteIndex(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMusicalNotesIndex()).distinct().max(Integer::compareTo).get();
+		}
+
+		return -1;
+	}
+
+	public List<MusicXmlNote> getCurrentMusicSection(Integer songId, Integer sectionIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(sectionIndex == null){
+			sectionIndex = processingNote.getSectionIndex();
+		}
+		final int index = sectionIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMeasureIndex() == index)
+					.sorted(Comparator.comparing(MusicXmlNote::getMusicalNotesIndex)).collect(Collectors.toList());
+		}
+
+		return null;
+	}
+
+	public int getTotalMusicSectionSize(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return (int) musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMeasureIndex()).distinct().count();
+		}
+
+		return -1;
+	}
+	
+	public int getMusicSectionIndex(Integer songId, int musicXmlNoteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		
+		if(getTotalMusicNoteIndex(null) < musicXmlNoteIndex){
+			return -1;
+		}
+		
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == musicXmlNoteIndex).findFirst().get().getMeasureIndex();
+		}
+
+		return -1;
+	}
+	
+	public byte[] getChannelBufferBytes() {
+		return channelBufferBytes;
+	}
+
+	public void setChannelBufferBytes(byte[] channelBufferBytes) {
+		this.channelBufferBytes = channelBufferBytes;
+	}
+
+	public AtomicInteger getEvaluatingSectionIndex() {
+		return evaluatingSectionIndex;
+	}
+
+	public void handle(float[] samples, AudioFormat audioFormat){
+		
+		//YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length , audioFormat.getSampleRate());
+		//int playFrequency = (int) frequencyDetector.getFrequency(samples);
+		
+		int playFrequency = -1;
+		if(!percussionList.contains(subjectId)){
+			playFrequency = (int)detector.getPitch(samples).getPitch();
+		}
+		
+		int splDb = (int) Signals.soundPressureLevel(samples);
+		int power = (int) Signals.power(samples);
+		int amplitude = (int) Signals.norm(samples);
+		//float rms = Signals.rms(samples);
+		
+		double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
+		
+		playTime += durationTime;
+		
+		// Get the current note information
+		MusicXmlNote musicXmlNote = getCurrentMusicNote(null,null);
+
+		if (musicXmlNote == null) {
+			return;
+		}
+		
+		// Fetch the note currently being processed
+		NoteAnalysis noteAnalysis = getProcessingNote();
+		if(noteAnalysis == null || noteAnalysis.getDurationTime() == 0) {
+			noteAnalysis = new NoteAnalysis(musicXmlNote.getMusicalNotesIndex(), musicXmlNote.getMeasureIndex(), (int)musicXmlNote.getFrequency(), musicXmlNote.getDuration());
+		}
+		
+		evaluatingSectionIndex.set(noteAnalysis.getSectionIndex());
+		
+		if (noteAnalysis.getMusicalNotesIndex() >= 0 && noteAnalysis.getMusicalNotesIndex() <= getTotalMusicNoteIndex(null)) {
+			
+			LOGGER.info("user:{}  delayProcessed:{}  dynamicOffset:{}  Frequency:{}  splDb:{}  amplitude:{}  time:{}", user, delayProcessed, dynamicOffset, playFrequency, splDb, amplitude, playTime);
+			
+			ChunkAnalysis chunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude);
+			
+			if(totalChunkAnalysisList.size() > 0){
+				if(totalChunkAnalysisList.get(totalChunkAnalysisList.size() - 1).getAmplitude() + 2 < chunkAnalysis.getAmplitude()){
+					chunkAnalysis.setPeak(true);// only relevant for percussion instruments
+				}
+			}
+			totalChunkAnalysisList.add(chunkAnalysis);
+			
+			boolean flag = false;
+			if(percussionList.contains(subjectId)){
+				flag = chunkAnalysis.getAmplitude() > hardLevel.getAmplitudeThreshold();
+			}else{
+				flag = chunkAnalysis.getFrequency() > MIN_FREQUECY && chunkAnalysis.getFrequency() < MAX_FREQUECY;
+			}
+			
+			if(delayProcessed == false && flag){
+				
+				delayProcessed = true;
+				
+				// Compute the latency offset
+				//playTime = musicXmlNote.getTimeStamp() + durationTime;
+				dynamicOffset = chunkAnalysis.getStartTime() - musicXmlNote.getTimeStamp();
+				if(100 * dynamicOffset / musicXmlNote.getDuration() > (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()))){
+					dynamicOffset = 0;
+				}
+			}
+			
+			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset)) {
+
+				if (musicXmlNote.getDontEvaluating()) {
+					noteAnalysis.setIgnore(true);
+				}
+				
+				// Judge the rhythm (tempo is correct if the pitch is uninterrupted throughout the note's duration)
+				boolean tempo = true;
+				if (percussionList.contains(subjectId)) {
+					noteAnalysis.setPlayFrequency(-1);
+					tempo = computeTempoWithAmplitude2(musicXmlNote);
+				}else{
+					noteAnalysis.setPlayFrequency(computeFrequency(musicXmlNote));
+					tempo = computeTempoWithFrequency(musicXmlNote);
+				}
+				
+				noteAnalysis.setTempo(tempo);
+				
+				evaluateForNote(musicXmlNote, noteAnalysis);
+
+				LOGGER.info("Current note index [{}]  expected frequency:{}  played frequency:{}  tempo:{}", noteAnalysis.getMusicalNotesIndex(), musicXmlNote.getFrequency(), noteAnalysis.getPlayFrequency(),
+						noteAnalysis.isTempo());
+				
+				doneNoteAnalysisList.add(noteAnalysis);
+				
+				// Prepare to process the next note
+				int nextNoteIndex = musicXmlNote.getMusicalNotesIndex() + 1;
+				float nextNoteFrequence = -1;
+				double standDuration = 0;
+				MusicXmlNote nextMusicXmlNote = getCurrentMusicNote(null, nextNoteIndex);
+				if(nextMusicXmlNote != null){
+					nextNoteFrequence = nextMusicXmlNote.getFrequency();
+					standDuration = nextMusicXmlNote.getDuration();
+				}
+				
+				NoteAnalysis nextNoteAnalysis = new NoteAnalysis(nextNoteIndex, getMusicSectionIndex(null, nextNoteIndex), (int)nextNoteFrequence, standDuration);
+
+				noteAnalysis = nextNoteAnalysis;
+
+			}
+
+			setProcessingNote(noteAnalysis);
+		}
+		
+	}
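+	/*
+	 * Editor's note (not part of the original commit): a worked example of the chunk-duration
+	 * arithmetic used at the top of handle(). For 16-bit mono audio the expression
+	 *
+	 *     1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8)
+	 *
+	 * reduces to samples.length / sampleRate * 1000, i.e. the buffer length in milliseconds.
+	 * Assuming a 44100 Hz sample rate and a 2048-sample buffer (both values are illustrative
+	 * assumptions, not taken from this commit):
+	 *
+	 *     1000 * (2048 * 2) / 44100f / (16 / 8) ≈ 46.4 ms per analysed chunk
+	 */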
+	
+
+	public int evaluateForSection(int sectionIndex, int subjectId){
+
+		int score = -1;
+		if(doneSectionAnalysisList.size() >= getTotalMusicSectionSize(null)){
+			return score;
+		}
+		
+		// Fetch all notes of the current measure
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.getSectionIndex() == sectionIndex).collect(Collectors.toList());
+		
+		long ignoreSize = noteAnalysisList.stream().filter(t -> t.isIgnore()).count();
+
+		SectionAnalysis sectionAnalysis = new SectionAnalysis();
+		sectionAnalysis.setIndex(sectionIndex);
+		sectionAnalysis.setNoteNum(noteAnalysisList.size());
+		sectionAnalysis.setIsIngore(ignoreSize == noteAnalysisList.size());
+		
+		// Decide whether this measure should be scored
+		MusicXmlSection musicXmlSection = getCurrentMusicSection(null, sectionIndex);
+		if(noteAnalysisList.size() == musicXmlSection.getNoteNum()){
+			// Fetch the notes that need to be evaluated
+			List<NoteAnalysis>  noteList = noteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+			
+			if(noteList != null && noteList.size() > 0){
+				score = noteList.stream().mapToInt(t -> t.getScore()).sum() / noteList.size();
+			}
+			sectionAnalysis.setDurationTime(noteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+			sectionAnalysis.setScore(score);
+
+			LOGGER.info("Section score: {}", sectionAnalysis);
+			doneSectionAnalysisList.add(sectionAnalysis);
+		}
+		
+		return score;
+	}
+	
+	public Map<String, Integer> evaluateForMusic() {
+
+		Map<String, Integer> result = new HashMap<String, Integer>();
+		
+		result.put("playTime", (int) doneNoteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+		
+		// Fetch the notes that need to be evaluated
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+
+		if (noteAnalysisList != null && noteAnalysisList.size() > 0) {
+			int intonationScore = 0;
+			int tempoScore = 0;
+			int integrityScore = 0;
+			int totalScore = 0;
+
+			for (NoteAnalysis note : noteAnalysisList) {
+				intonationScore += note.getIntonationScore();
+				tempoScore += note.getTempoScore();
+				integrityScore += note.getIntegrityScore();
+				totalScore += note.getScore();
+			}
+
+			tempoScore = tempoScore / noteAnalysisList.size();
+			intonationScore = intonationScore / noteAnalysisList.size();
+			integrityScore = integrityScore / noteAnalysisList.size();
+
+			result.put("cadence", tempoScore);
+			result.put("intonation", intonationScore);
+			result.put("integrity", integrityScore);
+			result.put("recordId", recordId.intValue());
+
+			int score = totalScore / noteAnalysisList.size();
+
+			// Average score
+			if (getMusicXmlBasicInfo(null).getSubjectId() == 23 || getMusicXmlBasicInfo(null).getSubjectId() == 113) {
+				score = tempoScore;
+			}
+			result.put("score", score);
+		}
+		return result;
+	}
+	
+
+	public void evaluateForNote(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		
+		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+		
+		double durationTime = chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime() - chunkAnalysisList.get(0).getStartTime();
+		
+		double playDurationTime = 0;
+		
+		if (percussionList.contains(subjectId)) {
+			if (noteAnalysis.getFrequency() == -1) {// rest
+				if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}else{
+				int beatTimes = (int) chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count();
+				if(beatTimes == 0){
+					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+				}else if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}
+		} else {
+			
+			NotePlayResult notePlayResult = queryNoteFrequency(musicXmlNote, noteAnalysis.getPlayFrequency());
+			
+			if (noteAnalysis.getFrequency() == -1) {// rest
+
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= MIN_FREQUECY).mapToDouble(t -> t.getDurationTime()).sum();
+
+				if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else if (playDurationTime * 100 / durationTime < hardLevel.getIntegrityRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
+				} else if (notePlayResult.getStatus() == false) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			} else {
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() > MIN_FREQUECY && t.getFrequency() < MAX_FREQUECY)
+						.mapToDouble(t -> t.getDurationTime()).sum();
+
+				if (playDurationTime * 100 / durationTime < hardLevel.getNotPlayRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+					LOGGER.info("Not played: {}", playDurationTime * 100 / durationTime);
+				} else if (playDurationTime * 100 / durationTime < hardLevel.getIntegrityRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
+					LOGGER.info("Insufficient integrity: {}", playDurationTime * 100 / durationTime);
+				} else if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else if (notePlayResult.getStatus() == false) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}
+			
+		}
+
+		// Compute the intonation score (in cents)
+		int tempoScore = 0;
+		int integrityScore = 0;
+		int intonationScore = 100 - new BigDecimal(Math.abs(YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getPlayFrequency())
+				- YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getFrequency()))).multiply(new BigDecimal(20)).divide(new BigDecimal(17), BigDecimal.ROUND_UP)
+				.setScale(0, BigDecimal.ROUND_UP).intValue();
+		if (intonationScore < 0) {
+			intonationScore = 0;
+		} else if (intonationScore > 100) {
+			intonationScore = 100;
+		}
+
+		if (noteAnalysis.getMusicalErrorType() == NoteErrorType.NOT_PLAY) {
+			intonationScore = 0;
+		} else {
+
+			if (noteAnalysis.isTempo()) {
+				tempoScore = 100;
+				noteAnalysis.setTempoScore(tempoScore);
+			}
+
+			integrityScore = (int) (playDurationTime * 100 * 100 / hardLevel.getIntegrityRange() / durationTime);
+			if (integrityScore > 100) {
+				integrityScore = 100;
+			}
+			noteAnalysis.setIntegrityScore(integrityScore);
+		}
+		noteAnalysis.setIntonationScore(intonationScore);
+		if (percussionList.contains(subjectId)) {
+			noteAnalysis.setScore(tempoScore);
+		} else {
+			noteAnalysis.setScore(new BigDecimal(intonationScore + tempoScore + integrityScore).divide(new BigDecimal(3), 2).setScale(0, BigDecimal.ROUND_UP)
+					.intValue());
+		}
+	}
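+	/*
+	 * Editor's note (not part of the original commit): a worked example of the cent-based intonation
+	 * score above, assuming YINPitchDetector.hertzToAbsoluteCent follows the usual cent convention,
+	 * so that the difference of two absolute-cent values equals 1200 * log2(f1 / f2):
+	 *
+	 *     // played a quarter tone (about 50 cents) away from the expected pitch
+	 *     int centsOff = 50;
+	 *     int score = 100 - new BigDecimal(centsOff).multiply(new BigDecimal(20))
+	 *             .divide(new BigDecimal(17), BigDecimal.ROUND_UP)
+	 *             .setScale(0, BigDecimal.ROUND_UP).intValue();   // 100 - 59 = 41
+	 *
+	 * A full semitone (100 cents) gives 100 - 118 = -18, which the clamp right after the formula raises to 0.
+	 */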
+	
+	private int computeFrequency(MusicXmlNote musicXmlNote) {
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		
+		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		LOGGER.info("------------TimeStamp:{}  Duration:{}  floatingRange:{}  StartTime:{}  EndTime:{}------------", musicXmlNote.getTimeStamp(), musicXmlNote.getDuration(), floatingRange, startTime, endTime);
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+
+		LOGGER.info("------------ correctedStartTime:{}  correctedEndTime:{}------------", correctedStartTime, correctedEndTime);
+		
+		// Take only part of the valid signal, based on the integrity range
+		int elementSize = chunkAnalysisList.size() * hardLevel.getIntegrityRange() / 100;
+		chunkAnalysisList = chunkAnalysisList.subList(0, elementSize);
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return -1;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		LOGGER.info("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkAnalysisList.get(chunkAnalysisList.size() - 1)
+				.getEndTime());
+		
+		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		
+		List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency()).filter(t -> t.doubleValue() > MIN_FREQUECY && t.doubleValue() < MAX_FREQUECY)
+				.collect(Collectors.toList());
+		
+		if (chunkFrequencyList.size() == 0) {
+			return -1;
+		}
+
+		int frequency = (int) (chunkFrequencyList.stream().mapToInt(t -> t).sum() / chunkFrequencyList.size());
+
+		return frequency;
+	}
+	
+	/**
+	 * Within the note's duration there must be one and only one pitch, it must be uninterrupted, and it must start within the allowed range.
+	 * When the pitch is the same as the previous note's, the two notes must be separated by a break.
+	 * @param musicXmlNote
+	 * @return
+	 */
+	private boolean computeTempoWithFrequency(MusicXmlNote musicXmlNote){
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		
+		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+		
+		// Take only part of the valid signal, based on the integrity range
+		int elementSize = chunkAnalysisList.size() * hardLevel.getIntegrityRange() / 100;
+		List<ChunkAnalysis> chunkList = chunkAnalysisList.subList(0, elementSize);
+		
+		if(chunkList == null || chunkList.size() == 0){
+			return false;
+		}
+		
+		if (musicXmlNote.getFrequency() == -1) {// rest
+			return chunkList.stream().filter(t -> t.getFrequency() > MIN_FREQUECY).count() <= 1;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) < Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();
+
+		ChunkAnalysis lastChunkAnalysis = null;
+		if (chunkAnalysisOptional.isPresent()) {
+			lastChunkAnalysis = chunkAnalysisOptional.get();
+		}
+		
+		if(lastChunkAnalysis == null){
+			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
+		}
+		
+		/*List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		
+		if(chunkList.size() == 0){
+			return false;
+		}*/
+		
+		NoteFrequencyRange noteFrequencyRange = null;
+		ChunkAnalysis chunkAnalysis = null;
+		boolean tempo = false;
+		boolean isContinue = true;
+		int unplayedSize = 0;
+		int firstPeakIndex = -1;
+		for (int i = 0; i < chunkList.size(); i++) {
+			chunkAnalysis = chunkList.get(i);
+			if (chunkAnalysis != null) {
+				if (chunkAnalysis.getFrequency() > MIN_FREQUECY) {
+					
+					tempo = true;
+					if (firstPeakIndex == -1) {
+						firstPeakIndex = i;
+						noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, chunkAnalysis.getFrequency());
+					} else if (noteFrequencyRange.getMinFrequency() > chunkAnalysis.getFrequency()
+							|| chunkAnalysis.getFrequency() > noteFrequencyRange.getMaxFrequency()) {// check whether it is the same pitch
+						// allow the pitch one octave below or above
+						if(!((noteFrequencyRange.getMinFrequency() < chunkAnalysis.getFrequency() * 2 && chunkAnalysis.getFrequency() * 2 < noteFrequencyRange.getMaxFrequency())
+								|| (noteFrequencyRange.getMinFrequency() < chunkAnalysis.getFrequency() / 2 && chunkAnalysis.getFrequency() / 2 < noteFrequencyRange.getMaxFrequency()))){
+							tempo = false;
+							LOGGER.info("Tempo error: not the same pitch [{}]: {}-{}", chunkAnalysis.getFrequency(), noteFrequencyRange.getMinFrequency(), noteFrequencyRange.getMaxFrequency());
+							break;
+						}
+					}
+					if (isContinue == false) {
+						if ((i + 1) * 100 / chunkAnalysisList.size() < hardLevel.getIntegrityRange()) {// compare as a percentage; plain integer division here is always 0
+							if (unplayedSize > 0) {
+								tempo = false;
+								LOGGER.info("Tempo error: the signal is not continuous");
+								break;
+							}
+						}
+					}
+				} else {
+					if (tempo == true) {
+						isContinue = false;
+						unplayedSize++;
+					}
+				}
+			}
+		}
+		
+		if (tempo) {
+			// Check the onset time
+			if(firstPeakIndex * 100 /chunkAnalysisList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
+				tempo = false;
+				LOGGER.info("Tempo error: onset is too late");
+			}else{
+				// Check whether the pitch carried over from the previous note
+				if(firstChunkAnalysis.getFrequency() > MIN_FREQUECY && lastChunkAnalysis.getFrequency() > MIN_FREQUECY){
+					tempo = new NoteFrequencyRange(standardFrequecy, firstChunkAnalysis.getFrequency()).equals(new NoteFrequencyRange(standardFrequecy, lastChunkAnalysis.getFrequency())) == false;
+					if(tempo == false){
+						LOGGER.info("Tempo error: caused by the previous note [{}] carrying over", lastChunkAnalysis.getFrequency());
+					}
+				}
+			}
+		}
+		
+		return tempo;
+	}
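+	/*
+	 * Editor's note (not part of the original commit): a minimal sketch of the octave check used in
+	 * the loop above. A chunk is still treated as the same note when doubling or halving its
+	 * frequency lands inside the expected NoteFrequencyRange (values below are illustrative and
+	 * assume the range spans roughly a semitone around the target pitch):
+	 *
+	 *     NoteFrequencyRange range = new NoteFrequencyRange(standardFrequecy, 440);
+	 *     double played = 220;   // roughly one octave below the target
+	 *     boolean octaveMatch =
+	 *             (range.getMinFrequency() < played * 2 && played * 2 < range.getMaxFrequency())
+	 *          || (range.getMinFrequency() < played / 2 && played / 2 < range.getMaxFrequency());
+	 *     // octaveMatch is true here, so the chunk is not flagged as a different note
+	 */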
+	
+	private boolean computeTempoWithAmplitude2(MusicXmlNote musicXmlNote) {
+
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		
+		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		LOGGER.info("------------TimeStamp:{}  floatingRange:{}  StartTime:{}  EndTime:{}------------", musicXmlNote.getTimeStamp(), floatingRange, startTime, endTime);
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+		
+		// Take only part of the valid signal, based on the integrity range
+		int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())) / 100;
+		List<ChunkAnalysis> chunkList = chunkAnalysisList.subList(0, elementSize);
+		
+		if(chunkList == null || chunkList.size() == 0){
+			return false;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		LOGGER.info("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkList.get(chunkList.size() - 1)
+				.getEndTime());
+
+		if (musicXmlNote.getFrequency() == -1) {// rest
+			
+			LOGGER.info("--Amplitude:{}  Denominator:{}",chunkList.stream().map(t -> t).collect(Collectors.toList()), musicXmlNote.getDenominator());
+			return chunkList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
+		}
+		
+		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) < Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();
+
+		ChunkAnalysis lastChunkAnalysis = null;
+		if (chunkAnalysisOptional.isPresent()) {
+			lastChunkAnalysis = chunkAnalysisOptional.get();
+		}
+		if(lastChunkAnalysis == null){
+			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
+		}
+		
+		List<Integer> chunkAmplitudeList = chunkList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
+
+		chunkAmplitudeList.add(0, lastChunkAnalysis.getAmplitude());
+		
+		LOGGER.info("--Amplitude:{}  Denominator:{}",chunkAmplitudeList.stream().map(t -> t).collect(Collectors.toList()), musicXmlNote.getDenominator());
+		
+		// Check whether there are multiple peaks
+		boolean tempo = false;
+		boolean isContinue = true;
+		int firstPeakIndex = -1;
+		int peakSize = 0;
+		for (int i = 1; i < chunkAmplitudeList.size(); i++) {
+			if (Math.abs(chunkAmplitudeList.get(i) - chunkAmplitudeList.get(i - 1)) < hardLevel.getAmplitudeThreshold()) {
+				continue;
+			}
+			if (chunkAmplitudeList.get(i) > hardLevel.getAmplitudeThreshold() && chunkAmplitudeList.get(i) > chunkAmplitudeList.get(i - 1)) {
+				tempo = true;
+				if(firstPeakIndex == -1){
+					firstPeakIndex = i;
+					peakSize++;
+				}
+				if (isContinue == false) {
+					tempo = false;
+					peakSize++;
+					break;
+				}
+			} else {
+				if (tempo == true) {
+					isContinue = false;
+				}
+			}
+		}
+		
+		if(peakSize == 0){
+			tempo = lastChunkAnalysis.isPeak();
+		}else if(peakSize == 1){
+			tempo = true;
+		}else{
+			tempo = false;
+		}
+		
+		if (tempo) {
+			// Check the onset time
+			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) * 2){
+				LOGGER.info("Out of range: {}", (firstPeakIndex - 1) * 100 /chunkAmplitudeList.size());
+				tempo = false;
+			}
+		}
+		
+		return tempo;
+	}
+	
+	private double queryFirstNoteStartTime(List<ChunkAnalysis> chunkAnalysisList, MusicXmlNote musicXmlNote) {
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			LOGGER.info("No data found, correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
+			return musicXmlNote.getTimeStamp() + dynamicOffset;
+		}
+		
+		if (percussionList.contains(subjectId)) {
+			Optional<ChunkAnalysis> optional = chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).findFirst();
+			if(optional.isPresent()){
+				LOGGER.info("Signal found within the range, correctedStartTime:{}", optional.get().getStartTime());
+				return optional.get().getStartTime();
+			}else{
+				LOGGER.info("No signal found within the range, correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
+				return musicXmlNote.getTimeStamp() + dynamicOffset;
+			}
+		}
+		
+		// Check whether this note is the same as the previous one
+		if(musicXmlNote.getMusicalNotesIndex() > 0){
+			MusicXmlNote preMusicXmlNote = getCurrentMusicNote(null, musicXmlNote.getMusicalNotesIndex() - 1);
+			if((int)preMusicXmlNote.getFrequency() == (int)musicXmlNote.getFrequency()){
+				Optional<ChunkAnalysis> optional = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= MIN_FREQUECY).findFirst();
+				if(optional.isPresent()){
+					LOGGER.info("Same pitch as the previous note, with a break, correctedStartTime:{}", optional.get().getStartTime());
+					return optional.get().getEndTime();
+				}else{
+					LOGGER.info("Same pitch as the previous note, no break, correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
+					return musicXmlNote.getTimeStamp() + dynamicOffset;
+				}
+			}
+		}
+
+		NoteFrequencyRange standardNote = new NoteFrequencyRange(standardFrequecy, musicXmlNote.getFrequency());
+
+		NoteFrequencyRange noteFrequencyRange = null;
+
+		for (ChunkAnalysis ca : chunkAnalysisList) {
+			noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, ca.getFrequency());
+			if (standardNote.equals(noteFrequencyRange)) {
+				LOGGER.info("Signal found within the range, correctedStartTime:{}", ca.getStartTime());
+				return ca.getStartTime();
+			}
+		}
+		
+		LOGGER.info("No signal found within the range, correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
+
+		//return chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime();
+		return musicXmlNote.getTimeStamp() + dynamicOffset;
+	}
+	
+	public static void main(String[] args) {
+		double[] midi = new double[128];
+		int standardPitch = 440; // a is 440 hz...
+		for (int x = 0; x < midi.length; ++x)
+		{
+		   midi[x] = new BigDecimal(standardPitch).multiply(new BigDecimal(Math.pow(2, new BigDecimal(x-69).divide(new BigDecimal(12),6,BigDecimal.ROUND_HALF_UP).doubleValue()))).doubleValue();
+		   System.out.println("x=" + x +"  "+ midi[x]);
+		}
+		
+	}
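+	/*
+	 * Editor's note (not part of the original commit): the loop in main() evaluates the standard
+	 * equal-temperament mapping f(x) = 440 * 2^((x - 69) / 12), so midi[69] = 440 Hz (A4) and
+	 * midi[60] ≈ 261.63 Hz (middle C). Without the BigDecimal plumbing, the same table can be
+	 * built with:
+	 *
+	 *     for (int x = 0; x < midi.length; x++) {
+	 *         midi[x] = standardPitch * Math.pow(2, (x - 69) / 12.0);
+	 *     }
+	 */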
+	
+}

+ 842 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext2.java

@@ -0,0 +1,842 @@
+package com.yonge.netty.dto;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.yonge.audio.analysis.Signals;
+import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.audio.utils.ArrayUtil;
+import com.yonge.netty.dto.NoteAnalysis.NoteErrorType;
+import com.yonge.netty.entity.MusicXmlBasicInfo;
+import com.yonge.netty.entity.MusicXmlNote;
+import com.yonge.netty.entity.MusicXmlSection;
+import com.yonge.netty.server.processor.WaveformWriter;
+
+/**
+ * User channel context
+ */
+public class UserChannelContext2 {
+	
+	private final static Logger LOGGER = LoggerFactory.getLogger(UserChannelContext2.class);
+	
+	private int offsetMS = 350;
+	
+	private String platform;
+	
+	private Long recordId;
+	
+	private Integer subjectId;
+	
+	private int beatDuration;
+	
+	private int beatByteLength;
+	
+	// Mapping between songs and their MusicXML data
+	private ConcurrentHashMap<Integer, MusicXmlBasicInfo> songMusicXmlMap = new ConcurrentHashMap<Integer, MusicXmlBasicInfo>();
+
+	private WaveformWriter waveFileProcessor;
+
+	private NoteAnalysis processingNote = new NoteAnalysis(0, 0, -1);
+	
+	private AtomicInteger evaluatingSectionIndex = new AtomicInteger(0);
+	
+	private List<NoteAnalysis> doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+	
+	private List<SectionAnalysis> doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+	
+	private List<ChunkAnalysis> chunkAnalysisList = new ArrayList<ChunkAnalysis>();
+	
+	private byte[] channelBufferBytes = new byte[0];
+	
+	private double playTime;
+	
+	private double receivedTime;
+	
+	private List<ChunkAnalysis> lastChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+	
+	private HardLevelEnum hardLevel = HardLevelEnum.ADVANCED;
+	
+	public void init(String platform, String heardLevel, int subjectId, int beatDuration) {
+		this.platform = platform;
+		this.subjectId = subjectId;
+		this.beatDuration = beatDuration;
+		this.beatByteLength = WaveformWriter.SAMPLE_RATE * WaveformWriter.BITS_PER_SAMPLE / 8 * beatDuration / 1000;
+		hardLevel = HardLevelEnum.valueOf(heardLevel);
+	}
+	
+	public byte[] skipMetronome(byte[] datas) {
+		if (beatByteLength > 0) {
+			if (datas.length <= beatByteLength) {
+				beatByteLength -= datas.length;
+				return new byte[0];
+			}
+			if(beatByteLength % 2 != 0){
+				beatByteLength++;
+			}
+			datas = ArrayUtil.extractByte(datas, beatByteLength, datas.length - 1);
+			beatByteLength = 0;
+		}
+		return datas;
+	}
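+	/*
+	 * Editor's note (not part of the original commit): beatByteLength, set in init() above, is the
+	 * number of PCM bytes covered by the metronome count-in, which skipMetronome() drops before
+	 * analysis starts. Assuming WaveformWriter.SAMPLE_RATE = 44100 and BITS_PER_SAMPLE = 16 (both
+	 * are assumptions, not confirmed by this commit), a 2000 ms count-in gives:
+	 *
+	 *     44100 * 16 / 8 * 2000 / 1000 = 176400 bytes skipped
+	 */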
+	
+	public Long getRecordId() {
+		return recordId;
+	}
+
+	public void setRecordId(Long recordId) {
+		this.recordId = recordId;
+	}
+
+	public int getOffsetMS() {
+		return offsetMS;
+	}
+
+	public void setOffsetMS(int offsetMS) {
+		this.offsetMS = offsetMS;
+	}
+
+	public HardLevelEnum getHardLevel() {
+		return hardLevel;
+	}
+
+	public ConcurrentHashMap<Integer, MusicXmlBasicInfo> getSongMusicXmlMap() {
+		return songMusicXmlMap;
+	}
+
+	public WaveformWriter getWaveFileProcessor() {
+		return waveFileProcessor;
+	}
+
+	public void setWaveFileProcessor(WaveformWriter waveFileProcessor) {
+		this.waveFileProcessor = waveFileProcessor;
+	}
+
+	public NoteAnalysis getProcessingNote() {
+		return processingNote;
+	}
+
+	public void setProcessingNote(NoteAnalysis processingNote) {
+		this.processingNote = processingNote;
+	}
+	
+	public List<SectionAnalysis> getDoneSectionAnalysisList() {
+		return doneSectionAnalysisList;
+	}
+
+	public List<NoteAnalysis> getDoneNoteAnalysisList() {
+		return doneNoteAnalysisList;
+	}
+
+	public void resetUserInfo() {
+		beatByteLength = WaveformWriter.SAMPLE_RATE * WaveformWriter.BITS_PER_SAMPLE / 8 * beatDuration / 1000;
+		waveFileProcessor = null;
+		processingNote = new NoteAnalysis(0,0,-1);
+		evaluatingSectionIndex = new AtomicInteger(0);
+		channelBufferBytes = new byte[0];
+		doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+		doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+		chunkAnalysisList = new ArrayList<ChunkAnalysis>();
+		recordId = null;
+		playTime = 0;
+		receivedTime = 0;
+		lastChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+	}
+	
+	public MusicXmlBasicInfo getMusicXmlBasicInfo(Integer songId){
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+		if (songId == null) {
+			musicXmlBasicInfo = songMusicXmlMap.values().stream().findFirst().get();
+		} else {
+			musicXmlBasicInfo = songMusicXmlMap.get(songId);
+		}
+		return musicXmlBasicInfo;
+	}
+	
+	public MusicXmlSection getCurrentMusicSection(Integer songId, int sectionIndex){
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+		return musicXmlBasicInfo.getMusicXmlSectionMap().get(sectionIndex);
+	}
+
+	public MusicXmlNote getCurrentMusicNote(Integer songId, Integer noteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(noteIndex == null){
+			noteIndex = processingNote.getMusicalNotesIndex();
+		}
+		final int index = noteIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null && index <= getTotalMusicNoteIndex(null)) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == index).findFirst().get();
+		}
+
+		return null;
+	}
+
+	public int getTotalMusicNoteIndex(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMusicalNotesIndex()).distinct().max(Integer::compareTo).get();
+		}
+
+		return -1;
+	}
+
+	public List<MusicXmlNote> getCurrentMusicSection(Integer songId, Integer sectionIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(sectionIndex == null){
+			sectionIndex = processingNote.getSectionIndex();
+		}
+		final int index = sectionIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == index)
+					.sorted(Comparator.comparing(MusicXmlNote::getMusicalNotesIndex)).collect(Collectors.toList());
+		}
+
+		return null;
+	}
+
+	public int getTotalMusicSectionSize(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return (int) musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMeasureIndex()).distinct().count();
+		}
+
+		return -1;
+	}
+	
+	public int getMusicSectionIndex(Integer songId, int musicXmlNoteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		
+		if(getTotalMusicNoteIndex(null) < musicXmlNoteIndex){
+			return -1;
+		}
+		
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == musicXmlNoteIndex).findFirst().get().getMeasureIndex();
+		}
+
+		return -1;
+	}
+	
+	public byte[] getChannelBufferBytes() {
+		return channelBufferBytes;
+	}
+
+	public void setChannelBufferBytes(byte[] channelBufferBytes) {
+		this.channelBufferBytes = channelBufferBytes;
+	}
+
+	public AtomicInteger getEvaluatingSectionIndex() {
+		return evaluatingSectionIndex;
+	}
+
+	public void handle(float[] samples, AudioFormat audioFormat){
+		
+		YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length , audioFormat.getSampleRate());
+
+		int playFrequency = (int) frequencyDetector.getFrequency(samples);
+		int splDb = (int) Signals.soundPressureLevel(samples);
+		int power = (int) Signals.power(samples);
+		int amplitude = (int) Signals.norm(samples);
+		float rms = Signals.rms(samples);
+		
+		double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
+		
+		receivedTime += durationTime;
+		
+		/*if(offsetMS == 0){
+			return;
+		}*/
+		
+		if(receivedTime < offsetMS){
+			return;
+		}
+		
+		playTime += durationTime;
+		
+		// Get the current note information
+		MusicXmlNote musicXmlNote = getCurrentMusicNote(null,null);
+
+		if (musicXmlNote == null) {
+			return;
+		}
+		
+		// Fetch the note currently being processed
+		NoteAnalysis noteAnalysis = getProcessingNote();
+		if(noteAnalysis == null || noteAnalysis.getDurationTime() == 0) {
+			noteAnalysis = new NoteAnalysis(musicXmlNote.getMusicalNotesIndex(), musicXmlNote.getMeasureIndex(), (int)musicXmlNote.getFrequency(), musicXmlNote.getDuration());
+		}
+		
+		evaluatingSectionIndex.set(noteAnalysis.getSectionIndex());
+		
+		if (noteAnalysis.getMusicalNotesIndex() >= 0 && noteAnalysis.getMusicalNotesIndex() <= getTotalMusicNoteIndex(null)) {
+
+			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp())) {
+
+				LOGGER.info("------ Frequency:{}  splDb:{}  Power:{}  amplitude:{} time:{}------", playFrequency, splDb, power, amplitude, playTime);
+				
+				ChunkAnalysis lastChunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude);
+				
+				if(Math.abs(chunkAnalysisList.get(chunkAnalysisList.size() - 1).getFrequency() - lastChunkAnalysis.getFrequency()) > hardLevel.getFrequencyThreshold()){
+					lastChunkAnalysis.setFrequency(-1);
+				}
+				if(chunkAnalysisList.get(chunkAnalysisList.size() - 1).getAmplitude() + 2 < lastChunkAnalysis.getAmplitude()){
+					lastChunkAnalysis.setPeak(true);
+				}
+				
+				// The last chunk of each note
+				lastChunkAnalysisList.add(lastChunkAnalysis);
+				if(noteAnalysis.getMusicalNotesIndex() > 0){
+					lastChunkAnalysis = lastChunkAnalysisList.get(noteAnalysis.getMusicalNotesIndex() - 1);
+				}else{
+					lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
+				}
+
+				if (musicXmlNote.getDontEvaluating()) {
+					noteAnalysis.setIgnore(true);
+				}
+				
+				if(chunkAnalysisList.size() == 0){// tie (note carried over from the previous one)
+					
+				}
+				
+				noteAnalysis.setPlayFrequency(computeFrequency(chunkAnalysisList, lastChunkAnalysis, hardLevel.getFrequencyThreshold()));
+				
+				// Judge the rhythm (tempo is correct if the pitch is uninterrupted throughout the note's duration)
+				boolean tempo = true;
+				if (subjectId == 23 || subjectId == 113) {
+					if (musicXmlNote.getFrequency() == -1) {// rest
+						tempo = chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
+					}else{
+						tempo = computeTempoWithAmplitude2(musicXmlNote, chunkAnalysisList, lastChunkAnalysis);
+					}
+				}else{
+					if (musicXmlNote.getFrequency() == -1) {// rest
+						tempo = chunkAnalysisList.stream().filter(t -> t.getFrequency() > 100).count() <= 1;
+					}else{
+						tempo = computeTempoWithFrequency(musicXmlNote, chunkAnalysisList, lastChunkAnalysis);
+					}
+				}
+				
+				noteAnalysis.setDurationTime(chunkAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+				
+				noteAnalysis.setTempo(tempo);
+				
+				evaluateForNote(noteAnalysis);
+
+				LOGGER.info("Current note index [{}]  expected frequency:{}  played frequency:{}  tempo:{}", noteAnalysis.getMusicalNotesIndex(), musicXmlNote.getFrequency(), noteAnalysis.getPlayFrequency(),
+						noteAnalysis.isTempo());
+				
+				doneNoteAnalysisList.add(noteAnalysis);
+				
+				//lastChunkAnalysis = chunkAnalysisList.get(chunkAnalysisList.size() - 1);
+				
+				chunkAnalysisList.clear();
+
+				// Prepare to process the next note
+				int nextNoteIndex = musicXmlNote.getMusicalNotesIndex() + 1;
+				float nextNoteFrequence = -1;
+				double standDuration = 0;
+				MusicXmlNote nextMusicXmlNote = getCurrentMusicNote(null, nextNoteIndex);
+				if(nextMusicXmlNote != null){
+					nextNoteFrequence = nextMusicXmlNote.getFrequency();
+					standDuration = nextMusicXmlNote.getDuration();
+				}
+				
+				NoteAnalysis nextNoteAnalysis = new NoteAnalysis(nextNoteIndex, getMusicSectionIndex(null, nextNoteIndex), (int)nextNoteFrequence, standDuration);
+
+				noteAnalysis = nextNoteAnalysis;
+
+			} else {
+				
+				LOGGER.info("Frequency:{}  splDb:{}  Power:{}  amplitude:{}  rms:{}", playFrequency, splDb, power, amplitude, rms);
+				
+				chunkAnalysisList.add(new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude));
+				
+			}
+
+			setProcessingNote(noteAnalysis);
+		}
+		
+	}
+	
+
+	public int evaluateForSection(int sectionIndex, int subjectId){
+
+		int score = -1;
+		if(doneSectionAnalysisList.size() >= getTotalMusicSectionSize(null)){
+			return score;
+		}
+		
+		// Fetch all notes of the current measure
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.getSectionIndex() == sectionIndex).collect(Collectors.toList());
+		
+		long ignoreSize = noteAnalysisList.stream().filter(t -> t.isIgnore()).count();
+
+		SectionAnalysis sectionAnalysis = new SectionAnalysis();
+		sectionAnalysis.setIndex(sectionIndex);
+		sectionAnalysis.setNoteNum(noteAnalysisList.size());
+		sectionAnalysis.setIsIngore(ignoreSize == noteAnalysisList.size());
+		
+		// Decide whether this measure should be scored
+		MusicXmlSection musicXmlSection = getCurrentMusicSection(null, sectionIndex);
+		if(noteAnalysisList.size() == musicXmlSection.getNoteNum()){
+			// Fetch the notes that need to be evaluated
+			List<NoteAnalysis>  noteList = noteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+			
+			if(noteList != null && noteList.size() > 0){
+				score = noteList.stream().mapToInt(t -> t.getScore()).sum() / noteList.size();
+			}
+			sectionAnalysis.setDurationTime(noteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+			sectionAnalysis.setScore(score);
+
+			LOGGER.info("Section score: {}", sectionAnalysis);
+			doneSectionAnalysisList.add(sectionAnalysis);
+		}
+		
+		return score;
+	}
+	
+	public Map<String, Integer> evaluateForMusic() {
+
+		Map<String, Integer> result = new HashMap<String, Integer>();
+		
+		result.put("playTime", (int) doneNoteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+		
+		// Fetch the notes that need to be evaluated
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+
+		if (noteAnalysisList != null && noteAnalysisList.size() > 0) {
+			int intonationScore = 0;
+			int tempoScore = 0;
+			int integrityScore = 0;
+			int totalScore = 0;
+
+			for (NoteAnalysis note : noteAnalysisList) {
+				intonationScore += note.getIntonationScore();
+				tempoScore += note.getTempoScore();
+				integrityScore += note.getIntegrityScore();
+				totalScore += note.getScore();
+			}
+
+			tempoScore = tempoScore / noteAnalysisList.size();
+			intonationScore = intonationScore / noteAnalysisList.size();
+			integrityScore = integrityScore / noteAnalysisList.size();
+
+			result.put("cadence", tempoScore);
+			result.put("intonation", intonationScore);
+			result.put("integrity", integrityScore);
+			result.put("recordId", recordId.intValue());
+
+			int score = totalScore / noteAnalysisList.size();
+
+			// Average score
+			if (getMusicXmlBasicInfo(null).getSubjectId() == 23 || getMusicXmlBasicInfo(null).getSubjectId() == 113) {
+				score = tempoScore;
+			}
+			result.put("score", score);
+		}
+		return result;
+	}
+	
+
+	public void evaluateForNote(NoteAnalysis noteAnalysis) {
+
+		double playDurationTime = 0;
+		
+		if (subjectId == 23 || subjectId == 113) {
+			if (noteAnalysis.getFrequency() == -1) {// rest
+				if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}else{
+				int beatTimes = (int) chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count();
+				LOGGER.info("Amplitude:{}  beatTimes:{}",chunkAnalysisList.stream().map(t -> t.getAmplitude()).collect(Collectors.toList()),beatTimes);
+				if(beatTimes == 0){
+					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+				}else if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}
+		} else {
+			if (noteAnalysis.getFrequency() == -1) {// rest
+
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= 100).mapToDouble(t -> t.getDurationTime()).sum();
+
+				if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getIntegrityRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
+				} else if (Math.abs(noteAnalysis.getFrequency() - noteAnalysis.getPlayFrequency()) > hardLevel.getFrequencyThreshold()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			} else {
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() > 100 && t.getFrequency() < 2000)
+						.mapToDouble(t -> t.getDurationTime()).sum();
+
+				if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getNotPlayRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+				} else if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getIntegrityRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
+				} else if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else if (Math.abs(noteAnalysis.getFrequency() - noteAnalysis.getPlayFrequency()) > hardLevel.getFrequencyThreshold()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}
+		}
+
+		// Compute the intonation score (in cents)
+		int tempoScore = 0;
+		int integrityScore = 0;
+		int intonationScore = 100 - new BigDecimal(Math.abs(YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getPlayFrequency())
+				- YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getFrequency()))).multiply(new BigDecimal(20)).divide(new BigDecimal(17), BigDecimal.ROUND_UP)
+				.setScale(0, BigDecimal.ROUND_UP).intValue();
+		if (intonationScore < 0) {
+			intonationScore = 0;
+		} else if (intonationScore > 100) {
+			intonationScore = 100;
+		}
+
+		if (noteAnalysis.getMusicalErrorType() == NoteErrorType.NOT_PLAY) {
+			intonationScore = 0;
+		} else {
+
+			if (noteAnalysis.isTempo()) {
+				tempoScore = 100;
+				noteAnalysis.setTempoScore(tempoScore);
+			}
+
+			double durationPercent = playDurationTime / noteAnalysis.getDurationTime();
+			if (durationPercent >= 0.7) {
+				integrityScore = 100;
+			} else if (durationPercent < 0.7 && durationPercent >= 0.5) {
+				integrityScore = 50;
+			}
+			noteAnalysis.setIntegrityScore(integrityScore);
+		}
+		noteAnalysis.setIntonationScore(intonationScore);
+		if (subjectId == 23 || subjectId == 113) {
+			noteAnalysis.setScore(tempoScore);
+		} else {
+			noteAnalysis.setScore(new BigDecimal(intonationScore + tempoScore + integrityScore).divide(new BigDecimal(3), 2).setScale(0, BigDecimal.ROUND_UP)
+					.intValue());
+		}
+	}
+	
+	private int computeFrequency(List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis, int offsetRange) {
+		
+		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		
+		int tenutoSize = 0;
+		// Remove the signal carried over from the previous note
+		if (lastChunkAnalysis != null) {
+			int lastFrequency = lastChunkAnalysis.getFrequency();
+			Iterator<ChunkAnalysis> iterable = chunkList.iterator();
+			while (iterable.hasNext()) {
+				if (Math.abs(lastFrequency - iterable.next().getFrequency()) > offsetRange) {
+					break;
+				}
+				iterable.remove();
+				tenutoSize++;
+			}
+
+			if (chunkList.size() == 0) {
+				return lastFrequency < 100 ? -1 : lastFrequency;
+			}
+		}
+
+		List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency()).filter(t -> t.doubleValue() > 100 && t.doubleValue() < 2000)
+				.collect(Collectors.toList());
+		
+		if (chunkFrequencyList.size() == 0) {
+			return -1;
+		}
+		
+		if(tenutoSize * 100 / chunkAnalysisList.size() > 50){
+			return lastChunkAnalysis.getFrequency();
+		}
+		
+		// Sort
+		chunkFrequencyList = chunkFrequencyList.stream().sorted().collect(Collectors.toList());
+		
+		int tempFrequency = chunkFrequencyList.get(0), totalFrequency = chunkFrequencyList.get(0);
+
+		int maxChunkSize = 0;
+		int frequency = chunkFrequencyList.get(0);
+		int chunkSize = 1;
+		int avgFrequency = chunkFrequencyList.get(0);
+		for (int i = 1; i < chunkFrequencyList.size(); i++) {
+			tempFrequency = chunkFrequencyList.get(i);
+
+			if (Math.abs(avgFrequency - tempFrequency) > offsetRange) {
+
+				avgFrequency = totalFrequency / chunkSize;
+
+				if (maxChunkSize < chunkSize) {
+					maxChunkSize = chunkSize;
+					frequency = avgFrequency;
+				}
+
+				chunkSize = 1;
+				avgFrequency = tempFrequency;
+				totalFrequency = tempFrequency;
+			} else {
+				chunkSize++;
+				totalFrequency += tempFrequency;
+			}
+
+			if (i == chunkFrequencyList.size() - 1) {
+				if (maxChunkSize <= chunkSize) {
+					maxChunkSize = chunkSize;
+					frequency = totalFrequency / chunkSize;
+				}
+			}
+		}
+
+		if (chunkFrequencyList.size() < 3) {
+			frequency = (int)chunkFrequencyList.get(chunkFrequencyList.size() - 1);
+		}
+		
+		if(frequency < 100){
+			frequency = -1;
+		}
+
+		return frequency;
+	}
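+	/*
+	 * Editor's note (not part of the original commit): computeFrequency() above sorts the valid
+	 * chunk frequencies, walks them while grouping consecutive values that stay within offsetRange
+	 * of the running group average, and returns the average of the largest group. A hand-traced
+	 * example with illustrative values:
+	 *
+	 *     chunk frequencies: [438, 440, 441, 880], offsetRange = 5
+	 *     group 1 -> {438, 440, 441} (size 3), group 2 -> {880} (size 1)
+	 *     returned frequency = (438 + 440 + 441) / 3 = 439
+	 */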
+	
+	private boolean computeTempoWithFrequency(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis){
+		
+		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		
+		// Remove the signal carried over from the previous note
+		if (lastChunkAnalysis != null) {
+			double lastFrequency = lastChunkAnalysis.getFrequency();
+			Iterator<ChunkAnalysis> iterable = chunkList.iterator();
+			while (iterable.hasNext()) {
+				if (Math.abs(lastFrequency - iterable.next().getFrequency()) > hardLevel.getFrequencyThreshold()) {
+					break;
+				}
+				iterable.remove();
+			}
+		}
+		
+		if(chunkList.size() == 0){
+			return false;
+		}
+		
+		ChunkAnalysis chunkAnalysis = null;
+		boolean tempo = false;
+		boolean isContinue = true;
+		int unplayedSize = 0;
+		int firstPeakIndex = -1;
+		for (int i = 0; i < chunkList.size(); i++) {
+			chunkAnalysis = chunkList.get(i);
+			if (chunkAnalysis != null) {
+				if (chunkAnalysis.getFrequency() > 100) {
+					tempo = true;
+					if(firstPeakIndex == -1){
+						firstPeakIndex = i;
+					}
+					if (isContinue == false) {
+						if (chunkAnalysisList.size() < 5) {
+							if (unplayedSize > 0) {
+								tempo = false;
+								break;
+							}
+						} else {
+							if ((unplayedSize * 100 / chunkAnalysisList.size()) > hardLevel.getNotPlayRange() || unplayedSize > 1) {
+								tempo = false;
+								break;
+							}
+						}
+					}
+				} else {
+					if (tempo == true) {
+						isContinue = false;
+						unplayedSize++;
+					}
+				}
+			}
+		}
+		
+		if (tempo) {
+			// Check the onset time
+			if((chunkAnalysisList.size() - chunkList.size() + firstPeakIndex) * 100 /chunkAnalysisList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
+				tempo = false;
+			}
+		}
+		
+		return tempo;
+	}
+	
+	private boolean computeTempoWithAmplitude2(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis) {
+
+		List<Integer> chunkAmplitudeList = chunkAnalysisList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
+
+		/*if (chunkAmplitudeList.size() <= 3) {
+			return chunkAmplitudeList.stream().filter(t -> t.floatValue() > hardLevel.getAmplitudeThreshold()).count() > 0;
+		}*/
+		
+		chunkAmplitudeList.add(0, lastChunkAnalysis.getAmplitude());
+		
+		// Check whether there are multiple peaks
+		boolean tempo = false;
+		boolean isContinue = true;
+		int firstPeakIndex = -1;
+		int peakSize = 0;
+		for (int i = 1; i < chunkAmplitudeList.size(); i++) {
+			if (chunkAmplitudeList.get(i) > hardLevel.getAmplitudeThreshold() && chunkAmplitudeList.get(i) > chunkAmplitudeList.get(i - 1) + 2) {
+				tempo = true;
+				if(firstPeakIndex == -1){
+					firstPeakIndex = i;
+					peakSize++;
+				}
+				if (isContinue == false) {
+					tempo = false;
+					peakSize++;
+					break;
+				}
+			} else {
+				if (tempo == true) {
+					isContinue = false;
+				}
+			}
+		}
+		
+		if(peakSize == 0){
+			tempo = lastChunkAnalysis.isPeak();
+		}else if(peakSize == 1){
+			tempo = true;
+		}else{
+			tempo = false;
+		}
+		
+		if (tempo) {
+			// Check the onset time
+			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
+				tempo = false;
+			}
+		}
+		
+		return tempo;
+	}
+	
+	private boolean computeTempoWithAmplitude(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis) {
+
+		boolean tempo = false;
+
+		List<Integer> chunkAmplitudeList = chunkAnalysisList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
+
+		if (chunkAmplitudeList.size() < 3) {
+			return chunkAmplitudeList.stream().filter(t -> t.floatValue() > hardLevel.getAmplitudeThreshold()).count() > 0;
+		}
+		
+		chunkAmplitudeList.add(0, lastChunkAnalysis.getAmplitude());
+
+		// Check whether there are multiple peaks
+		int peakSize = 0;
+		int minPeakIndex = -1;
+		for (int i = 1; i < chunkAmplitudeList.size(); i++) {
+			if (chunkAmplitudeList.get(i) < hardLevel.getAmplitudeThreshold()) {
+				continue;
+			}
+			if (i == chunkAmplitudeList.size() - 1) {
+				if (chunkAmplitudeList.get(i) > chunkAmplitudeList.get(i - 1)) {
+					peakSize++;
+					if (minPeakIndex == -1 || minPeakIndex > i) {
+						minPeakIndex = i;
+					}
+				}
+			} else {
+				if (chunkAmplitudeList.get(i - 1) < chunkAmplitudeList.get(i) && chunkAmplitudeList.get(i) >= chunkAmplitudeList.get(i + 1)) {
+					//if(Math.abs(chunkAmplitudeList.get(i - 1) - chunkAmplitudeList.get(i)) > 2 || Math.abs(chunkAmplitudeList.get(i) - chunkAmplitudeList.get(i + 1)) > 2){
+						peakSize++;
+						if (minPeakIndex == -1 || minPeakIndex > i) {
+							minPeakIndex = i;
+						}
+					//}
+				}
+			}
+		}
+
+		if (peakSize == 1) {
+			if (lastChunkAnalysis.isPeak() == false) {
+				tempo = true;
+			}
+		} else if (peakSize == 0) {
+			if (lastChunkAnalysis.isPeak()) {
+				tempo = true;
+			}
+		}
+
+		// Check whether the note entry was delayed
+		if (tempo == true) {
+			if (minPeakIndex * 100 / chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) && chunkAmplitudeList.size() > 3) {
+				tempo = false;
+			}
+		}
+
+		return tempo;
+	}
+	
+	public static void main(String[] args) {
+		UserChannelContext2 context = new UserChannelContext2();
+		
+		//int[] frequencys = {286,291,291,291,291,291,291};
+		int[] frequencys = {312,43,295,294,294,295};
+		
+		ChunkAnalysis lastChunkAnalysis = new ChunkAnalysis(624, 0, 0);
+		
+		List<ChunkAnalysis> chunkAnalysisList = new ArrayList<ChunkAnalysis>();
+		for(int f : frequencys) {
+			chunkAnalysisList.add(new ChunkAnalysis(f, 0, 0));
+		}
+		
+		MusicXmlNote musicXmlNote = new MusicXmlNote();
+		musicXmlNote.setDenominator(1);
+		
+		//System.out.println(context.computeFrequency(chunkAnalysisList, lastChunkAnalysis, 5));
+		System.out.println(context.computeTempoWithFrequency(musicXmlNote, chunkAnalysisList, lastChunkAnalysis));
+	}
+	
+}

+ 841 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext3.java

@@ -0,0 +1,841 @@
+package com.yonge.netty.dto;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.yonge.audio.analysis.Signals;
+import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.audio.utils.ArrayUtil;
+import com.yonge.netty.dto.NoteAnalysis.NoteErrorType;
+import com.yonge.netty.entity.MusicXmlBasicInfo;
+import com.yonge.netty.entity.MusicXmlNote;
+import com.yonge.netty.entity.MusicXmlSection;
+import com.yonge.netty.server.processor.WaveformWriter;
+
+/**
+ * User channel context
+ */
+public class UserChannelContext3 {
+	
+	private final static Logger LOGGER = LoggerFactory.getLogger(UserChannelContext3.class);
+	
+	private double standardFrequecy = 442;
+	
+	private int offsetMS;
+	
+	private double dynamicOffset;
+	
+	private String platform;
+	
+	private Long recordId;
+	
+	private Integer subjectId;
+	
+	private int beatDuration;
+	
+	private int beatByteLength;
+	
+	private boolean delayProcessed;
+	
+	// Mapping between songs and their MusicXML data
+	private ConcurrentHashMap<Integer, MusicXmlBasicInfo> songMusicXmlMap = new ConcurrentHashMap<Integer, MusicXmlBasicInfo>();
+
+	private WaveformWriter waveFileProcessor;
+
+	private NoteAnalysis processingNote = new NoteAnalysis(0, 0, -1);
+	
+	private AtomicInteger evaluatingSectionIndex = new AtomicInteger(0);
+	
+	private List<NoteAnalysis> doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+	
+	private List<SectionAnalysis> doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+	
+	private List<ChunkAnalysis> totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+	
+	private byte[] channelBufferBytes = new byte[0];
+	
+	private double playTime;
+	
+	private double receivedTime;
+	
+	private HardLevelEnum hardLevel = HardLevelEnum.ADVANCED;
+	
+	private NotePlayResult queryNoteFrequency(MusicXmlNote xmlNote, double playFrequency) {
+
+		NotePlayResult result = new NotePlayResult();
+
+		boolean status = false;
+		double migrationRate = 0;
+
+		if (Math.round(xmlNote.getFrequency()) == Math.round(playFrequency)) {
+			status = true;
+			migrationRate = 0;
+		} else {
+			NoteFrequencyRange noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, xmlNote.getFrequency());
+
+			if (noteFrequencyRange.getMinFrequency() > playFrequency || playFrequency > noteFrequencyRange.getMaxFrequency()) {
+				status = false;
+			} else {
+
+				status = true;
+
+				if (Math.round(playFrequency) < Math.round(xmlNote.getFrequency())) {
+					double min = Math.abs(xmlNote.getFrequency() - noteFrequencyRange.getMinFrequency()) / 2;
+					migrationRate = Math.abs(playFrequency - xmlNote.getFrequency()) / min;
+				} else {
+					double max = Math.abs(xmlNote.getFrequency() - noteFrequencyRange.getMaxFrequency()) / 2;
+					migrationRate = Math.abs(playFrequency - xmlNote.getFrequency()) / max;
+				}
+			}
+		}
+
+		result.setStatus(status);
+		result.setMigrationRate(migrationRate);
+
+		return result;
+	}
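+	/*
+	 * Editor's note (illustrative sketch, not part of the original source): queryNoteFrequency accepts a played
+	 * frequency when it lies inside the NoteFrequencyRange of the expected note and reports the drift towards the
+	 * range boundary as migrationRate. Assuming the range spans roughly half a semitone on either side, an expected
+	 * 442 Hz note would give approximately:
+	 *
+	 *   NotePlayResult r = queryNoteFrequency(note442, 448);  // note442.getFrequency() == 442 (assumed test values)
+	 *   r.getStatus();         // true, 448 Hz is still inside the range
+	 *   r.getMigrationRate();  // |448 - 442| / (|442 - maxFrequency| / 2), per the branch for sharp notes above
+	 */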
+	
+	public void init(String platform, String heardLevel, int subjectId, int beatDuration) {
+		this.platform = platform;
+		this.subjectId = subjectId;
+		this.beatDuration = beatDuration;
+		this.beatByteLength = WaveformWriter.SAMPLE_RATE * WaveformWriter.BITS_PER_SAMPLE / 8 * beatDuration / 1000;
+		hardLevel = HardLevelEnum.valueOf(heardLevel);
+	}
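+	/*
+	 * Editor's note (worked example, beatDuration value assumed): beatByteLength is the number of PCM bytes covered
+	 * by the count-in metronome for 44100 Hz / 16 bit mono audio. With beatDuration = 2000 ms:
+	 *
+	 *   44100 * 16 / 8 * 2000 / 1000 = 176400 bytes
+	 *
+	 * which is exactly the amount skipMetronome() below discards before analysis starts.
+	 */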
+	
+	public byte[] skipMetronome(byte[] datas) {
+		if (beatByteLength > 0) {
+			if (datas.length <= beatByteLength) {
+				beatByteLength -= datas.length;
+				return new byte[0];
+			}
+			if(beatByteLength % 2 != 0){
+				beatByteLength++;
+			}
+			datas = ArrayUtil.extractByte(datas, beatByteLength, datas.length - 1);
+			beatByteLength = 0;
+		}
+		return datas;
+	}
+	
+	public Long getRecordId() {
+		return recordId;
+	}
+
+	public void setRecordId(Long recordId) {
+		this.recordId = recordId;
+	}
+
+	public int getOffsetMS() {
+		return offsetMS;
+	}
+
+	public void setOffsetMS(int offsetMS) {
+		this.offsetMS = offsetMS;
+	}
+
+	public HardLevelEnum getHardLevel() {
+		return hardLevel;
+	}
+
+	public ConcurrentHashMap<Integer, MusicXmlBasicInfo> getSongMusicXmlMap() {
+		return songMusicXmlMap;
+	}
+
+	public WaveformWriter getWaveFileProcessor() {
+		return waveFileProcessor;
+	}
+
+	public void setWaveFileProcessor(WaveformWriter waveFileProcessor) {
+		this.waveFileProcessor = waveFileProcessor;
+	}
+
+	public NoteAnalysis getProcessingNote() {
+		return processingNote;
+	}
+
+	public void setProcessingNote(NoteAnalysis processingNote) {
+		this.processingNote = processingNote;
+	}
+	
+	public List<SectionAnalysis> getDoneSectionAnalysisList() {
+		return doneSectionAnalysisList;
+	}
+
+	public List<NoteAnalysis> getDoneNoteAnalysisList() {
+		return doneNoteAnalysisList;
+	}
+
+	public void resetUserInfo() {
+		beatByteLength = WaveformWriter.SAMPLE_RATE * WaveformWriter.BITS_PER_SAMPLE / 8 * beatDuration / 1000;
+		waveFileProcessor = null;
+		processingNote = new NoteAnalysis(0,0,-1);
+		evaluatingSectionIndex = new AtomicInteger(0);
+		channelBufferBytes = new byte[0];
+		doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+		doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+		totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+		recordId = null;
+		playTime = 0;
+		receivedTime = 0;
+		delayProcessed = false;
+		dynamicOffset = 0;
+	}
+	
+	public MusicXmlBasicInfo getMusicXmlBasicInfo(Integer songId){
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+		if (songId == null) {
+			musicXmlBasicInfo = songMusicXmlMap.values().stream().findFirst().get();
+		} else {
+			musicXmlBasicInfo = songMusicXmlMap.get(songId);
+		}
+		return musicXmlBasicInfo;
+	}
+	
+	public MusicXmlSection getCurrentMusicSection(Integer songId, int sectionIndex){
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+		return musicXmlBasicInfo.getMusicXmlSectionMap().get(sectionIndex);
+	}
+
+	public MusicXmlNote getCurrentMusicNote(Integer songId, Integer noteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(noteIndex == null){
+			noteIndex = processingNote.getMusicalNotesIndex();
+		}
+		final int index = noteIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null && index <= getTotalMusicNoteIndex(null)) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == index).findFirst().get();
+		}
+
+		return null;
+	}
+
+	public int getTotalMusicNoteIndex(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMusicalNotesIndex()).distinct().max(Integer::compareTo).get();
+		}
+
+		return -1;
+	}
+
+	public List<MusicXmlNote> getCurrentMusicSection(Integer songId, Integer sectionIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(sectionIndex == null){
+			sectionIndex = processingNote.getSectionIndex();
+		}
+		final int index = sectionIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMeasureIndex() == index)
+					.sorted(Comparator.comparing(MusicXmlNote::getMusicalNotesIndex)).collect(Collectors.toList());
+		}
+
+		return null;
+	}
+
+	public int getTotalMusicSectionSize(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return (int) musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMeasureIndex()).distinct().count();
+		}
+
+		return -1;
+	}
+	
+	public int getMusicSectionIndex(Integer songId, int musicXmlNoteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		
+		if(getTotalMusicNoteIndex(null) < musicXmlNoteIndex){
+			return -1;
+		}
+		
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == musicXmlNoteIndex).findFirst().get().getMeasureIndex();
+		}
+
+		return -1;
+	}
+	
+	public byte[] getChannelBufferBytes() {
+		return channelBufferBytes;
+	}
+
+	public void setChannelBufferBytes(byte[] channelBufferBytes) {
+		this.channelBufferBytes = channelBufferBytes;
+	}
+
+	public AtomicInteger getEvaluatingSectionIndex() {
+		return evaluatingSectionIndex;
+	}
+
+	public void handle(float[] samples, AudioFormat audioFormat){
+		
+		YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length , audioFormat.getSampleRate());
+
+		int playFrequency = (int) frequencyDetector.getFrequency(samples);
+		int splDb = (int) Signals.soundPressureLevel(samples);
+		int power = (int) Signals.power(samples);
+		int amplitude = (int) Signals.norm(samples);
+		float rms = Signals.rms(samples);
+		
+		double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
+		
+		receivedTime += durationTime;
+		
+		if(receivedTime < offsetMS){
+			return;
+		}
+		
+		playTime += durationTime;
+		
+		// fetch the current note info
+		MusicXmlNote musicXmlNote = getCurrentMusicNote(null,null);
+
+		if (musicXmlNote == null) {
+			return;
+		}
+		
+		// fetch the note currently being processed
+		NoteAnalysis noteAnalysis = getProcessingNote();
+		if(noteAnalysis == null || noteAnalysis.getDurationTime() == 0) {
+			noteAnalysis = new NoteAnalysis(musicXmlNote.getMusicalNotesIndex(), musicXmlNote.getMeasureIndex(), (int)musicXmlNote.getFrequency(), musicXmlNote.getDuration());
+		}
+		
+		evaluatingSectionIndex.set(noteAnalysis.getSectionIndex());
+		
+		if (noteAnalysis.getMusicalNotesIndex() >= 0 && noteAnalysis.getMusicalNotesIndex() <= getTotalMusicNoteIndex(null)) {
+			
+			LOGGER.info("delayPrcessed:{} dynamicOffset:{}  Frequency:{}  splDb:{}  Power:{}  amplitude:{}  rms:{}  time:{}", delayProcessed, dynamicOffset, playFrequency, splDb, power, amplitude, rms, playTime);
+			
+			ChunkAnalysis chunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude);
+			
+			if(totalChunkAnalysisList.size() > 0){
+				if(totalChunkAnalysisList.get(totalChunkAnalysisList.size() - 1).getAmplitude() + 2 < chunkAnalysis.getAmplitude()){
+					chunkAnalysis.setPeak(true);// percussion only
+				}
+			}
+			totalChunkAnalysisList.add(chunkAnalysis);
+			
+			if(delayProcessed == false && chunkAnalysis.getFrequency() > 100){
+				
+				delayProcessed = true;
+				// compute the delay offset
+				//playTime = musicXmlNote.getTimeStamp() + durationTime;
+				dynamicOffset = chunkAnalysis.getStartTime() - musicXmlNote.getTimeStamp();
+				/*if(100 * dynamicOffset / musicXmlNote.getDuration() > (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()))){
+					dynamicOffset = 0;
+				}*/
+			}
+			
+			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset)) {
+
+				if (musicXmlNote.getDontEvaluating()) {
+					noteAnalysis.setIgnore(true);
+				}
+				
+				noteAnalysis.setPlayFrequency(computeFrequency(musicXmlNote));
+				
+				// judge the tempo (an uninterrupted pitch within the note's duration means the tempo is correct)
+				boolean tempo = true;
+				if (subjectId == 23 || subjectId == 113) {
+					tempo = computeTempoWithAmplitude2(musicXmlNote);
+				}else{
+					tempo = computeTempoWithFrequency(musicXmlNote);
+				}
+				
+				noteAnalysis.setTempo(tempo);
+				
+				evaluateForNote(musicXmlNote, noteAnalysis);
+
+				LOGGER.info("当前音符下标[{}] 预计频率:{} 实际频率:{} 节奏:{}", noteAnalysis.getMusicalNotesIndex(), musicXmlNote.getFrequency(), noteAnalysis.getPlayFrequency(),
+						noteAnalysis.isTempo());
+				
+				doneNoteAnalysisList.add(noteAnalysis);
+				
+				// prepare to process the next note
+				int nextNoteIndex = musicXmlNote.getMusicalNotesIndex() + 1;
+				float nextNoteFrequence = -1;
+				double standDuration = 0;
+				MusicXmlNote nextMusicXmlNote = getCurrentMusicNote(null, nextNoteIndex);
+				if(nextMusicXmlNote != null){
+					nextNoteFrequence = nextMusicXmlNote.getFrequency();
+					standDuration = nextMusicXmlNote.getDuration();
+				}
+				
+				NoteAnalysis nextNoteAnalysis = new NoteAnalysis(nextNoteIndex, getMusicSectionIndex(null, nextNoteIndex), (int)nextNoteFrequence, standDuration);
+
+				noteAnalysis = nextNoteAnalysis;
+
+			}
+
+			setProcessingNote(noteAnalysis);
+		}
+		
+	}
+	
+
+	public int evaluateForSection(int sectionIndex, int subjectId){
+
+		int score = -1;
+		if(doneSectionAnalysisList.size() >= getTotalMusicSectionSize(null)){
+			return score;
+		}
+		
+		// fetch all notes of the current section
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.getSectionIndex() == sectionIndex).collect(Collectors.toList());
+		
+		long ignoreSize = noteAnalysisList.stream().filter(t -> t.isIgnore()).count();
+
+		SectionAnalysis sectionAnalysis = new SectionAnalysis();
+		sectionAnalysis.setIndex(sectionIndex);
+		sectionAnalysis.setNoteNum(noteAnalysisList.size());
+		sectionAnalysis.setIsIngore(ignoreSize == noteAnalysisList.size());
+		
+		// check whether the section should be scored
+		MusicXmlSection musicXmlSection = getCurrentMusicSection(null, sectionIndex);
+		if(noteAnalysisList.size() == musicXmlSection.getNoteNum()){
+			// fetch the notes that need evaluation
+			List<NoteAnalysis>  noteList = noteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+			
+			if(noteList != null && noteList.size() > 0){
+				score = noteList.stream().mapToInt(t -> t.getScore()).sum() / noteList.size();
+			}
+			sectionAnalysis.setDurationTime(noteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+			sectionAnalysis.setScore(score);
+
+			LOGGER.info("小节评分:{}",sectionAnalysis);
+			doneSectionAnalysisList.add(sectionAnalysis);
+		}
+		
+		return score;
+	}
+	
+	public Map<String, Integer> evaluateForMusic() {
+
+		Map<String, Integer> result = new HashMap<String, Integer>();
+		
+		result.put("playTime", (int) doneNoteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+		
+		// fetch the notes that need evaluation
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+
+		if (noteAnalysisList != null && noteAnalysisList.size() > 0) {
+			int intonationScore = 0;
+			int tempoScore = 0;
+			int integrityScore = 0;
+			int socre = 0;
+
+			for (NoteAnalysis note : noteAnalysisList) {
+				intonationScore += note.getIntonationScore();
+				tempoScore += note.getTempoScore();
+				integrityScore += note.getIntegrityScore();
+				socre += note.getScore();
+			}
+
+			tempoScore = tempoScore / noteAnalysisList.size();
+			intonationScore = intonationScore / noteAnalysisList.size();
+			integrityScore = integrityScore / noteAnalysisList.size();
+
+			result.put("cadence", tempoScore);
+			result.put("intonation", intonationScore);
+			result.put("integrity", integrityScore);
+	        result.put("recordId", recordId.intValue());
+
+			int score = socre / noteAnalysisList.size();
+
+			// average score
+			if (getMusicXmlBasicInfo(null).getSubjectId() == 23 || getMusicXmlBasicInfo(null).getSubjectId() == 113) {
+				score = tempoScore;
+			}
+			result.put("score", score);
+		}
+		return result;
+	}
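+	/*
+	 * Editor's note (illustrative numbers only): evaluateForMusic averages the per-note scores with integer
+	 * division. For three evaluated notes with intonation 90/80/70, tempo 100/100/0 and integrity 100/50/100 the
+	 * result map holds intonation = 240 / 3 = 80, cadence = 200 / 3 = 66 and integrity = 250 / 3 = 83; the overall
+	 * "score" is the average of the per-note scores, except for percussion subjects (23/113) where only the tempo
+	 * score is used.
+	 */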
+	
+
+	public void evaluateForNote(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		double endTime = musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+
+		// keep only part of the valid signal according to the integrity range
+		int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())) / 100;
+		chunkAnalysisList = chunkAnalysisList.subList(0, elementSize);
+		
+		double playDurationTime = 0;
+		
+		if (subjectId == 23 || subjectId == 113) {
+			if (noteAnalysis.getFrequency() == -1) {// rest note
+				if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}else{
+				int beatTimes = (int) chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count();
+				LOGGER.info("Amplitude:{}  beatTimes:{}  Denominator:{}",chunkAnalysisList.stream().map(t -> t.getAmplitude()).collect(Collectors.toList()), beatTimes, musicXmlNote.getDenominator());
+				if(beatTimes == 0){
+					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+				}else if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}
+		} else {
+			
+			NotePlayResult notePlayResult = queryNoteFrequency(musicXmlNote, noteAnalysis.getPlayFrequency());
+			
+			if (noteAnalysis.getFrequency() == -1) {// rest note
+
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= 100).mapToDouble(t -> t.getDurationTime()).sum();
+
+				if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getIntegrityRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
+				} else if (notePlayResult.getStatus() == false) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			} else {
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() > 100 && t.getFrequency() < 2000)
+						.mapToDouble(t -> t.getDurationTime()).sum();
+
+				if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getNotPlayRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+				} else if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getIntegrityRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
+				} else if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else if (notePlayResult.getStatus() == false) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}
+		}
+
+		// compute the intonation score (in cents)
+		int tempoScore = 0;
+		int integrityScore = 0;
+		int intonationScore = 100 - new BigDecimal(Math.abs(YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getPlayFrequency())
+				- YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getFrequency()))).multiply(new BigDecimal(20)).divide(new BigDecimal(17), BigDecimal.ROUND_UP)
+				.setScale(0, BigDecimal.ROUND_UP).intValue();
+		if (intonationScore < 0) {
+			intonationScore = 0;
+		} else if (intonationScore > 100) {
+			intonationScore = 100;
+		}
+
+		if (noteAnalysis.getMusicalErrorType() == NoteErrorType.NOT_PLAY) {
+			intonationScore = 0;
+		} else {
+
+			if (noteAnalysis.isTempo()) {
+				tempoScore = 100;
+				noteAnalysis.setTempoScore(tempoScore);
+			}
+
+			double durationPercent = playDurationTime / noteAnalysis.getDurationTime();
+			if (durationPercent >= 0.7) {
+				integrityScore = 100;
+			} else if (durationPercent < 0.7 && durationPercent >= 0.5) {
+				integrityScore = 50;
+			}
+			noteAnalysis.setIntegrityScore(integrityScore);
+		}
+		noteAnalysis.setIntonationScore(intonationScore);
+		if (subjectId == 23 || subjectId == 113) {
+			noteAnalysis.setScore(tempoScore);
+		} else {
+			noteAnalysis.setScore(new BigDecimal(intonationScore + tempoScore + integrityScore).divide(new BigDecimal(3), 2).setScale(0, BigDecimal.ROUND_UP)
+					.intValue());
+		}
+	}
+	
+	private int computeFrequency(MusicXmlNote musicXmlNote) {
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		double endTime = musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		// keep only part of the valid signal according to the integrity range
+		int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())) / 100;
+		chunkAnalysisList = chunkAnalysisList.subList(0, elementSize);
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return -1;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		LOGGER.info("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkAnalysisList.get(chunkAnalysisList.size() - 1)
+				.getEndTime());
+		
+		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		
+		List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency()).filter(t -> t.doubleValue() > 100 && t.doubleValue() < 2000)
+				.collect(Collectors.toList());
+		
+		if (chunkFrequencyList.size() == 0) {
+			return -1;
+		}
+
+		int frequency = (int) (chunkFrequencyList.stream().mapToInt(t -> t).sum() / chunkFrequencyList.size());
+
+		return frequency;
+	}
+	
+	/**
+	 * Within the note's duration there must be exactly one pitch, it must not be interrupted, and playing must start within a reasonable range.
+	 * When the pitch is the same as the previous note, the two notes must be separated by a break.
+	 * @param musicXmlNote
+	 * @return
+	 */
+	private boolean computeTempoWithFrequency(MusicXmlNote musicXmlNote){
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		double endTime = musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+
+		// keep only part of the valid signal according to the integrity range
+		int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())) / 100;
+		chunkAnalysisList = chunkAnalysisList.subList(0, elementSize);
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return false;
+		}
+		
+		if (musicXmlNote.getFrequency() == -1) {// rest note
+			return chunkAnalysisList.stream().filter(t -> t.getFrequency() > 100).count() <= 1;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) == Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();
+
+		ChunkAnalysis lastChunkAnalysis = null;
+		if (chunkAnalysisOptional.isPresent()) {
+			lastChunkAnalysis = chunkAnalysisOptional.get();
+		}
+		
+		if(lastChunkAnalysis == null){
+			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
+		}
+		
+		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		
+		if(chunkList.size() == 0){
+			return false;
+		}
+		
+		NoteFrequencyRange noteFrequencyRange = null;
+		ChunkAnalysis chunkAnalysis = null;
+		boolean tempo = false;
+		boolean isContinue = true;
+		int unplayedSize = 0;
+		int firstPeakIndex = -1;
+		for (int i = 0; i < chunkList.size(); i++) {
+			chunkAnalysis = chunkList.get(i);
+			if (chunkAnalysis != null) {
+				if (chunkAnalysis.getFrequency() > 100) {
+					
+					tempo = true;
+					if (firstPeakIndex == -1) {
+						firstPeakIndex = i;
+						noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, chunkAnalysis.getFrequency());
+					} else if (noteFrequencyRange.getMinFrequency() > chunkAnalysis.getFrequency()
+							|| chunkAnalysis.getFrequency() > noteFrequencyRange.getMaxFrequency()) {
+						// check whether it is still the same pitch
+						tempo = false;
+						LOGGER.info("Tempo error: not the same pitch [{}]:{}-{}", chunkAnalysis.getFrequency(), noteFrequencyRange.getMinFrequency(), noteFrequencyRange.getMaxFrequency());
+						break;
+					}
+					if (isContinue == false) {
+						if ((i + 1) * 100 / chunkAnalysisList.size() < hardLevel.getIntegrityRange()) {
+							if (unplayedSize > 0) {
+								tempo = false;
+								LOGGER.info("Tempo error: the signal is not continuous");
+								break;
+							}
+						}
+					}
+				} else {
+					if (tempo == true) {
+						isContinue = false;
+						unplayedSize++;
+					}
+				}
+			}
+		}
+		
+		if (tempo) {
+			// check the onset time
+			if(firstPeakIndex * 100 / chunkAnalysisList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
+				tempo = false;
+				LOGGER.info("Tempo error: the onset is too late");
+			}
+			if(tempo){
+				// check whether the sound is merely carried over from the previous note
+				if(firstChunkAnalysis.getFrequency() > 100 && lastChunkAnalysis.getFrequency() > 100){
+					tempo = new NoteFrequencyRange(standardFrequecy, firstChunkAnalysis.getFrequency()).equals(new NoteFrequencyRange(standardFrequecy, lastChunkAnalysis.getFrequency())) == false;
+					if (!tempo) {
+						LOGGER.info("Tempo error: the previous note carried over into this one");
+					}
+				}
+			}
+		}
+		
+		return tempo;
+	}
+	
+	private boolean computeTempoWithAmplitude2(MusicXmlNote musicXmlNote) {
+
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		
+		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return false;
+		}
+
+		if (musicXmlNote.getFrequency() == -1) {// rest note
+			return chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) == Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();
+
+		ChunkAnalysis lastChunkAnalysis = null;
+		if (chunkAnalysisOptional.isPresent()) {
+			lastChunkAnalysis = chunkAnalysisOptional.get();
+		}
+		if(lastChunkAnalysis == null){
+			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
+		}
+		
+		List<Integer> chunkAmplitudeList = chunkAnalysisList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
+
+		chunkAmplitudeList.add(0, lastChunkAnalysis.getAmplitude());
+		
+		// detect whether there are multiple peaks
+		boolean tempo = false;
+		boolean isContinue = true;
+		int firstPeakIndex = -1;
+		int peakSize = 0;
+		for (int i = 1; i < chunkAmplitudeList.size(); i++) {
+			if (chunkAmplitudeList.get(i) > hardLevel.getAmplitudeThreshold() && chunkAmplitudeList.get(i) > chunkAmplitudeList.get(i - 1) + 2) {
+				tempo = true;
+				if(firstPeakIndex == -1){
+					firstPeakIndex = i;
+					peakSize++;
+				}
+				if (isContinue == false) {
+					tempo = false;
+					peakSize++;
+					break;
+				}
+			} else {
+				if (tempo == true) {
+					isContinue = false;
+				}
+			}
+		}
+		
+		if(peakSize == 0){
+			tempo = lastChunkAnalysis.isPeak();
+		}else if(peakSize == 1){
+			tempo = true;
+		}else{
+			tempo = false;
+		}
+		
+		if (tempo) {
+			// check the onset time
+			if((firstPeakIndex - 1) * 100 / chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) * 2){
+				LOGGER.info("Onset out of range:{}", (firstPeakIndex - 1) * 100 / chunkAmplitudeList.size());
+				tempo = false;
+			}
+		}
+		
+		return tempo;
+	}
+	
+	private double queryFirstNoteStartTime(List<ChunkAnalysis> chunkAnalysisList, MusicXmlNote musicXmlNote) {
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return musicXmlNote.getTimeStamp() + dynamicOffset;
+		}
+
+		NoteFrequencyRange standardNote = new NoteFrequencyRange(standardFrequecy, musicXmlNote.getFrequency());
+
+		NoteFrequencyRange noteFrequencyRange = null;
+
+		for (ChunkAnalysis ca : chunkAnalysisList) {
+			noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, ca.getFrequency());
+			if (standardNote.equals(noteFrequencyRange)) {
+				return ca.getStartTime();
+			}
+		}
+
+		return chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime();
+	}
+	
+	public static void main(String[] args) {
+		
+		NoteFrequencyRange range = new NoteFrequencyRange(440, 466);
+		
+		System.out.println("Min:" + range.getMinFrequency() + "  Max:" + range.getMaxFrequency());
+		
+	}
+	
+}

+ 67 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/WebSocketResponse.java

@@ -0,0 +1,67 @@
+package com.yonge.netty.dto;
+
+import org.springframework.http.HttpStatus;
+
+public class WebSocketResponse<T> {
+
+	private Head header = new Head();
+
+	private T body;
+
+	public WebSocketResponse(Head header, T body) {
+		this.header = header;
+		this.body = body;
+	}
+
+	public WebSocketResponse(String command, T body) {
+		this.header = new Head(command, HttpStatus.OK.value());
+		this.body = body;
+	}
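+	/*
+	 * Editor's note (usage sketch; command string and payload are assumed): handlers wrap any serializable body,
+	 * e.g.
+	 *
+	 *   WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<>("someCommand", payload);
+	 *   // header.status defaults to HttpStatus.OK (200)
+	 *
+	 * before NettyChannelManager serializes it to JSON and writes it back as a TextWebSocketFrame.
+	 */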
+
+	public Head getHeader() {
+		return header;
+	}
+
+	public void setHeader(Head header) {
+		this.header = header;
+	}
+
+	public T getBody() {
+		return body;
+	}
+
+	public void setBody(T body) {
+		this.body = body;
+	}
+
+	public static class Head {
+		private int status = HttpStatus.OK.value();
+		private String commond = "";
+
+		public Head() {
+
+		}
+
+		public Head(String commond, int status) {
+			this.commond = commond;
+			this.status = status;
+		}
+
+		public int getStatus() {
+			return status;
+		}
+
+		public void setStatus(int status) {
+			this.status = status;
+		}
+
+		public String getCommond() {
+			return commond;
+		}
+
+		public void setCommond(String commond) {
+			this.commond = commond;
+		}
+
+	}
+}

+ 161 - 0
audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlBasicInfo.java

@@ -0,0 +1,161 @@
+package com.yonge.netty.entity;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.stream.Collectors;
+
+public class MusicXmlBasicInfo {
+
+	private Integer id;
+
+	private Integer subjectId;
+
+	private Integer detailId;
+
+	private Integer examSongId;
+
+	private String xmlUrl;
+
+	private String behaviorId;
+
+	private String platform;
+
+	private int speed;
+
+	private String heardLevel;
+
+	private String uuid;
+	
+	private float beatLength;
+
+	private List<MusicXmlNote> musicXmlInfos = new ArrayList<MusicXmlNote>();
+
+	private Map<Integer, MusicXmlSection> musicXmlSectionMap = new HashMap<Integer, MusicXmlSection>();
+
+	public Integer getId() {
+		return id;
+	}
+
+	public void setId(Integer id) {
+		this.id = id;
+	}
+
+	public Integer getSubjectId() {
+		return subjectId;
+	}
+
+	public void setSubjectId(Integer subjectId) {
+		this.subjectId = subjectId;
+	}
+
+	public Integer getDetailId() {
+		return detailId;
+	}
+
+	public void setDetailId(Integer detailId) {
+		this.detailId = detailId;
+	}
+
+	public Integer getExamSongId() {
+		return examSongId;
+	}
+
+	public void setExamSongId(Integer examSongId) {
+		this.examSongId = examSongId;
+	}
+
+	public String getXmlUrl() {
+		return xmlUrl;
+	}
+
+	public void setXmlUrl(String xmlUrl) {
+		this.xmlUrl = xmlUrl;
+	}
+
+	public String getBehaviorId() {
+		return behaviorId;
+	}
+
+	public void setBehaviorId(String behaviorId) {
+		this.behaviorId = behaviorId;
+	}
+
+	public String getPlatform() {
+		return platform;
+	}
+
+	public void setPlatform(String platform) {
+		this.platform = platform;
+	}
+
+	public int getSpeed() {
+		return speed;
+	}
+
+	public void setSpeed(int speed) {
+		this.speed = speed;
+	}
+
+	public String getHeardLevel() {
+		return heardLevel;
+	}
+
+	public void setHeardLevel(String heardLevel) {
+		this.heardLevel = heardLevel;
+	}
+
+	public String getUuid() {
+		return uuid;
+	}
+
+	public void setUuid(String uuid) {
+		this.uuid = uuid;
+	}
+
+	public float getBeatLength() {
+		return beatLength;
+	}
+
+	public void setBeatLength(float beatLength) {
+		this.beatLength = beatLength;
+	}
+
+	public List<MusicXmlNote> getMusicXmlInfos() {
+		return musicXmlInfos;
+	}
+
+	public void setMusicXmlInfos(List<MusicXmlNote> musicXmlInfos) {
+		this.musicXmlInfos = musicXmlInfos;
+	}
+
+	public Map<Integer, MusicXmlSection> getMusicXmlSectionMap() {
+
+		if (musicXmlSectionMap.size() == 0) {
+			Map<Integer, List<MusicXmlNote>> map = musicXmlInfos.stream().collect(Collectors.groupingBy(MusicXmlNote::getMeasureIndex));
+
+			List<MusicXmlNote> noteList = null;
+			MusicXmlSection section = null;
+			for (Entry<Integer, List<MusicXmlNote>> entry : map.entrySet()) {
+				noteList = entry.getValue();
+
+				section = new MusicXmlSection();
+
+				section.setStartTime(noteList.stream().map(t -> t.getTimeStamp()).distinct().min(Double::compareTo).get());
+				section.setDuration(noteList.stream().mapToDouble(t -> t.getDuration()).sum());
+				section.setNoteNum(noteList.size());
+				section.setIndex(entry.getKey());
+
+				musicXmlSectionMap.put(entry.getKey(), section);
+			}
+		}
+
+		return musicXmlSectionMap;
+	}
+
+	public void setMusicXmlSectionMap(Map<Integer, MusicXmlSection> musicXmlSectionMap) {
+		this.musicXmlSectionMap = musicXmlSectionMap;
+	}
+}

+ 106 - 0
audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlNote.java

@@ -0,0 +1,106 @@
+package com.yonge.netty.entity;
+
+/**
+ * Note information
+ */
+public class MusicXmlNote {
+
+	// note start timestamp (the first note starts at 0 ms)
+	private double timeStamp;
+
+	// playback duration of this note (ms)
+	private double duration;
+
+	// frequency of this note
+	private float frequency;
+
+	// frequency of the next sound (not the next note in the score)
+	private float nextFrequency;
+
+	// frequency of the previous sound (not the previous note in the score)
+	private float prevFrequency;
+
+	// index of the measure this note belongs to (starting from 0)
+	private int measureIndex;
+
+	// whether this note is excluded from evaluation
+	private boolean dontEvaluating;
+
+	// index of this note within the whole score (starting from 0)
+	private int musicalNotesIndex;
+	
+	// denominator of the note value (e.g. 4 for a quarter note)
+	private int denominator;
+
+	public double getTimeStamp() {
+		return timeStamp;
+	}
+
+	public void setTimeStamp(double timeStamp) {
+		this.timeStamp = timeStamp;
+	}
+
+	public double getDuration() {
+		return duration;
+	}
+
+	public void setDuration(double duration) {
+		this.duration = duration;
+	}
+
+	public float getFrequency() {
+		return frequency;
+	}
+
+	public void setFrequency(float frequency) {
+		this.frequency = frequency;
+	}
+
+	public float getPrevFrequency() {
+		return prevFrequency;
+	}
+
+	public void setPrevFrequency(float prevFrequency) {
+		this.prevFrequency = prevFrequency;
+	}
+
+	public float getNextFrequency() {
+		return nextFrequency;
+	}
+
+	public void setNextFrequency(float nextFrequency) {
+		this.nextFrequency = nextFrequency;
+	}
+
+	public int getMeasureIndex() {
+		return measureIndex;
+	}
+
+	public void setMeasureIndex(int measureIndex) {
+		this.measureIndex = measureIndex;
+	}
+
+	public boolean getDontEvaluating() {
+		return dontEvaluating;
+	}
+
+	public void setDontEvaluating(boolean dontEvaluating) {
+		this.dontEvaluating = dontEvaluating;
+	}
+
+	public int getMusicalNotesIndex() {
+		return musicalNotesIndex;
+	}
+
+	public void setMusicalNotesIndex(int musicalNotesIndex) {
+		this.musicalNotesIndex = musicalNotesIndex;
+	}
+
+	public int getDenominator() {
+		return denominator;
+	}
+
+	public void setDenominator(int denominator) {
+		this.denominator = denominator;
+	}
+}

+ 50 - 0
audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlSection.java

@@ -0,0 +1,50 @@
+package com.yonge.netty.entity;
+
+/**
+ * Section (measure) information
+ */
+public class MusicXmlSection {
+
+	private double startTime;
+
+	// playback duration of this section (ms)
+	private double duration;
+
+	// number of notes
+	private int noteNum;
+
+	private int index;
+
+	public double getDuration() {
+		return duration;
+	}
+
+	public void setDuration(double duration) {
+		this.duration = duration;
+	}
+
+	public int getNoteNum() {
+		return noteNum;
+	}
+
+	public void setNoteNum(int noteNum) {
+		this.noteNum = noteNum;
+	}
+
+	public double getStartTime() {
+		return startTime;
+	}
+
+	public void setStartTime(double startTime) {
+		this.startTime = startTime;
+	}
+
+	public int getIndex() {
+		return index;
+	}
+
+	public void setIndex(int index) {
+		this.index = index;
+	}
+
+}

+ 155 - 0
audio-analysis/src/main/java/com/yonge/netty/server/NettyServer.java

@@ -0,0 +1,155 @@
+package com.yonge.netty.server;
+
+import io.netty.bootstrap.ServerBootstrap;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelInitializer;
+import io.netty.channel.ChannelOption;
+import io.netty.channel.ChannelPipeline;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.nio.NioEventLoopGroup;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.channel.socket.nio.NioServerSocketChannel;
+import io.netty.handler.codec.http.HttpObjectAggregator;
+import io.netty.handler.codec.http.HttpServerCodec;
+import io.netty.handler.codec.http.websocketx.WebSocketServerProtocolHandler;
+import io.netty.handler.codec.http.websocketx.extensions.compression.WebSocketServerCompressionHandler;
+import io.netty.handler.stream.ChunkedWriteHandler;
+
+import java.net.InetSocketAddress;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
+
+import com.yonge.netty.server.handler.NettyServerHandler;
+import com.yonge.netty.server.handler.message.BinaryWebSocketFrameHandler;
+import com.yonge.netty.server.handler.message.TextWebSocketHandler;
+
+@Configuration
+public class NettyServer {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(NettyServer.class);
+
+	/**
+	 * WebSocket sub-protocol name
+	 */
+	private static final String WEBSOCKET_PROTOCOL = "WebSocket";
+
+	/**
+	 * Port number
+	 */
+	@Value("${netty.server.port}")
+	private int port;
+
+	/**
+	 * WebSocket path
+	 */
+	private String webSocketPath = "/audioAnalysis";
+
+	private EventLoopGroup bossGroup = new NioEventLoopGroup(1);
+
+	private EventLoopGroup workGroup = new NioEventLoopGroup(5);
+
+	@Autowired
+	private NettyServerHandler nettyServerHandler;
+
+	@Autowired
+	private BinaryWebSocketFrameHandler binaryWebSocketFrameHandler;
+
+	@Autowired
+	private TextWebSocketHandler textWebSocketHandler;
+
+	/**
+	 * Start the server
+	 * @throws InterruptedException
+	 */
+	private void start() throws InterruptedException {
+		ServerBootstrap bootstrap = new ServerBootstrap();
+
+		// bossGroup accepts incoming TCP connection requests, workGroup handles reads and writes with the clients
+		bootstrap.group(bossGroup, workGroup);
+		// use NIO channels
+		bootstrap.channel(NioServerSocketChannel.class);
+		// set the listening port
+		bootstrap.localAddress(new InetSocketAddress(port));
+		// size of the server accept queue
+		//bootstrap.option(ChannelOption.SO_BACKLOG, 1024);
+		//bootstrap.option(ChannelOption.SO_RCVBUF, 1024*4);
+		// allow small packets to be sent immediately to reduce latency
+		bootstrap.childOption(ChannelOption.TCP_NODELAY, true);
+		// a channel is created for each incoming connection
+		bootstrap.childHandler(new ChannelInitializer<SocketChannel>() {
+
+			@Override
+			protected void initChannel(SocketChannel ch) throws Exception {
+				// get the ChannelPipeline of this Channel
+				ChannelPipeline channelPipeline = ch.pipeline();
+
+				// the pipeline holds the handlers that process the business logic
+				// the WebSocket protocol is upgraded from HTTP, so an HTTP codec is still required
+				channelPipeline.addLast(new HttpServerCodec());
+				// channelPipeline.addLast(new ObjectEncoder());
+				// write data to the client in chunks to avoid OOM when sending large files, e.g. channel.write(new ChunkedFile(new File("bigFile.mkv")))
+				channelPipeline.addLast(new ChunkedWriteHandler());
+				/*
+				 * Note: 1. HTTP data is transferred in segments; HttpObjectAggregator aggregates the segments into a complete message. 2. This is why a browser issues multiple HTTP requests when sending a large amount of data.
+				 */
+				channelPipeline.addLast(new HttpObjectAggregator(1024 * 8));
+				// WebSocket compression extension; when it is added, the third argument of WebSocketServerProtocolHandler must be set to true
+				channelPipeline.addLast(new WebSocketServerCompressionHandler());
+				/*
+				 * Note: 1. WebSocket data is transferred as frames. 2. A browser request such as ws://localhost:58080/xxx carries the request URI. 3. The core function is upgrading HTTP to the WS protocol and keeping the connection alive.
+				 */
+				channelPipeline.addLast(new WebSocketServerProtocolHandler(webSocketPath, WEBSOCKET_PROTOCOL, true, 65536 * 1000, false, true));
+
+				// custom handlers that implement the business logic
+				channelPipeline.addLast(nettyServerHandler);
+				channelPipeline.addLast(binaryWebSocketFrameHandler);
+				channelPipeline.addLast(textWebSocketHandler);
+
+			}
+		});
+
+		// configuration done; bind the server and block via sync() until binding succeeds
+		ChannelFuture channelFuture = bootstrap.bind().sync();
+
+		if (channelFuture.isSuccess()) {
+			LOGGER.info("Server started and listen on:{}", channelFuture.channel().localAddress());
+		}
+
+		// listen for the channel close event
+		channelFuture.channel().closeFuture().sync();
+	}
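+	/*
+	 * Editor's note (assumed client usage): with the pipeline above, a client performs a normal WebSocket handshake
+	 * against webSocketPath, e.g.
+	 *
+	 *   ws://<host>:<netty.server.port>/audioAnalysis/<userId>
+	 *
+	 * NettyServerHandler later extracts the trailing userId (or a "?<userId>" suffix) from the handshake URI, so the
+	 * exact URL shape shown here is inferred from that parsing logic rather than documented behaviour.
+	 */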
+
+	/**
+	 * Release resources
+	 * @throws InterruptedException
+	 */
+	@PreDestroy
+	public void destroy() throws InterruptedException {
+		if (bossGroup != null) {
+			bossGroup.shutdownGracefully().sync();
+		}
+		if (workGroup != null) {
+			workGroup.shutdownGracefully().sync();
+		}
+	}
+
+	@PostConstruct()
+	public void init() {
+		// the Netty server must run in its own thread
+		new Thread(() -> {
+			try {
+				start();
+			} catch (InterruptedException e) {
+				e.printStackTrace();
+			}
+		}).start();
+	}
+
+}

+ 9 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/ChannelContextConstants.java

@@ -0,0 +1,9 @@
+package com.yonge.netty.server.handler;
+
+import io.netty.util.AttributeKey;
+
+public class ChannelContextConstants {
+
+	public static final AttributeKey<String> CHANNEL_ATTR_KEY_ACTION = AttributeKey.newInstance("action");
+	
+}

+ 135 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/NettyChannelManager.java

@@ -0,0 +1,135 @@
+package com.yonge.netty.server.handler;
+
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelId;
+import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
+import io.netty.util.AttributeKey;
+
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import com.ym.mec.util.json.JsonUtil;
+
+@Component
+public class NettyChannelManager {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(NettyChannelManager.class);
+
+	/**
+	 * {@link Channel#attr(AttributeKey)} attribute that holds the user bound to the Channel
+	 */
+	private static final AttributeKey<String> CHANNEL_ATTR_KEY_USER = AttributeKey.newInstance("user");
+
+	/**
+	 * Channel registry
+	 */
+	private ConcurrentMap<ChannelId, Channel> channels = new ConcurrentHashMap<ChannelId, Channel>();
+
+	/**
+	 * Mapping from user to Channel.
+	 *
+	 * It allows looking up the Channel of a given user, so messages can be sent to a specific user.
+	 */
+	private ConcurrentMap<String, Channel> userChannels = new ConcurrentHashMap<String, Channel>();
+
+	/**
+	 * Add a Channel to {@link #channels}
+	 *
+	 * @param channel Channel
+	 */
+	public void add(Channel channel) {
+		channels.put(channel.id(), channel);
+	}
+
+	/**
+	 * Add the given user to {@link #userChannels}
+	 *
+	 * @param channel Channel
+	 * @param user user identifier
+	 */
+	public void addUser(Channel channel, String user) {
+		Channel existChannel = channels.get(channel.id());
+		if (existChannel == null) {
+			LOGGER.error("[addUser][连接({}) 不存在]", channel.id());
+			return;
+		}
+		// 设置属性
+		channel.attr(CHANNEL_ATTR_KEY_USER).set(user);
+		// 添加到 userChannels
+		userChannels.put(user, channel);
+
+		LOGGER.info("[add][用户({})加入,总数({})]", user, channels.size());
+	}
+
+	/**
+	 * Get the user bound to the channel
+	 * @param channel
+	 * @return
+	 */
+	public String getUser(Channel channel) {
+		if (channel.hasAttr(CHANNEL_ATTR_KEY_USER)) {
+			return channel.attr(CHANNEL_ATTR_KEY_USER).get();
+		}
+		return null;
+	}
+
+	/**
+	 * Remove a Channel from {@link #channels} and {@link #userChannels}
+	 *
+	 * @param channel Channel
+	 */
+	public void remove(Channel channel) {
+		// remove from channels
+		channels.remove(channel.id());
+
+		String user = "";
+		// remove from userChannels
+		if (channel.hasAttr(CHANNEL_ATTR_KEY_USER)) {
+			user = channel.attr(CHANNEL_ATTR_KEY_USER).get();
+			userChannels.remove(user);
+		}
+		LOGGER.info("[remove][user({}) removed, total({})]", user, channels.size());
+	}
+
+	/**
+	 * Send a message to the given user
+	 *
+	 * @param user user identifier
+	 * @param message message body
+	 */
+	public void sendTextMessage(String user, Object message) {
+		// look up the Channel of the user
+		Channel channel = userChannels.get(user);
+		if (channel == null) {
+			LOGGER.error("[send][channel does not exist]");
+			return;
+		}
+		if (!channel.isActive()) {
+			LOGGER.error("[send][channel({}) is not active]", channel.id());
+			return;
+		}
+		// send the message
+		channel.writeAndFlush(new TextWebSocketFrame(JsonUtil.toJSONString(message)));
+	}
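+	/*
+	 * Editor's note (usage sketch; user id and command are assumed): callers typically pass a WebSocketResponse,
+	 * which is serialized with JsonUtil and wrapped into a TextWebSocketFrame, e.g.
+	 *
+	 *   nettyChannelManager.sendTextMessage("12345", new WebSocketResponse<>("someCommand", body));
+	 *
+	 * where "12345" is the user registered by addUser() during the WebSocket handshake.
+	 */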
+
+	/**
+	 * Send a message to all users
+	 *
+	 * @param message message body
+	 */
+	public void sendAll(Object message) {
+		for (Channel channel : channels.values()) {
+			if (!channel.isActive()) {
+				LOGGER.error("[send][连接({})未激活]", channel.id());
+				return;
+			}
+			// 发送消息
+			channel.writeAndFlush(message);
+		}
+	}
+
+}

+ 77 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/NettyServerHandler.java

@@ -0,0 +1,77 @@
+package com.yonge.netty.server.handler;
+
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundHandlerAdapter;
+import io.netty.handler.codec.http.HttpHeaders;
+import io.netty.handler.codec.http.websocketx.WebSocketServerProtocolHandler;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.security.oauth2.common.OAuth2AccessToken;
+import org.springframework.stereotype.Component;
+
+@Component
+@ChannelHandler.Sharable
+public class NettyServerHandler extends ChannelInboundHandlerAdapter {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(NettyServerHandler.class);
+
+	@Autowired
+	private NettyChannelManager channelManager;
+
+	@Override
+	public void channelActive(ChannelHandlerContext ctx) {
+		// register with the channel manager
+		channelManager.add(ctx.channel());
+	}
+
+	@Override
+	public void channelUnregistered(ChannelHandlerContext ctx) {
+		// remove from the channel manager
+		channelManager.remove(ctx.channel());
+	}
+
+	@Override
+	public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
+		LOGGER.error("[exceptionCaught][连接({}) 发生异常]", ctx.channel().id(), cause);
+		// 断开连接
+		ctx.channel().close();
+	}
+
+	@Override
+	public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
+		if (evt instanceof WebSocketServerProtocolHandler.HandshakeComplete) {
+			WebSocketServerProtocolHandler.HandshakeComplete handshakeComplete = (WebSocketServerProtocolHandler.HandshakeComplete) evt;
+			String requestUri = handshakeComplete.requestUri();
+			
+			String userId = StringUtils.substringAfterLast(requestUri, "?");
+			
+			if(StringUtils.isBlank(userId) || !StringUtils.isNumeric(userId)){
+				userId = StringUtils.substringAfterLast(requestUri, "/");
+			}
+			
+			Channel channel = ctx.channel();
+			
+			if(!StringUtils.isNumeric(userId)){
+				// close the connection and stop processing this handshake
+				channel.close();
+				return;
+			}
+			
+			channelManager.addUser(channel, userId);
+			
+			LOGGER.info("userId:[{}]", userId);
+			
+			HttpHeaders httpHeaders = handshakeComplete.requestHeaders();
+			String authHeader = httpHeaders.get("Authorization");
+			
+			String tokenValue = authHeader.toLowerCase().replace(OAuth2AccessToken.BEARER_TYPE.toLowerCase(), StringUtils.EMPTY).trim();
+			LOGGER.info("token:[{}]", tokenValue);
+		}
+		super.userEventTriggered(ctx, evt);
+	}
+
+}

+ 78 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/BinaryWebSocketFrameHandler.java

@@ -0,0 +1,78 @@
+package com.yonge.netty.server.handler.message;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufUtil;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
+
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.stereotype.Component;
+
+import com.yonge.netty.server.handler.ChannelContextConstants;
+import com.yonge.netty.server.handler.NettyChannelManager;
+
+@Component
+@ChannelHandler.Sharable
+public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<BinaryWebSocketFrame> implements ApplicationContextAware,InitializingBean {
+
+	private final static Logger LOGGER = LoggerFactory.getLogger(BinaryWebSocketFrameHandler.class);
+	
+	@Autowired
+	private NettyChannelManager nettyChannelManager;
+
+	private ApplicationContext applicationContext;
+	
+	private Map<String, MessageHandler> handlerMap;
+
+	@Override
+	protected void channelRead0(ChannelHandlerContext ctx, BinaryWebSocketFrame frame) throws Exception {
+
+		Channel channel = ctx.channel();
+
+		ByteBuf buf = frame.content().retain();
+
+		try {
+			byte[] datas = ByteBufUtil.getBytes(buf);
+
+			String user = nettyChannelManager.getUser(channel);
+			
+			String action = channel.attr(ChannelContextConstants.CHANNEL_ATTR_KEY_ACTION).get();
+			
+			if(handlerMap == null){
+				LOGGER.error("消息处理器没有初始化");
+			}
+			MessageHandler handler = handlerMap.get(action);
+			
+			if(handler != null){
+				handler.handleBinaryMessage(user, channel, datas);
+			}
+			
+		} finally {
+			buf.release();
+		}
+	}
+
+	@Override
+	public void afterPropertiesSet() throws Exception {
+		handlerMap = applicationContext.getBeansOfType(MessageHandler.class).values().stream()
+				.collect(Collectors.toMap(MessageHandler::getAction, t -> t));
+	}
+
+	@Override
+	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
+		this.applicationContext = applicationContext;
+	}
+
+}

+ 12 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/MessageHandler.java

@@ -0,0 +1,12 @@
+package com.yonge.netty.server.handler.message;
+
+import io.netty.channel.Channel;
+
+public interface MessageHandler {
+	
+	String getAction();
+	
+	boolean handleTextMessage(String user, Channel channel, String text);
+
+	boolean handleBinaryMessage(String user, Channel channel, byte[] bytes);
+}

+ 75 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/TextWebSocketHandler.java

@@ -0,0 +1,75 @@
+package com.yonge.netty.server.handler.message;
+
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
+
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.stereotype.Component;
+
+import com.alibaba.fastjson.JSONPath;
+import com.yonge.netty.server.handler.ChannelContextConstants;
+import com.yonge.netty.server.handler.NettyChannelManager;
+
+@Component
+@ChannelHandler.Sharable
+public class TextWebSocketHandler extends SimpleChannelInboundHandler<TextWebSocketFrame> implements ApplicationContextAware,InitializingBean {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(TextWebSocketHandler.class);
+
+	@Autowired
+	private NettyChannelManager nettyChannelManager;
+	
+	private ApplicationContext applicationContext;
+	
+	private Map<String, MessageHandler> handlerMap;
+
+	@Override
+	protected void channelRead0(ChannelHandlerContext ctx, TextWebSocketFrame frame) throws Exception {
+
+		Channel channel = ctx.channel();
+
+		String jsonMsg = frame.text();
+		
+		LOGGER.info("接收到客户端的消息内容:{}", jsonMsg);
+		
+		String action = (String) JSONPath.extract(jsonMsg, "$.header.type");
+		
+		if(StringUtils.isNoneBlank(action)){
+			channel.attr(ChannelContextConstants.CHANNEL_ATTR_KEY_ACTION).set(action);
+			
+			if(handlerMap == null){
+				LOGGER.error("消息处理器没有初始化");
+			}
+			MessageHandler handler = handlerMap.get(action);
+			
+			if(handler != null){
+				handler.handleTextMessage(nettyChannelManager.getUser(channel), channel, jsonMsg);
+			}
+		}
+	}
+
+	@Override
+	public void afterPropertiesSet() throws Exception {
+		handlerMap = applicationContext.getBeansOfType(MessageHandler.class).values().stream()
+				.collect(Collectors.toMap(MessageHandler::getAction, t -> t));
+	}
+
+	@Override
+	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
+		this.applicationContext = applicationContext;
+	}
+
+}

+ 112 - 0
audio-analysis/src/main/java/com/yonge/netty/server/processor/WaveformWriter.java

@@ -0,0 +1,112 @@
+/*
+ *      _______                       _____   _____ _____  
+ *     |__   __|                     |  __ \ / ____|  __ \ 
+ *        | | __ _ _ __ ___  ___  ___| |  | | (___ | |__) |
+ *        | |/ _` | '__/ __|/ _ \/ __| |  | |\___ \|  ___/ 
+ *        | | (_| | |  \__ \ (_) \__ \ |__| |____) | |     
+ *        |_|\__,_|_|  |___/\___/|___/_____/|_____/|_|     
+ *                                                         
+ * -------------------------------------------------------------
+ *
+ * TarsosDSP is developed by Joren Six at IPEM, University Ghent
+ *  
+ * -------------------------------------------------------------
+ *
+ *  Info: http://0110.be/tag/TarsosDSP
+ *  Github: https://github.com/JorenSix/TarsosDSP
+ *  Releases: http://0110.be/releases/TarsosDSP/
+ *  
+ *  TarsosDSP includes modified source code by various authors,
+ *  for credits and info, see README.
+ * 
+ */
+
+package com.yonge.netty.server.processor;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import be.tarsos.dsp.writer.WaveHeader;
+
+/**
+ * Writes WAV files
+ */
+public class WaveformWriter {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(WaveformWriter.class);
+
+	private RandomAccessFile randomAccessFile;
+
+	private final String fileName;
+
+	public static final short CHANNEL_NUM = 1;
+
+	public static final int SAMPLE_RATE = 44100;
+
+	public static final short BITS_PER_SAMPLE = 16;
+
+	public static final int HEADER_LENGTH = 44;
+
+	public WaveformWriter(String fileName) {
+
+		this.fileName = fileName;
+		try {
+			randomAccessFile = new RandomAccessFile(fileName, "rw");
+			randomAccessFile.write(new byte[HEADER_LENGTH]);
+		} catch (IOException e) {
+			LOGGER.error("创建WAV文件出现异常[{}]:{}", fileName, e.getMessage());
+			e.printStackTrace();
+		}
+
+	}
+
+	public boolean process(byte[] datas) {
+
+		try {
+			randomAccessFile.write(datas);
+		} catch (IOException e) {
+			LOGGER.error("写WAV文件出现异常[{}]:{}", fileName, e.getMessage());
+			e.printStackTrace();
+		}
+
+		return true;
+	}
+
+	public void processingFinished() {
+		try {
+			WaveHeader waveHeader = new WaveHeader(WaveHeader.FORMAT_PCM, CHANNEL_NUM, SAMPLE_RATE, BITS_PER_SAMPLE, (int) randomAccessFile.length()
+					- HEADER_LENGTH);
+
+			ByteArrayOutputStream header = new ByteArrayOutputStream();
+			waveHeader.write(header);
+			randomAccessFile.seek(0);
+			randomAccessFile.write(header.toByteArray());
+			randomAccessFile.close();
+		} catch (IOException e) {
+			LOGGER.error("关闭WAV文件出现异常[{}]:{}", fileName, e.getMessage());
+			e.printStackTrace();
+		}
+	}
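+	/*
+	 * Editor's note (minimal usage sketch, file path assumed): the writer reserves 44 header bytes up front, appends
+	 * raw PCM as it arrives and back-fills the WAV header when recording ends:
+	 *
+	 *   WaveformWriter writer = new WaveformWriter("/mdata/soundCompare/record.wav");
+	 *   writer.process(pcmBytes);      // call repeatedly for each received audio frame
+	 *   writer.processingFinished();   // writes the 44.1 kHz / 16 bit mono header and closes the file
+	 */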
+
+	public File getFile() {
+		return new File(fileName);
+	}
+
+	public long getFileLength(boolean isSubHeadLength) {
+		try {
+			if (isSubHeadLength) {
+				return randomAccessFile.length() - HEADER_LENGTH;
+			}
+			return randomAccessFile.length();
+		} catch (IOException e) {
+			LOGGER.error("读取WAV文件出现异常[{}]:{}", fileName, e.getMessage());
+			e.printStackTrace();
+		}
+		return 0;
+	}
+}

+ 357 - 0
audio-analysis/src/main/java/com/yonge/netty/server/service/AudioCompareHandler.java

@@ -0,0 +1,357 @@
+package com.yonge.netty.server.service;
+
+import io.netty.channel.Channel;
+
+import java.io.File;
+import java.math.BigDecimal;
+import java.text.SimpleDateFormat;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.JSONObject;
+import com.alibaba.fastjson.JSONPath;
+import com.ym.mec.biz.dal.entity.SysMusicCompareRecord;
+import com.ym.mec.biz.dal.enums.DeviceTypeEnum;
+import com.ym.mec.biz.dal.enums.FeatureType;
+import com.ym.mec.biz.dal.enums.HeardLevelEnum;
+import com.ym.mec.biz.service.SysMusicCompareRecordService;
+import com.ym.mec.thirdparty.storage.StoragePluginContext;
+import com.ym.mec.thirdparty.storage.provider.KS3StoragePlugin;
+import com.ym.mec.util.upload.UploadUtil;
+import com.yonge.audio.analysis.AudioFloatConverter;
+import com.yonge.audio.utils.ArrayUtil;
+import com.yonge.netty.dto.SectionAnalysis;
+import com.yonge.netty.dto.UserChannelContext;
+import com.yonge.netty.dto.WebSocketResponse;
+import com.yonge.netty.entity.MusicXmlBasicInfo;
+import com.yonge.netty.entity.MusicXmlNote;
+import com.yonge.netty.server.handler.NettyChannelManager;
+import com.yonge.netty.server.handler.message.MessageHandler;
+import com.yonge.netty.server.processor.WaveformWriter;
+
+@Component
+public class AudioCompareHandler implements MessageHandler {
+	
+	private static final Logger LOGGER = LoggerFactory.getLogger(AudioCompareHandler.class);
+
+	@Autowired
+	private UserChannelContextService userChannelContextService;
+
+	@Autowired
+	private NettyChannelManager nettyChannelManager;
+	
+	@Autowired
+	private SysMusicCompareRecordService sysMusicCompareRecordService;
+
+    @Autowired
+    private StoragePluginContext storagePluginContext;
+
+	/**
+	 * @describe Sample rate (Hz)
+	 */
+	private float sampleRate = 44100;
+
+	/**
+	 * Bits per sample
+	 */
+	private int bitsPerSample = 16;
+
+	/**
+	 * Number of channels
+	 */
+	private int channels = 1;
+
+	/**
+	 * @describe Analysis buffer size in bytes
+	 */
+	private int bufferSize = 1024 * 2;
+
+	private boolean signed = true;
+
+	private boolean bigEndian = false;
+
+	private AudioFormat audioFormat = new AudioFormat(sampleRate, bitsPerSample, channels, signed, bigEndian);
+
+	private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
+
+	private String tmpFileDir = "/mdata/soundCompare/";
+
+	private SimpleDateFormat sdf = new SimpleDateFormat("yyMMddHHmmss");
+	
+	@Override
+	public String getAction() {
+		return "SOUND_COMPARE";
+	}
+
+	@Override
+	public boolean handleTextMessage(String user, Channel channel, String jsonMsg) {
+		
+		String command = (String) JSONPath.extract(jsonMsg, "$.header.commond");
+
+		JSONObject dataObj = (JSONObject) JSONPath.extract(jsonMsg, "$.body");
+		
+		UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
+		
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+
+		switch (command) {
+		case "musicXml": // sync MusicXML metadata
+			
+			musicXmlBasicInfo = JSONObject.toJavaObject(dataObj, MusicXmlBasicInfo.class);
+			
+			userChannelContextService.remove(channel);
+
+			channelContext = new UserChannelContext();
+			
+			channelContext.setHandlerSwitch(false);
+
+			channelContext.getSongMusicXmlMap().put(musicXmlBasicInfo.getExamSongId(), musicXmlBasicInfo);
+			channelContext.init(musicXmlBasicInfo.getPlatform(), musicXmlBasicInfo.getHeardLevel(), musicXmlBasicInfo.getSubjectId(),
+					musicXmlBasicInfo.getBeatLength(), audioFormat.getSampleRate(), bufferSize / 2);
+			channelContext.setUser(user);
+			
+			userChannelContextService.register(channel, channelContext);
+
+			break;
+		case "recordStart": // start an evaluation
+
+			// clear cached state
+			channelContext.resetUserInfo();
+			
+			channelContext.setHandlerSwitch(false);
+			
+			musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
+
+			if (musicXmlBasicInfo != null) {
+				Date date = new Date();
+				SysMusicCompareRecord sysMusicCompareRecord = new SysMusicCompareRecord(FeatureType.CLOUD_STUDY_EVALUATION);
+				sysMusicCompareRecord.setCreateTime(date);
+				sysMusicCompareRecord.setUserId(Integer.parseInt(user));
+				sysMusicCompareRecord.setSysMusicScoreId(musicXmlBasicInfo.getExamSongId());
+				sysMusicCompareRecord.setBehaviorId(musicXmlBasicInfo.getBehaviorId());
+				//sysMusicCompareRecord.setClientId();
+				sysMusicCompareRecord.setDeviceType(DeviceTypeEnum.valueOf(musicXmlBasicInfo.getPlatform()));
+				sysMusicCompareRecord.setSpeed(musicXmlBasicInfo.getSpeed());
+				
+				MusicXmlNote musicXmlNote = musicXmlBasicInfo.getMusicXmlInfos().stream().max(Comparator.comparing(MusicXmlNote::getTimeStamp)).get();
+				sysMusicCompareRecord.setSourceTime((float) ((musicXmlNote.getTimeStamp()+musicXmlNote.getDuration())/1000));
+				sysMusicCompareRecordService.insert(sysMusicCompareRecord);
+				channelContext.setRecordId(sysMusicCompareRecord.getId());
+			}
+			break;
+		case "recordEnd": // finish the evaluation
+		case "recordCancel": // cancel the evaluation
+			if (channelContext == null) {
+				return false;
+			}
+			
+			channelContext.setHandlerSwitch(false);
+
+			WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
+			if (waveFileProcessor != null) {
+				// backfill the WAV header
+				waveFileProcessor.processingFinished();
+			}
+
+			if (StringUtils.equals(command, "recordEnd")) {
+				// build the evaluation report
+				Map<String, Object> params = new HashMap<String, Object>();
+
+				Map<String, Integer> scoreMap = channelContext.evaluateForMusic();
+				for (Entry<String, Integer> entry : scoreMap.entrySet()) {
+					params.put(entry.getKey(), entry.getValue());
+				}
+				
+				// persist the evaluation result
+				Long recordId = channelContext.getRecordId();
+				SysMusicCompareRecord sysMusicCompareRecord = sysMusicCompareRecordService.get(recordId);
+				if(sysMusicCompareRecord != null){
+					musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
+					
+					if (scoreMap != null && scoreMap.size() > 1) {
+						sysMusicCompareRecord.setScore(new BigDecimal(scoreMap.get("score")));
+						sysMusicCompareRecord.setIntonation(new BigDecimal(scoreMap.get("intonation")));
+						sysMusicCompareRecord.setIntegrity(new BigDecimal(scoreMap.get("integrity")));
+						sysMusicCompareRecord.setCadence(new BigDecimal(scoreMap.get("cadence")));
+						sysMusicCompareRecord.setPlayTime(scoreMap.get("playTime") / 1000);
+						
+						LOGGER.info("Score:{} Intonation:{} Integrity:{} Cadence:{}", sysMusicCompareRecord.getScore(),sysMusicCompareRecord.getIntonation(),sysMusicCompareRecord.getIntegrity(),sysMusicCompareRecord.getCadence());
+					}
+					sysMusicCompareRecord.setFeature(FeatureType.CLOUD_STUDY_EVALUATION);
+
+		            String url = null;
+		            try {
+		                String folder = UploadUtil.getFileFloder();
+		                url = storagePluginContext.asyncUploadFile(KS3StoragePlugin.PLUGIN_NAME,"soundCompare/" + folder, waveFileProcessor.getFile(), true);
+		            } catch (Exception e) {
+		                LOGGER.error("Failed to upload the recording file", e);
+		            }
+					sysMusicCompareRecord.setRecordFilePath(url);
+					//sysMusicCompareRecord.setVideoFilePath(videoFilePath);
+
+					Map<String, Object> scoreData = new HashMap<>();
+					List<SectionAnalysis> sectionAnalysisList = channelContext.getDoneSectionAnalysisList();
+					sectionAnalysisList = sectionAnalysisList.stream().filter(t -> t.isIngore() == false).collect(Collectors.toList());
+					scoreData.put("userMeasureScore", sectionAnalysisList.stream().collect(Collectors.toMap(SectionAnalysis :: getIndex, t -> t)));
+
+					Map<String, Object> musicalNotesPlayStats = new HashMap<>();
+					musicalNotesPlayStats.put("detailId", musicXmlBasicInfo.getDetailId());
+					musicalNotesPlayStats.put("examSongId", musicXmlBasicInfo.getExamSongId());
+					musicalNotesPlayStats.put("xmlUrl", musicXmlBasicInfo.getXmlUrl());
+					
+					musicalNotesPlayStats.put("notesData", channelContext.getDoneNoteAnalysisList().stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList()));
+					scoreData.put("musicalNotesPlayStats", musicalNotesPlayStats);
+					sysMusicCompareRecord.setScoreData(JSON.toJSONString(scoreData));
+					
+					sysMusicCompareRecord.setHeardLevel(HeardLevelEnum.valueOf(channelContext.getHardLevel().name()));
+					
+					sysMusicCompareRecordService.saveMusicCompareData(sysMusicCompareRecord);
+				}
+				
+				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("overall", params);
+
+				nettyChannelManager.sendTextMessage(user, resp);
+			}
+
+			// clear cached state
+			channelContext.resetUserInfo();
+
+			break;
+		case "audioPlayStart": // playback started: record the offset and enable analysis
+			
+			Integer offsetTime = dataObj.getInteger("offsetTime");
+			if(offsetTime != null){
+				channelContext.setOffsetMS(offsetTime);
+				channelContext.setHandlerSwitch(true);
+			}
+
+			break;
+		case "videoUpload": // attach an uploaded video file to the evaluation record
+			SysMusicCompareRecord musicCompareRecord = null;
+			if (dataObj.containsKey("recordId")) {
+				musicCompareRecord = sysMusicCompareRecordService.get(dataObj.getLong("recordId"));
+			}
+			if (Objects.nonNull(musicCompareRecord) && dataObj.containsKey("filePath")) {
+				musicCompareRecord.setVideoFilePath(dataObj.getString("filePath"));
+				sysMusicCompareRecordService.update(musicCompareRecord);
+			} else if (Objects.nonNull(musicCompareRecord)) {
+				musicCompareRecord.setVideoFilePath(musicCompareRecord.getRecordFilePath());
+				sysMusicCompareRecordService.update(musicCompareRecord);
+			}
+			
+			break;
+
+		default:
+			// unknown command
+			break;
+		}
+		return true;
+	}
+
+	@Override
+	public boolean handleBinaryMessage(String user, Channel channel, byte[] datas) {
+		
+		UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
+
+		if (channelContext == null) {
+			return false;
+		}
+
+		// append the raw audio to the recording file
+		WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
+		if (waveFileProcessor == null) {
+			File file = new File(tmpFileDir + user + "_" + sdf.format(new Date()) + ".wav");
+			waveFileProcessor = new WaveformWriter(file.getAbsolutePath());
+			channelContext.setWaveFileProcessor(waveFileProcessor);
+		}
+		waveFileProcessor.process(datas);
+		
+		/*datas = channelContext.skipMetronome(datas);
+
+		if (datas.length == 0) {
+			return false;
+		}*/
+
+		channelContext.setChannelBufferBytes(ArrayUtil.mergeByte(channelContext.getChannelBufferBytes(), datas));
+
+		int totalLength = channelContext.getChannelBufferBytes().length;
+		
+		if (channelContext.getHandlerSwitch() == false) {
+			return false;
+		}
+		
+		if (channelContext.getOffsetMS() + channelContext.getBeatDuration() > 0) {
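+			// bytes to skip for the lead-in = sampleRate * (sampleSizeInBits / 8) * (offsetMS + beatDuration) / 1000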
+			int beatByteLength = (int) (audioFormat.getSampleRate() * audioFormat.getSampleSizeInBits() / 8 * (channelContext.getOffsetMS() + channelContext.getBeatDuration()) / 1000);
+			
+			if(totalLength > beatByteLength){
+				if(beatByteLength % 2 != 0){
+					LOGGER.info("beat byte length is odd, decrementing by one");
+					beatByteLength--;
+				}
+				channelContext.setChannelBufferBytes(ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), beatByteLength, totalLength - 1));
+				
+				LOGGER.info("--------Length:{}  Times[{} + {}]:{}--------", waveFileProcessor.getFile().length() - channelContext.getChannelBufferBytes().length, channelContext.getOffsetMS() , channelContext.getBeatDuration(),(waveFileProcessor.getFile().length() - channelContext.getChannelBufferBytes().length) * 1000 /audioFormat.getSampleRate()/2);
+				
+				channelContext.setOffsetMS(0);
+				channelContext.setBeatDuration(0);
+			}else{
+				return false;
+			}
+		}
+		
+		totalLength = channelContext.getChannelBufferBytes().length;
+		
+
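+		// Each iteration consumes bufferSize (2048) bytes = 1024 16-bit samples, roughly 23 ms of audio at 44.1 kHz.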
+		while (totalLength >= bufferSize) {
+			byte[] bufferData = ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), 0, bufferSize - 1);
+
+			if (bufferSize != totalLength) {
+				channelContext.setChannelBufferBytes(ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), bufferSize, totalLength - 1));
+			} else {
+				channelContext.setChannelBufferBytes(new byte[0]);
+			}
+
+			float[] sampleFloats = new float[bufferSize / 2];
+
+			converter.toFloatArray(bufferData, sampleFloats);
+
+			channelContext.handle(sampleFloats, audioFormat);
+
+			MusicXmlBasicInfo musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
+			int sectionIndex = channelContext.getEvaluatingSectionIndex().get();
+
+			// per-measure scoring
+			int score = channelContext.evaluateForSection(sectionIndex, musicXmlBasicInfo.getSubjectId());
+			if (score >= 0) {
+
+				Map<String, Object> params = new HashMap<String, Object>();
+				params.put("score", score);
+				params.put("measureIndex", sectionIndex);
+
+				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("measureScore", params);
+
+				nettyChannelManager.sendTextMessage(user, resp);
+			}
+
+			totalLength = channelContext.getChannelBufferBytes().length;
+		}
+
+		return true;
+	}
+
+}
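For reference, the shape of a SOUND_COMPARE text frame can be sketched from the JSONPath lookups above ("$.header.commond", "$.body") and the command names in the switch; the field values below are illustrative assumptions, not a captured client payload.

import com.alibaba.fastjson.JSONObject;

public class SoundCompareFrameSketch {
	public static void main(String[] args) {
		// header.commond selects the branch in handleTextMessage (the field is spelled "commond" in the protocol)
		JSONObject header = new JSONObject();
		header.put("commond", "audioPlayStart"); // or: musicXml, recordStart, recordEnd, recordCancel, videoUpload

		// body carries command-specific data; audioPlayStart uses offsetTime (milliseconds, value here is made up)
		JSONObject body = new JSONObject();
		body.put("offsetTime", 120);

		JSONObject frame = new JSONObject();
		frame.put("header", header);
		frame.put("body", body);

		System.out.println(frame.toJSONString()); // e.g. {"header":{"commond":"audioPlayStart"},"body":{"offsetTime":120}}
	}
}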

+ 90 - 0
audio-analysis/src/main/java/com/yonge/netty/server/service/PitchDetectionHandler.java

@@ -0,0 +1,90 @@
+package com.yonge.netty.server.service;
+
+import io.netty.channel.Channel;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import com.yonge.audio.analysis.AudioFloatConverter;
+import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.netty.dto.WebSocketResponse;
+import com.yonge.netty.server.handler.NettyChannelManager;
+import com.yonge.netty.server.handler.message.MessageHandler;
+
+@Component
+public class PitchDetectionHandler implements MessageHandler {
+	
+	private final static Logger LOGGER = LoggerFactory.getLogger(PitchDetectionHandler.class);
+
+	/**
+	 * @describe Sample rate (Hz)
+	 */
+	private float sampleRate = 44100;
+
+	/**
+	 * Bits per sample
+	 */
+	private int bitsPerSample = 16;
+
+	/**
+	 * Number of channels
+	 */
+	private int channels = 1;
+	
+	private boolean signed = true;
+
+	private boolean bigEndian = false;
+
+	private AudioFormat audioFormat = new AudioFormat(sampleRate, bitsPerSample, channels, signed, bigEndian);
+
+	private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
+	
+	@Autowired
+	private NettyChannelManager nettyChannelManager;
+	
+	@Override
+	public String getAction() {
+		return "PITCH_DETECTION";
+	}
+
+	@Override
+	public boolean handleTextMessage(String user, Channel channel, String text) {
+
+		return true;
+	}
+
+	@Override
+	public boolean handleBinaryMessage(String userId, Channel channel, byte[] bytes) {
+
+		float[] samples = new float[bytes.length / 2];
+
+		if (samples.length == 0) {
+			return false;
+		}
+
+		converter.toFloatArray(bytes, samples);
+
+		YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length, audioFormat.getSampleRate());
+
+		int playFrequency = (int) frequencyDetector.getFrequency(samples);
+		
+		LOGGER.info("Tuning frequency: {}", playFrequency);
+
+		Map<String, Object> params = new HashMap<String, Object>();
+		params.put("frequency", playFrequency);
+
+		WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("checking", params);
+
+		nettyChannelManager.sendTextMessage(userId, resp);
+
+		return true;
+	}
+	
+}
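A standalone sketch of the same pitch-detection path, assuming the AudioFloatConverter and YINPitchDetector classes added earlier in this commit are on the classpath; the silent PCM buffer stands in for real microphone data.

import javax.sound.sampled.AudioFormat;

import com.yonge.audio.analysis.AudioFloatConverter;
import com.yonge.audio.analysis.detector.YINPitchDetector;

public class PitchDetectionSketch {
	public static void main(String[] args) {
		// 44.1 kHz, 16-bit, mono, signed, little-endian -- the same format the handler assumes
		AudioFormat format = new AudioFormat(44100, 16, 1, true, false);
		AudioFloatConverter converter = AudioFloatConverter.getConverter(format);

		byte[] pcm = new byte[2048];                 // one buffer of silence; a real client sends microphone data
		float[] samples = new float[pcm.length / 2]; // 2 bytes per 16-bit sample
		converter.toFloatArray(pcm, samples);

		YINPitchDetector detector = new YINPitchDetector(samples.length, format.getSampleRate());
		int frequency = (int) detector.getFrequency(samples);
		System.out.println("Detected frequency: " + frequency + " Hz");
	}
}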

+ 32 - 0
audio-analysis/src/main/java/com/yonge/netty/server/service/UserChannelContextService.java

@@ -0,0 +1,32 @@
+package com.yonge.netty.server.service;
+
+import io.netty.channel.Channel;
+
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.springframework.stereotype.Component;
+
+import com.yonge.netty.dto.UserChannelContext;
+
+@Component
+public class UserChannelContextService {
+
+	private ConcurrentHashMap<Channel, UserChannelContext> channelContextMap = new ConcurrentHashMap<Channel, UserChannelContext>();
+	
+	public boolean register(Channel channel,UserChannelContext userChannelContext){
+		channelContextMap.put(channel, userChannelContext);
+		return true;
+	}
+	
+	public boolean remove(Channel channel){
+		if(channel != null){
+			channelContextMap.remove(channel);
+		}
+		return true;
+	}
+	
+	public UserChannelContext getChannelContext(Channel channel){
+		return channelContextMap.get(channel);
+	}
+	
+}

+ 128 - 0
audio-analysis/src/main/resources/application-template.yml

@@ -0,0 +1,128 @@
+server:
+  port: 9004
+  tomcat:
+    accesslog:
+      enabled: true
+      buffered: true
+      directory: /var/logs
+      file-date-format: -yyyy-MM-dd
+      pattern: common
+      prefix: tomcat-audio
+      rename-on-rotate: false
+      request-attributes-enabled: false
+      rotate: true
+      suffix: .log
+      
+netty:
+  server:
+    port: 8090
+
+eureka:
+  client:
+    serviceUrl:
+      defaultZone: http://admin:admin123@localhost:8761/eureka/eureka/
+    instance: 
+      lease-renewal-interval-in-seconds: 5
+
+spring:
+  application:
+    name: audio-analysis-server
+    
+  datasource:
+    name: test
+    url: jdbc:mysql://47.114.1.200:3306/mec_test?useUnicode=true&characterEncoding=UTF8&serverTimezone=Asia/Shanghai&allowMultiQueries=true
+    username: mec_dev
+    password: dayaDataOnline@2019
+    # use the Druid datasource
+    type: com.alibaba.druid.pool.DruidDataSource
+    driver-class-name: com.mysql.cj.jdbc.Driver
+    filters: stat
+    maxActive: 20
+    initialSize: 1
+    maxWait: 60000
+    minIdle: 1
+    timeBetweenEvictionRunsMillis: 60000
+    minEvictableIdleTimeMillis: 300000
+    validationQuery: select 'x'
+    testWhileIdle: true
+    testOnBorrow: false
+    testOnReturn: false
+    poolPreparedStatements: true
+    maxOpenPreparedStatements: 20
+  
+  redis:
+    host: 47.114.1.200
+    port: 6379
+    password: dyym
+    database: 1
+    # connection timeout (milliseconds)
+    timeout: 10000
+    jedis:
+      pool:
+        # maximum connections in the pool (a negative value means no limit)
+        max-active: 20
+        # maximum blocking wait time (a negative value means no limit)
+        max-wait: 10000
+        # maximum idle connections in the pool
+        max-idle: 10
+        # minimum idle connections in the pool
+        min-idle: 5
+    
+
+mybatis:
+    mapperLocations: classpath:config/mybatis/*.xml
+    
+swagger:
+  base-package: com.yonge.audo.controller
+          
+## Authentication
+security:
+  oauth2:
+    client:
+      client-id: app
+      client-secret: app
+    resource:
+      token-info-uri: http://localhost:8001/oauth/check_token
+  
+# Spring Boot Admin related configuration
+management:
+  endpoints:
+    web:
+      exposure:
+        include: "*"
+  endpoint:
+    health:
+      show-details: ALWAYS
+      
+
+ribbon:  
+    ReadTimeout: 60000  
+    ConnectTimeout: 60000
+
+message:
+  debugMode: true
+  
+## hide payment transaction records
+payment:
+  hiddenMode: false
+  # payment channel to hide
+  channel: YQPAY
+  
+eseal:
+  tsign:
+    projectid: 4438776254
+    projectSecret: a94cf63d6361084d232f345d71321691
+    apisUrl: http://smlitsm.tsign.cn:8080/tgmonitor/rest/app!getAPIInfo2
+
+push:
+  jiguang:
+    reqURL: https://api.jpush.cn/v3/push
+    appKey:
+      student: 0e7422e1d6e73637e678716a
+      teacher: 7e0282ca92c12c8c45a93bb3
+      system: 496fc1007dea59b1b4252d2b
+    masterSecret:
+      student: c2361016604eab56ab2db2ac
+      teacher: d47430e2f4755ef5dc050ac5
+      system: a5e51e9cdb25417463afbf7a
+    apns_production: false

+ 16 - 0
audio-analysis/src/main/resources/bootstrap-dev.properties

@@ -0,0 +1,16 @@
+# Specify the active environment
+#spring.profiles.active=dev
+# Nacos server address
+spring.cloud.nacos.config.server-addr=47.114.1.200:8848
+# Defaults to the Public namespace and may be omitted
+spring.cloud.nacos.config.namespace=a5c10b43-0c4d-4e3b-a0ad-9af651cfe89c
+# Config group -- may be omitted when using the Public namespace
+spring.cloud.nacos.config.group=DEFAULT_GROUP
+# Data id prefix -- defaults to ${spring.application.name} when not configured
+spring.cloud.nacos.config.prefix=audio-analysis
+# File extension
+spring.cloud.nacos.config.file-extension=yaml
+# Whether to refresh dynamically
+spring.cloud.nacos.config.refresh.enabled=true
+# Whether to enable the Nacos config center
+spring.cloud.nacos.config.enabled=true

+ 16 - 0
audio-analysis/src/main/resources/bootstrap-prod.properties

@@ -0,0 +1,16 @@
+# Specify the active environment
+#spring.profiles.active=dev
+# Nacos server address
+spring.cloud.nacos.config.server-addr=47.96.80.97:8848
+# Defaults to the Public namespace and may be omitted
+spring.cloud.nacos.config.namespace=f40a7594-4bd0-4bc6-8397-9353c6d2e63a
+# Config group -- may be omitted when using the Public namespace
+spring.cloud.nacos.config.group=DEFAULT_GROUP
+# Data id prefix -- defaults to ${spring.application.name} when not configured
+spring.cloud.nacos.config.prefix=audio-analysis
+# File extension
+spring.cloud.nacos.config.file-extension=yaml
+# Whether to refresh dynamically
+spring.cloud.nacos.config.refresh.enabled=true
+# Whether to enable the Nacos config center
+spring.cloud.nacos.config.enabled=true

+ 16 - 0
audio-analysis/src/main/resources/bootstrap-test.properties

@@ -0,0 +1,16 @@
+# Specify the active environment
+#spring.profiles.active=dev
+# Nacos server address
+spring.cloud.nacos.config.server-addr=47.114.176.40:8848
+# Defaults to the Public namespace and may be omitted
+spring.cloud.nacos.config.namespace=46f06363-b9d6-46f0-9cd7-7b33dcf26bb0
+# Config group -- may be omitted when using the Public namespace
+spring.cloud.nacos.config.group=DEFAULT_GROUP
+# Data id prefix -- defaults to ${spring.application.name} when not configured
+spring.cloud.nacos.config.prefix=audio-analysis
+# File extension
+spring.cloud.nacos.config.file-extension=yaml
+# Whether to refresh dynamically
+spring.cloud.nacos.config.refresh.enabled=true
+# Whether to enable the Nacos config center
+spring.cloud.nacos.config.enabled=true

+ 55 - 0
audio-analysis/src/main/resources/logback-spring.xml

@@ -0,0 +1,55 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration scan="true" scanPeriod="10 seconds">
+
+	<property name="LOG_HOME" value="/mdata/logs/audio-analysis-%d{yyyy-MM-dd_HH}-%i.log" />
+	<property name="CONSOLE_LOG_PATTERN"
+		value="[%X{username} %X{ip} %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36}] : %msg%n" />
+
+	<appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
+		<encoder charset="UTF-8">
+			<pattern>${CONSOLE_LOG_PATTERN}</pattern>
+		</encoder>
+	</appender>
+
+	<appender name="file"
+		class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+			<FileNamePattern>${LOG_HOME}</FileNamePattern>
+			<MaxHistory>90</MaxHistory>
+			<TimeBasedFileNamingAndTriggeringPolicy
+				class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+				<MaxFileSize>20MB</MaxFileSize>
+			</TimeBasedFileNamingAndTriggeringPolicy>
+		</rollingPolicy>
+
+		<encoder>
+			<pattern>${CONSOLE_LOG_PATTERN}</pattern>
+		</encoder>
+	</appender>
+
+	<logger name="com.yonge.audio" level="INFO" />
+
+	<!-- dev environment: log to console -->
+	<springProfile name="dev">
+		<root level="INFO">
+			<appender-ref ref="stdout" />
+			<appender-ref ref="file" />
+		</root>
+	</springProfile>
+	
+	<springProfile name="test">
+		<root level="INFO">
+			<appender-ref ref="stdout" />
+			<appender-ref ref="file" />
+		</root>
+	</springProfile>
+
+	<!-- production environment: log to file -->
+	<springProfile name="prod">
+		<root level="WARN">
+			<appender-ref ref="stdout" />
+			<appender-ref ref="file" />
+		</root>
+	</springProfile>
+
+</configuration>

+ 2 - 1
mec-biz/src/main/java/com/ym/mec/biz/dal/dao/SysMusicCompareWeekDataDao.java

@@ -14,6 +14,7 @@ public interface SysMusicCompareWeekDataDao extends BaseDAO<Integer, SysMusicCom
 
     List<MusicCompareRankingDto> getUserTrainStat(@Param("monday") String monday,
                                                   @Param("orderType") Integer orderType,
-                                                  @Param("heardLevel") String heardLevel,@Param("tenantId") Integer tenantId);
+                                                  @Param("heardLevel") String heardLevel,
+                                                  @Param("organId") Integer organId,@Param("tenantId") Integer tenantId);
 	
 }

+ 54 - 4
mec-biz/src/main/java/com/ym/mec/biz/dal/entity/SysMusicScore.java

@@ -2,12 +2,36 @@ package com.ym.mec.biz.dal.entity;
 
 import com.ym.mec.biz.dal.enums.ClientTypeEnum;
 import com.ym.mec.biz.dal.enums.ExamSongTypeEnum;
+import com.ym.mec.common.enums.AccessSource;
+import com.ym.mec.common.enums.BaseEnum;
+
 import org.apache.commons.lang3.builder.ToStringBuilder;
 
 /**
  * 对应数据库表(sys_music_score):
  */
 public class SysMusicScore {
+	
+	public enum PlayMode  implements BaseEnum<String, PlayMode> {
+		
+		MP3("MP3播放"),XML("XML播放");
+		
+		private String desc;
+		
+		private PlayMode(String desc) {
+			this.desc = desc;
+		}
+
+		@Override
+		public String getCode() {
+			return this.name();
+		}
+
+		public String getDesc() {
+			return desc;
+		}
+		
+	}
 
 	/**  */
 	private Integer id;
@@ -81,6 +105,12 @@ public class SysMusicScore {
 	private Integer showFlag = 0;
 
 	private Boolean isOpenMetronome;
+	
+	private String museScoreUrl;
+	
+	private String museScoreMemo;
+	
+	private PlayMode playMode;
 
 	private String organName;
 
@@ -116,6 +146,14 @@ public class SysMusicScore {
 		this.isOpenMetronome = isOpenMetronome;
 	}
 
+	public PlayMode getPlayMode() {
+		return playMode;
+	}
+
+	public void setPlayMode(PlayMode playMode) {
+		this.playMode = playMode;
+	}
+
 	public String getAccompanimentUrl() {
 		return accompanimentUrl;
 	}
@@ -300,15 +338,27 @@ public class SysMusicScore {
 		return this.createTime;
 	}
 
-	public String getMidiUrl() {
+	public String getMuseScoreUrl() {
+		return museScoreUrl;
+	}
+
+	public void setMuseScoreUrl(String museScoreUrl) {
+		this.museScoreUrl = museScoreUrl;
+	}
+
+	public String getMuseScoreMemo() {
+		return museScoreMemo;
+	}
+
+	public void setMuseScoreMemo(String museScoreMemo) {
+		this.museScoreMemo = museScoreMemo;
+	}
+
+	public String getMidiUrl() {
 		return midiUrl;
 	}
 
 	public void setMidiUrl(String midiUrl) {
 		this.midiUrl = midiUrl;
-	}
-
-	@Override
+	}
+
+	@Override
 	public String toString() {
 		return ToStringBuilder.reflectionToString(this);
 	}

+ 31 - 0
mec-biz/src/main/java/com/ym/mec/biz/dal/entity/SysMusicScoreAccompaniment.java

@@ -1,5 +1,6 @@
 package com.ym.mec.biz.dal.entity;
 
+import com.ym.mec.biz.dal.entity.SysMusicScore.PlayMode;
 import com.ym.mec.biz.dal.enums.ClientTypeEnum;
 
 import org.apache.commons.lang3.builder.ToStringBuilder;
@@ -76,6 +77,12 @@ public class SysMusicScoreAccompaniment {
 	private String renderFrom;
 	
 	private boolean enableEvaluation;
+	
+	private String museScoreUrl;
+	
+	private String museScoreMemo;
+	
+	private PlayMode playMode;
 
 	public String getMetronomeUrl() {
 		return metronomeUrl;
@@ -277,6 +284,30 @@ public class SysMusicScoreAccompaniment {
 		this.metronomeMp3Url = metronomeMp3Url;
 	}
 
+	public String getMuseScoreUrl() {
+		return museScoreUrl;
+	}
+
+	public void setMuseScoreUrl(String museScoreUrl) {
+		this.museScoreUrl = museScoreUrl;
+	}
+
+	public String getMuseScoreMemo() {
+		return museScoreMemo;
+	}
+
+	public void setMuseScoreMemo(String museScoreMemo) {
+		this.museScoreMemo = museScoreMemo;
+	}
+
+	public PlayMode getPlayMode() {
+		return playMode;
+	}
+
+	public void setPlayMode(PlayMode playMode) {
+		this.playMode = playMode;
+	}
+
 	@Override
 	public String toString() {
 		return ToStringBuilder.reflectionToString(this);

+ 10 - 0
mec-biz/src/main/java/com/ym/mec/biz/dal/page/SysMusicCompareRecordQueryInfo.java

@@ -33,6 +33,16 @@ public class SysMusicCompareRecordQueryInfo extends QueryInfo {
 
     private Boolean visitFlag;
 
+    private Integer organId;
+
+    public Integer getOrganId() {
+        return organId;
+    }
+
+    public void setOrganId(Integer organId) {
+        this.organId = organId;
+    }
+
     public Boolean getVisitFlag() {
         return visitFlag;
     }

+ 7 - 0
mec-biz/src/main/java/com/ym/mec/biz/service/SysMusicCompareRecordService.java

@@ -24,6 +24,13 @@ public interface SysMusicCompareRecordService extends BaseService<Long, SysMusic
     void saveMusicCompareData(String phone, SoundCompareHelper soundCompareInfo);
 
     /**
+     * @describe 保存用户评测记录
+     * @param sysMusicCompareRecord
+     * @return void
+     */
+    void saveMusicCompareData(SysMusicCompareRecord sysMusicCompareRecord);
+
+    /**
      * @describe 用户最后一次评测数据
      * @author Joburgess
      * @date 2021/8/23 0023

+ 45 - 16
mec-biz/src/main/java/com/ym/mec/biz/service/impl/DegreeRegistrationServiceImpl.java

@@ -1,20 +1,12 @@
 package com.ym.mec.biz.service.impl;
 
 import java.math.BigDecimal;
-import java.util.*;
-import java.util.stream.Collectors;
-
-import com.alibaba.fastjson.JSON;
-import com.ym.mec.biz.dal.dao.*;
-import com.ym.mec.biz.dal.dto.DegreeRegistrationActivityDto;
-import com.ym.mec.biz.dal.entity.*;
-import com.ym.mec.biz.dal.enums.*;
-import com.ym.mec.biz.service.*;
-import com.ym.mec.common.constant.CommonConstants;
-import com.ym.mec.common.controller.BaseController;
-import com.ym.mec.common.entity.HttpResponseResult;
-import com.ym.mec.common.tenant.TenantContextHolder;
-import com.ym.mec.util.date.DateUtil;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
 
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -22,16 +14,47 @@ import org.springframework.http.HttpStatus;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Isolation;
 import org.springframework.transaction.annotation.Transactional;
+import org.springframework.util.CollectionUtils;
 
+import com.alibaba.fastjson.JSON;
+import com.ym.mec.biz.dal.dao.DegreeRegistrationDao;
+import com.ym.mec.biz.dal.dao.PracticeGroupSellPriceDao;
+import com.ym.mec.biz.dal.dao.SporadicChargeInfoDao;
+import com.ym.mec.biz.dal.dao.StudentDao;
+import com.ym.mec.biz.dal.dao.StudentPaymentOrderDetailDao;
+import com.ym.mec.biz.dal.dao.SysConfigDao;
+import com.ym.mec.biz.dal.dto.DegreeRegistrationActivityDto;
 import com.ym.mec.biz.dal.dto.PageInfoDegree;
+import com.ym.mec.biz.dal.entity.CourseSchedule;
+import com.ym.mec.biz.dal.entity.DegreeRegistration;
+import com.ym.mec.biz.dal.entity.PracticeGroupSellPrice;
+import com.ym.mec.biz.dal.entity.SporadicChargeInfo;
+import com.ym.mec.biz.dal.entity.Student;
+import com.ym.mec.biz.dal.entity.StudentPaymentOrder;
+import com.ym.mec.biz.dal.entity.StudentPaymentOrderDetail;
+import com.ym.mec.biz.dal.entity.SysUserCashAccount;
+import com.ym.mec.biz.dal.enums.DealStatusEnum;
+import com.ym.mec.biz.dal.enums.GroupType;
+import com.ym.mec.biz.dal.enums.OrderDetailTypeEnum;
+import com.ym.mec.biz.dal.enums.OrderTypeEnum;
+import com.ym.mec.biz.dal.enums.PlatformCashAccountDetailTypeEnum;
 import com.ym.mec.biz.dal.page.DegreeQueryInfo;
+import com.ym.mec.biz.service.DegreeRegistrationService;
+import com.ym.mec.biz.service.PayService;
+import com.ym.mec.biz.service.StudentPaymentOrderService;
+import com.ym.mec.biz.service.SysConfigService;
+import com.ym.mec.biz.service.SysTenantConfigService;
+import com.ym.mec.biz.service.SysUserCashAccountService;
+import com.ym.mec.common.constant.CommonConstants;
+import com.ym.mec.common.controller.BaseController;
 import com.ym.mec.common.dal.BaseDAO;
+import com.ym.mec.common.entity.HttpResponseResult;
 import com.ym.mec.common.exception.BizException;
 import com.ym.mec.common.service.IdGeneratorService;
 import com.ym.mec.common.service.impl.BaseServiceImpl;
+import com.ym.mec.common.tenant.TenantContextHolder;
 import com.ym.mec.util.collection.MapUtil;
-
-import org.springframework.util.CollectionUtils;
+import com.ym.mec.util.date.DateUtil;
 
 @Service
 public class DegreeRegistrationServiceImpl extends BaseServiceImpl<Integer, DegreeRegistration> implements DegreeRegistrationService {
@@ -586,6 +609,12 @@ public class DegreeRegistrationServiceImpl extends BaseServiceImpl<Integer, Degr
             }
         }
 
+        if (studentPaymentOrder.getStatus() == DealStatusEnum.CLOSE || studentPaymentOrder.getStatus() == DealStatusEnum.FAILED) {
+            if (studentPaymentOrder.getBalancePaymentAmount() != null && studentPaymentOrder.getBalancePaymentAmount().compareTo(BigDecimal.ZERO) > 0) {
+                sysUserCashAccountService.updateBalance(studentPaymentOrder.getUserId(), studentPaymentOrder.getBalancePaymentAmount(), PlatformCashAccountDetailTypeEnum.REFUNDS, "支付失败-退回");
+            }
+        }
+
         if(Objects.isNull(studentPaymentOrder.getClassGroupId())){
             return true;
         }

+ 3 - 3
mec-biz/src/main/java/com/ym/mec/biz/service/impl/PayServiceImpl.java

@@ -49,7 +49,7 @@ public class PayServiceImpl implements PayService {
 
     @Override
     public Map<String, Object> getPayMap(BigDecimal amount, BigDecimal balanceAmount, String orderNo, String notifyUrl, String returnUrl, String orderSubject, String orderBody, Integer organId, String receiver) throws Exception {
-        String usePaymentConfig = sysConfigDao.findConfigValue("use_payment_config");
+        String usePaymentConfig = sysConfigDao.findConfigValue("use_payment_config");// whether to use the payment channel config (1: use, 0: do not use)
         List<RouteScaleDto> routeScaleDtos = null;
         //使用配置开关
         if (usePaymentConfig.equals("0")) {
@@ -172,7 +172,7 @@ public class PayServiceImpl implements PayService {
      * @param amount 金额
      */
     private List<RouteScaleDto> getAmountChannel(Integer organId, BigDecimal amount, String receiver) {
-        String amountChannel = sysConfigDao.findConfigValue("amount_channel");
+        String amountChannel = sysConfigDao.findConfigValue("amount_channel"); // profit-sharing rules by amount
         if (StringUtils.isBlank(amountChannel)) {
             return null;
         }
@@ -233,7 +233,7 @@ public class PayServiceImpl implements PayService {
         if (receiver == null || !receiver.equals("sporadic")) {
             return null;
         }
-        String SporadicChannel = sysConfigDao.findConfigValue("sporadic_channel");
+        String SporadicChannel = sysConfigDao.findConfigValue("sporadic_channel"); // collection rules for sporadic payments
         if (StringUtils.isBlank(SporadicChannel)) {
             return null;
         }

+ 2 - 2
mec-biz/src/main/java/com/ym/mec/biz/service/impl/StudentServeServiceImpl.java

@@ -969,9 +969,9 @@ public class StudentServeServiceImpl implements StudentServeService {
         //课后作业
         Set<Integer> hss = studentCourseHomeworkDao.checkStudentHaveHomeworkInDateRange(monDayDate.toString(), sunDayDate.toString(), studentIds);
         //课外训练
-        Set<Integer> ess = extracurricularExercisesReplyDao.checkStudentHaveExercisesInDateRange(monDayDate.toString(), sunDayDate.toString(), studentIds);
+        //Set<Integer> ess = extracurricularExercisesReplyDao.checkStudentHaveExercisesInDateRange(monDayDate.toString(), sunDayDate.toString(), studentIds);
         for (Integer studentId : studentIds) {
-            if(!hss.contains(studentId)&&!ess.contains(studentId)){
+            if(!hss.contains(studentId)){
                 result.put("isAssignHomework", 0);
                 return result;
             }

+ 14 - 1
mec-biz/src/main/java/com/ym/mec/biz/service/impl/SysMusicCompareRecordServiceImpl.java

@@ -27,6 +27,7 @@ import com.ym.mec.util.date.DateUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
 import org.springframework.util.CollectionUtils;
 
 import java.math.BigDecimal;
@@ -131,6 +132,18 @@ public class SysMusicCompareRecordServiceImpl extends BaseServiceImpl<Long, SysM
 	}
 
 	@Override
+	@Transactional
+	public void saveMusicCompareData(SysMusicCompareRecord sysMusicCompareRecord) {
+		
+		Integer userId = sysMusicCompareRecord.getUserId();
+		
+		sysMusicCompareRecordDao.update(sysMusicCompareRecord);
+		studentDao.addStudentCloudStudySequenceDays(userId);
+		sysMusicCompareWeekDataService
+				.updateUserWeekTrainData(userId, LocalDate.now().with(DateUtil.weekFields.dayOfWeek(), DayOfWeek.MONDAY.getValue()));
+	}
+
+	@Override
 	public Object getLastEvaluationMusicalNotesPlayStats(Integer userId, Long recordId) {
 		SysMusicCompareRecord userLastEvaluationData;
 		if(Objects.nonNull(recordId)){
@@ -166,7 +179,7 @@ public class SysMusicCompareRecordServiceImpl extends BaseServiceImpl<Long, SysM
 		StatDto result = new StatDto();
 		MusicCompareRankingDto head = new MusicCompareRankingDto();
 		head.setUserId(queryInfo.getUserId());
-		List<MusicCompareRankingDto> userTrainStat = sysMusicCompareWeekDataDao.getUserTrainStat(queryInfo.getStartTime(), queryInfo.getOrderType(), Objects.isNull(queryInfo.getHeardLevel())?null:queryInfo.getHeardLevel().getCode(),queryInfo.getTeacherId());
+		List<MusicCompareRankingDto> userTrainStat = sysMusicCompareWeekDataDao.getUserTrainStat(queryInfo.getStartTime(), queryInfo.getOrderType(), Objects.isNull(queryInfo.getHeardLevel())?null:queryInfo.getHeardLevel().getCode(),queryInfo.getTeacherId(), queryInfo.getTenantId());
 		List<MusicCompareRankingDto> detail = new ArrayList<>();
 
 		List<Integer> studentIds = userTrainStat.stream().limit(10).map(MusicCompareRankingDto::getUserId).collect(Collectors.toList());

+ 152 - 141
mec-biz/src/main/java/com/ym/mec/biz/service/impl/VipGroupServiceImpl.java

@@ -2778,157 +2778,168 @@ public class VipGroupServiceImpl extends BaseServiceImpl<Long, VipGroup> impleme
 		ClassGroup classGroup = classGroupDao.get(order.getClassGroupId());
 		if(studentPaymentOrder.getStatus() == DealStatusEnum.SUCCESS){
 			VipGroup vipGroup = vipGroupDao.get(Long.parseLong(classGroup.getMusicGroupId()));
-			//活动赠送
-			Integer activityUserMapperId = studentPaymentOrderService.activityGive(vipGroup.getVipGroupActivityId(),studentPaymentOrder,
-					vipGroup.getId(),null,vipGroup.getUserId());
-
-			//生成班级学员关联
-			ClassGroupStudentMapper classGroupStudentMapper = new ClassGroupStudentMapper();
-			classGroupStudentMapper.setMusicGroupId(classGroup.getMusicGroupId());
-			classGroupStudentMapper.setClassGroupId(classGroup.getId());
-			classGroupStudentMapper.setUserId(userId);
-			classGroupStudentMapper.setCreateTime(date);
-			classGroupStudentMapper.setStatus(ClassGroupStudentStatusEnum.NORMAL);
-			classGroupStudentMapper.setGroupType(GroupType.VIP);
-			classGroupStudentMapperDao.insert(classGroupStudentMapper);
+			if(vipGroup.getStatus() == VipGroupStatusEnum.DELETE ||
+					vipGroup.getStatus() == VipGroupStatusEnum.CANCEL ||
+					vipGroup.getStatus() == VipGroupStatusEnum.PAUSE){
+				// credit the user's balance
+				sysUserCashAccountService.updateBalance(order.getUserId(), order.getActualAmount().add(order.getBalancePaymentAmount()),
+						PlatformCashAccountDetailTypeEnum.REFUNDS, "课程组关闭退还课程余额,订单号:" + order.getOrderNo());
+
+				// return the coupon
+				sysCouponCodeService.quit(order.getCouponCodeId());
+			}else {
+				//活动赠送
+				Integer activityUserMapperId = studentPaymentOrderService.activityGive(vipGroup.getVipGroupActivityId(),studentPaymentOrder,
+						vipGroup.getId(),null,vipGroup.getUserId());
 
-			//获取班级实际人数
-			Integer studentNum = classGroupStudentMapperDao.countGroupNormalStudentNum(VIP, classGroup.getMusicGroupId());
-			//课程组人数已满,变更状态
-			if(studentNum.equals(classGroup.getExpectStudentNum())){
-				vipGroup.setStatus(VipGroupStatusEnum.PROGRESS);
-				classGroup.setDelFlag(0);
-				VipGroupDefaultClassesUnitPrice vipGroupDefaultClassesUnitPrice = vipGroupDefaultClassesUnitPriceDao.getByVipGroupCategory(vipGroup.getVipGroupCategoryId(), vipGroup.getOrganId());
+				//生成班级学员关联
+				ClassGroupStudentMapper classGroupStudentMapper = new ClassGroupStudentMapper();
+				classGroupStudentMapper.setMusicGroupId(classGroup.getMusicGroupId());
+				classGroupStudentMapper.setClassGroupId(classGroup.getId());
+				classGroupStudentMapper.setUserId(userId);
+				classGroupStudentMapper.setCreateTime(date);
+				classGroupStudentMapper.setStatus(ClassGroupStudentStatusEnum.NORMAL);
+				classGroupStudentMapper.setGroupType(GroupType.VIP);
+				classGroupStudentMapperDao.insert(classGroupStudentMapper);
+
+				//获取班级实际人数
+				Integer studentNum = classGroupStudentMapperDao.countGroupNormalStudentNum(VIP, classGroup.getMusicGroupId());
+				//课程组人数已满,变更状态
+				if(studentNum.equals(classGroup.getExpectStudentNum())){
+					vipGroup.setStatus(VipGroupStatusEnum.PROGRESS);
+					classGroup.setDelFlag(0);
+					VipGroupDefaultClassesUnitPrice vipGroupDefaultClassesUnitPrice = vipGroupDefaultClassesUnitPriceDao.getByVipGroupCategory(vipGroup.getVipGroupCategoryId(), vipGroup.getOrganId());
+
+					//生成课表
+					List<CourseSchedule> courseSchedules = JSON.parseArray(vipGroup.getCourseSchedulesJson(),CourseSchedule.class);
+					classGroup.setTotalClassTimes(courseSchedules.size());
+
+					courseScheduleService.batchAddCourseSchedule(courseSchedules);
+
+					//考勤信息
+					List<TeacherAttendance> teacherAttendances = new ArrayList<>();
+					//创建课酬信息
+					List<CourseScheduleTeacherSalary> courseScheduleTeacherSalaries = new ArrayList<>();
+					Map<String, BigDecimal> stringBigDecimalMap = countVipGroupPredictFee1(vipGroup, vipGroup.getUserId(), null);
+					for (CourseSchedule courseSchedule : courseSchedules) {
+						//创建教师课程薪水记录
+						CourseScheduleTeacherSalary courseScheduleTeacherSalary = new CourseScheduleTeacherSalary();
+						courseScheduleTeacherSalary.setCourseScheduleId(courseSchedule.getId());
+						courseScheduleTeacherSalary.setGroupType(courseSchedule.getGroupType());
+						courseScheduleTeacherSalary.setMusicGroupId(courseSchedule.getMusicGroupId());
+						courseScheduleTeacherSalary.setTeacherRole(TeachTypeEnum.BISHOP);
+						courseScheduleTeacherSalary.setUserId(courseSchedule.getActualTeacherId().intValue());
+						courseScheduleTeacherSalary.setClassGroupId(courseSchedule.getClassGroupId());
+						courseScheduleTeacherSalary.setExpectSalary(stringBigDecimalMap.get("offlineTeacherSalary"));
+						courseScheduleTeacherSalary.setActualSalary(null);
+						courseScheduleTeacherSalaries.add(courseScheduleTeacherSalary);
+
+						TeacherAttendance teacherAttendance = new TeacherAttendance();
+						teacherAttendance.setGroupType(courseSchedule.getGroupType());
+						teacherAttendance.setClassGroupId(courseSchedule.getClassGroupId());
+						teacherAttendance.setMusicGroupId(courseSchedule.getMusicGroupId());
+						teacherAttendance.setTeacherId(courseSchedule.getActualTeacherId());
+						teacherAttendance.setCourseScheduleId(courseSchedule.getId());
+						teacherAttendances.add(teacherAttendance);
+					}
+					courseScheduleTeacherSalaryDao.batchInsert(courseScheduleTeacherSalaries);
+					teacherAttendanceDao.batchInsert(teacherAttendances);
 
-				//生成课表
-				List<CourseSchedule> courseSchedules = JSON.parseArray(vipGroup.getCourseSchedulesJson(),CourseSchedule.class);
-				classGroup.setTotalClassTimes(courseSchedules.size());
+					//群聊数据
+					Map<Integer,String> userRoleMap = new HashMap<Integer, String>(5);
+					if(Objects.nonNull(vipGroup.getEducationalTeacherId())){
+						userRoleMap.put(vipGroup.getEducationalTeacherId(),"乐团主管");
+					}
+					userRoleMap.put(vipGroup.getUserId(),"指导老师");
+					//生成课程学员关联
+					List<CourseScheduleStudentPayment> courseScheduleStudentPayments = new ArrayList<>();
+					List<ClassGroupStudentMapper> classGroupStudents = classGroupStudentMapperDao.findByClassGroup(classGroup.getId());
+
+					List<Integer> studentIdList = classGroupStudents.stream().map(e -> e.getUserId()).collect(Collectors.toList());
+					for (ClassGroupStudentMapper classGroupStudent : classGroupStudents) {
+						StudentPaymentOrder successOrder = studentPaymentOrderDao.findByStudentVipGroup(vipGroup.getId(), classGroupStudent.getUserId(), "SUCCESS");
+						//实际支付金额,去除优惠券
+						BigDecimal actualPrice = successOrder.getExpectAmount();
+						BigDecimal divide = actualPrice.divide(new BigDecimal(courseSchedules.size()), ROUND_DOWN);
+						BigDecimal firstAmount = actualPrice.subtract(divide.multiply(new BigDecimal(courseSchedules.size()))).add(divide);
+						for (int i = 0; i < courseSchedules.size(); i++) {
+							CourseSchedule courseSchedule = courseSchedules.get(i);
+							CourseScheduleStudentPayment courseScheduleStudentPayment = new CourseScheduleStudentPayment();
+							courseScheduleStudentPayment.setUserId(classGroupStudent.getUserId());
+							courseScheduleStudentPayment.setGroupType(courseSchedule.getGroupType());
+							courseScheduleStudentPayment.setMusicGroupId(courseSchedule.getMusicGroupId());
+							courseScheduleStudentPayment.setCourseScheduleId(courseSchedule.getId());
+							courseScheduleStudentPayment.setClassGroupId(courseSchedule.getClassGroupId());
+							if (i == 0) {
+								courseScheduleStudentPayment.setExpectPrice(firstAmount);
+							}else{
+								courseScheduleStudentPayment.setExpectPrice(divide);
+							}
+							courseScheduleStudentPayment.setOriginalPrice(TeachModeEnum.ONLINE.equals(courseSchedule.getTeachMode())?vipGroupDefaultClassesUnitPrice.getOnlineClassesUnitPrice():vipGroupDefaultClassesUnitPrice.getOfflineClassesUnitPrice());
+							courseScheduleStudentPayment.setActualPrice(courseScheduleStudentPayment.getExpectPrice());
+							courseScheduleStudentPayments.add(courseScheduleStudentPayment);
+						}
+						userRoleMap.put(classGroupStudent.getUserId(),null);
+					}
+					studentDao.updateStudentServiceTag(null, studentIdList, YesOrNoEnum.YES.getCode());
 
-				courseScheduleService.batchAddCourseSchedule(courseSchedules);
+					courseScheduleStudentPaymentDao.batchInsert(courseScheduleStudentPayments);
+//				courseScheduleService.checkNewCourseSchedules(courseSchedules, false,false);
 
-				//考勤信息
-				List<TeacherAttendance> teacherAttendances = new ArrayList<>();
-				//创建课酬信息
-				List<CourseScheduleTeacherSalary> courseScheduleTeacherSalaries = new ArrayList<>();
-				Map<String, BigDecimal> stringBigDecimalMap = countVipGroupPredictFee1(vipGroup, vipGroup.getUserId(), null);
-				for (CourseSchedule courseSchedule : courseSchedules) {
-					//创建教师课程薪水记录
-					CourseScheduleTeacherSalary courseScheduleTeacherSalary = new CourseScheduleTeacherSalary();
-					courseScheduleTeacherSalary.setCourseScheduleId(courseSchedule.getId());
-					courseScheduleTeacherSalary.setGroupType(courseSchedule.getGroupType());
-					courseScheduleTeacherSalary.setMusicGroupId(courseSchedule.getMusicGroupId());
-					courseScheduleTeacherSalary.setTeacherRole(TeachTypeEnum.BISHOP);
-					courseScheduleTeacherSalary.setUserId(courseSchedule.getActualTeacherId().intValue());
-					courseScheduleTeacherSalary.setClassGroupId(courseSchedule.getClassGroupId());
-					courseScheduleTeacherSalary.setExpectSalary(stringBigDecimalMap.get("offlineTeacherSalary"));
-					courseScheduleTeacherSalary.setActualSalary(null);
-					courseScheduleTeacherSalaries.add(courseScheduleTeacherSalary);
-
-					TeacherAttendance teacherAttendance = new TeacherAttendance();
-					teacherAttendance.setGroupType(courseSchedule.getGroupType());
-					teacherAttendance.setClassGroupId(courseSchedule.getClassGroupId());
-					teacherAttendance.setMusicGroupId(courseSchedule.getMusicGroupId());
-					teacherAttendance.setTeacherId(courseSchedule.getActualTeacherId());
-					teacherAttendance.setCourseScheduleId(courseSchedule.getId());
-					teacherAttendances.add(teacherAttendance);
+					imGroupService.create(classGroup.getId().longValue(), null, classGroup.getName(), classGroup.getName(), vipGroup.getName(), null, null, GroupType.VIP.getCode());
+					imGroupMemberService.join(classGroup.getId().longValue(), userRoleMap);
+					imUserFriendService.refreshGroupImUserFriend(classGroup.getMusicGroupId(),classGroup.getGroupType());
 				}
-				courseScheduleTeacherSalaryDao.batchInsert(courseScheduleTeacherSalaries);
-				teacherAttendanceDao.batchInsert(teacherAttendances);
-
-				//群聊数据
-				Map<Integer,String> userRoleMap = new HashMap<Integer, String>(5);
-				if(Objects.nonNull(vipGroup.getEducationalTeacherId())){
-					userRoleMap.put(vipGroup.getEducationalTeacherId(),"乐团主管");
+				SysUserCashAccount sysUserCashAccount = sysUserCashAccountService.get(userId);
+				//插入缴费明细
+				//收入
+				SysUserCashAccountDetail sysUserIncomeCashAccountDetail = new SysUserCashAccountDetail();
+				sysUserIncomeCashAccountDetail.setUserId(userId);
+				sysUserIncomeCashAccountDetail.setType(PlatformCashAccountDetailTypeEnum.RECHARGE);
+				sysUserIncomeCashAccountDetail.setStatus(DealStatusEnum.SUCCESS);
+				sysUserIncomeCashAccountDetail.setAmount(order.getActualAmount());
+				sysUserIncomeCashAccountDetail.setBalance(sysUserCashAccount.getBalance().add(order.getActualAmount()));
+				sysUserIncomeCashAccountDetail.setAttribute(order.getTransNo());
+				sysUserIncomeCashAccountDetail.setChannel(studentPaymentOrder.getPaymentChannel());
+				sysUserIncomeCashAccountDetail.setComAmount(studentPaymentOrder.getComAmount());
+				sysUserIncomeCashAccountDetail.setPerAmount(studentPaymentOrder.getPerAmount());
+
+				//支出
+				SysUserCashAccountDetail sysUserExpendCashAccountDetail = new SysUserCashAccountDetail();
+				sysUserExpendCashAccountDetail.setUserId(userId);
+				sysUserExpendCashAccountDetail.setType(PlatformCashAccountDetailTypeEnum.PAY_FEE);
+				sysUserExpendCashAccountDetail.setStatus(DealStatusEnum.SUCCESS);
+				sysUserExpendCashAccountDetail.setAmount(order.getActualAmount().negate());
+				sysUserExpendCashAccountDetail.setBalance(sysUserCashAccount.getBalance());
+				sysUserExpendCashAccountDetail.setAttribute(order.getTransNo());
+				sysUserExpendCashAccountDetail.setChannel(studentPaymentOrder.getPaymentChannel());
+				if(studentPaymentOrder.getComAmount() != null){
+					sysUserExpendCashAccountDetail.setComAmount(studentPaymentOrder.getComAmount().negate());
 				}
-				userRoleMap.put(vipGroup.getUserId(),"指导老师");
-				//生成课程学员关联
-				List<CourseScheduleStudentPayment> courseScheduleStudentPayments = new ArrayList<>();
-				List<ClassGroupStudentMapper> classGroupStudents = classGroupStudentMapperDao.findByClassGroup(classGroup.getId());
-
-				List<Integer> studentIdList = classGroupStudents.stream().map(e -> e.getUserId()).collect(Collectors.toList());
-				for (ClassGroupStudentMapper classGroupStudent : classGroupStudents) {
-					StudentPaymentOrder successOrder = studentPaymentOrderDao.findByStudentVipGroup(vipGroup.getId(), classGroupStudent.getUserId(), "SUCCESS");
-					//实际支付金额,去除优惠券
-					BigDecimal actualPrice = successOrder.getExpectAmount();
-					BigDecimal divide = actualPrice.divide(new BigDecimal(courseSchedules.size()), ROUND_DOWN);
-					BigDecimal firstAmount = actualPrice.subtract(divide.multiply(new BigDecimal(courseSchedules.size()))).add(divide);
-					for (int i = 0; i < courseSchedules.size(); i++) {
-						CourseSchedule courseSchedule = courseSchedules.get(i);
-						CourseScheduleStudentPayment courseScheduleStudentPayment = new CourseScheduleStudentPayment();
-						courseScheduleStudentPayment.setUserId(classGroupStudent.getUserId());
-						courseScheduleStudentPayment.setGroupType(courseSchedule.getGroupType());
-						courseScheduleStudentPayment.setMusicGroupId(courseSchedule.getMusicGroupId());
-						courseScheduleStudentPayment.setCourseScheduleId(courseSchedule.getId());
-						courseScheduleStudentPayment.setClassGroupId(courseSchedule.getClassGroupId());
-						if (i == 0) {
-							courseScheduleStudentPayment.setExpectPrice(firstAmount);
-						}else{
-							courseScheduleStudentPayment.setExpectPrice(divide);
-						}
-						courseScheduleStudentPayment.setOriginalPrice(TeachModeEnum.ONLINE.equals(courseSchedule.getTeachMode())?vipGroupDefaultClassesUnitPrice.getOnlineClassesUnitPrice():vipGroupDefaultClassesUnitPrice.getOfflineClassesUnitPrice());
-						courseScheduleStudentPayment.setActualPrice(courseScheduleStudentPayment.getExpectPrice());
-						courseScheduleStudentPayments.add(courseScheduleStudentPayment);
-					}
-					userRoleMap.put(classGroupStudent.getUserId(),null);
+				if(studentPaymentOrder.getPerAmount() != null){
+					sysUserExpendCashAccountDetail.setPerAmount(studentPaymentOrder.getPerAmount().negate());
 				}
-				studentDao.updateStudentServiceTag(null, studentIdList, YesOrNoEnum.YES.getCode());
 
-				courseScheduleStudentPaymentDao.batchInsert(courseScheduleStudentPayments);
-//				courseScheduleService.checkNewCourseSchedules(courseSchedules, false,false);
+				sysUserCashAccountDetailService.insert(sysUserIncomeCashAccountDetail);
+				sysUserCashAccountDetailService.insert(sysUserExpendCashAccountDetail);
 
-				imGroupService.create(classGroup.getId().longValue(), null, classGroup.getName(), classGroup.getName(), vipGroup.getName(), null, null, GroupType.VIP.getCode());
-				imGroupMemberService.join(classGroup.getId().longValue(), userRoleMap);
-				imUserFriendService.refreshGroupImUserFriend(classGroup.getMusicGroupId(),classGroup.getGroupType());
-			}
-			SysUserCashAccount sysUserCashAccount = sysUserCashAccountService.get(userId);
-			//插入缴费明细
-			//收入
-			SysUserCashAccountDetail sysUserIncomeCashAccountDetail = new SysUserCashAccountDetail();
-			sysUserIncomeCashAccountDetail.setUserId(userId);
-			sysUserIncomeCashAccountDetail.setType(PlatformCashAccountDetailTypeEnum.RECHARGE);
-			sysUserIncomeCashAccountDetail.setStatus(DealStatusEnum.SUCCESS);
-			sysUserIncomeCashAccountDetail.setAmount(order.getActualAmount());
-			sysUserIncomeCashAccountDetail.setBalance(sysUserCashAccount.getBalance().add(order.getActualAmount()));
-			sysUserIncomeCashAccountDetail.setAttribute(order.getTransNo());
-			sysUserIncomeCashAccountDetail.setChannel(studentPaymentOrder.getPaymentChannel());
-			sysUserIncomeCashAccountDetail.setComAmount(studentPaymentOrder.getComAmount());
-			sysUserIncomeCashAccountDetail.setPerAmount(studentPaymentOrder.getPerAmount());
-
-			//支出
-			SysUserCashAccountDetail sysUserExpendCashAccountDetail = new SysUserCashAccountDetail();
-			sysUserExpendCashAccountDetail.setUserId(userId);
-			sysUserExpendCashAccountDetail.setType(PlatformCashAccountDetailTypeEnum.PAY_FEE);
-			sysUserExpendCashAccountDetail.setStatus(DealStatusEnum.SUCCESS);
-			sysUserExpendCashAccountDetail.setAmount(order.getActualAmount().negate());
-			sysUserExpendCashAccountDetail.setBalance(sysUserCashAccount.getBalance());
-			sysUserExpendCashAccountDetail.setAttribute(order.getTransNo());
-			sysUserExpendCashAccountDetail.setChannel(studentPaymentOrder.getPaymentChannel());
-			if(studentPaymentOrder.getComAmount() != null){
-				sysUserExpendCashAccountDetail.setComAmount(studentPaymentOrder.getComAmount().negate());
-			}
-			if(studentPaymentOrder.getPerAmount() != null){
-				sysUserExpendCashAccountDetail.setPerAmount(studentPaymentOrder.getPerAmount().negate());
-			}
-
-			sysUserCashAccountDetailService.insert(sysUserIncomeCashAccountDetail);
-			sysUserCashAccountDetailService.insert(sysUserExpendCashAccountDetail);
+				Map<Integer,String> map = new HashMap<>(1);
+				map.put(userId,userId.toString());
+				sysMessageService.batchSendMessage(MessageSenderPluginContext.MessageSender.JIGUANG, MessageTypeEnum.STUDENT_PUSH_VIP_BUY, map, null, 0, "2","STUDENT",
+						vipGroup.getName());
 
-			Map<Integer,String> map = new HashMap<>(1);
-			map.put(userId,userId.toString());
-			sysMessageService.batchSendMessage(MessageSenderPluginContext.MessageSender.JIGUANG, MessageTypeEnum.STUDENT_PUSH_VIP_BUY, map, null, 0, "2","STUDENT",
-					vipGroup.getName());
-
-			//更新所属分部列表
-			List<Integer> organIds = classGroupDao.findStudentOrganIdsByClassGroup(classGroup.getId().longValue());
-			organIds.add(vipGroup.getOrganId());
-			HashSet<Integer> hashSet = new HashSet<>(organIds);
-			String organIdsString = StringUtils.join(hashSet, ",");
-			vipGroup.setOrganIdList(organIdsString);
-			vipGroupDao.update(vipGroup);
-			try {
-				contractService.transferVipGroupCoursesContract(userId,vipGroup.getId());
-			} catch (Exception e) {
-				LOGGER.error(MessageFormatter.arrayFormat("小课[{}]购买协议错误:{}", vipGroup.getId(), e.getMessage()), e);
+				//更新所属分部列表
+				List<Integer> organIds = classGroupDao.findStudentOrganIdsByClassGroup(classGroup.getId().longValue());
+				organIds.add(vipGroup.getOrganId());
+				HashSet<Integer> hashSet = new HashSet<>(organIds);
+				String organIdsString = StringUtils.join(hashSet, ",");
+				vipGroup.setOrganIdList(organIdsString);
+				vipGroupDao.update(vipGroup);
+				try {
+					contractService.transferVipGroupCoursesContract(userId,vipGroup.getId());
+				} catch (Exception e) {
+					LOGGER.error(MessageFormatter.arrayFormat("小课[{}]购买协议错误:{}", vipGroup.getId(), e.getMessage()), e);
+				}
 			}
 		}else {
 			classGroup.setStudentNum(classGroup.getStudentNum() - 1);

+ 1 - 1
mec-biz/src/main/resources/config/mybatis/CourseScheduleMapper.xml

@@ -2433,7 +2433,7 @@
         SELECT <include refid="resultSql"/>
         FROM course_schedule cs
         WHERE FIND_IN_SET(cs.class_group_id_, #{classGroupIds})
-          AND CONCAT(cs.class_date_, ' ', cs.start_class_time_) &gt; NOW()
+          AND cs.status_ = 'NOT_START'
           AND cs.class_date_ &gt;= DATE_FORMAT(#{fromDate}, "%Y-%m-%d")
           AND (cs.del_flag_ IS NULL OR cs.del_flag_ = 0) AND cs.pre_course_flag_ = 0
     </select>

+ 8 - 8
mec-biz/src/main/resources/config/mybatis/FinancialExpenditureMapper.xml

@@ -230,26 +230,26 @@
         <result property="deptId" column="dept_id"/>
     </resultMap>
     <select id="getWorkOrderInfo" resultMap="PWorkOrderInfo">
-        SELECT woi.*,su.mec_user_id FROM mec_dev_api.p_work_order_info woi
-        LEFT JOIN mec_dev_api.p_work_order_circulation_history woch ON woi.id = woch.work_order
-        LEFT JOIN mec_dev_api.p_process_info pi ON pi.id = woi.classify
-        LEFT JOIN mec_dev_api.sys_user su ON su.user_id = woi.creator
+        SELECT woi.*,su.mec_user_id FROM oa_pro.p_work_order_info woi
+        LEFT JOIN oa_pro.p_work_order_circulation_history woch ON woi.id = woch.work_order
+        LEFT JOIN oa_pro.p_process_info pi ON pi.id = woi.classify
+        LEFT JOIN oa_pro.sys_user su ON su.user_id = woi.creator
         WHERE woi.is_end = 1  AND woi.is_denied = 0  AND woi.is_cancel = 0
         AND woch.`status` != 0 AND woi.id = #{workOrderId} AND pi.fee_type = 1 LIMIT 1
     </select>
     <select id="getFormStructure" resultType="java.lang.String">
-        SELECT form_structure FROM mec_dev_api.p_work_order_tpl_data WHERE work_order = #{workOrderId}
+        SELECT form_structure FROM oa_pro.p_work_order_tpl_data WHERE work_order = #{workOrderId}
     </select>
     <select id="getFormData" resultType="java.lang.String">
-        SELECT form_data FROM mec_dev_api.p_work_order_tpl_data WHERE work_order = #{workOrderId}
+        SELECT form_data FROM oa_pro.p_work_order_tpl_data WHERE work_order = #{workOrderId}
     </select>
     <select id="getTplInfo" resultType="java.lang.String">
-        SELECT form_structure FROM mec_dev_api.p_tpl_info WHERE id = #{tplInfoId}
+        SELECT form_structure FROM oa_pro.p_tpl_info WHERE id = #{tplInfoId}
     </select>
     <select id="findByBatchNoAndProcessNo" resultType="integer">
         SELECT id_ FROM financial_expenditure WHERE batch_no_ = #{workOrderId} AND financial_process_no_ = #{workOrderId} LIMIT 1
     </select>
     <select id="getDeptId" resultType="java.lang.Integer">
-        SELECT organ_id FROM mec_dev_api.sys_dept WHERE dept_id = #{deptId}
+        SELECT organ_id FROM oa_pro.sys_dept WHERE dept_id = #{deptId}
     </select>
 </mapper>

+ 1 - 1
mec-biz/src/main/resources/config/mybatis/StudentManageDao.xml

@@ -532,7 +532,7 @@
                 </foreach>
             </if>
             <if test="currentGrade != null">
-                AND sr.current_grade_ LIKE CONCAT('%',#{currentGrade},'%')
+                AND stu.current_grade_num_ = #{currentGrade}
             </if>
             <if test="createYear != null">
                 AND DATE_FORMAT(sr.create_time_,'%Y') =  #{createYear}

+ 2 - 1
mec-biz/src/main/resources/config/mybatis/StudentRegistrationMapper.xml

@@ -430,7 +430,7 @@
                 AND (su.username_ LIKE CONCAT('%',#{name},'%') OR sr.parents_phone_ LIKE CONCAT('%',#{name},'%'))
             </if>
             <if test="currentGrade != null">
-                AND sr.current_grade_ LIKE CONCAT('%',#{currentGrade},'%')
+                AND st.current_grade_num_ = #{currentGrade}
             </if>
             <if test="createYear != null">
                 AND DATE_FORMAT(sr.create_time_,'%Y') = #{createYear}
@@ -514,6 +514,7 @@
         SELECT COUNT(sr.id_)
         FROM student_registration sr
         LEFT JOIN sys_user su ON sr.user_id_ = su.id_
+        LEFT JOIN student st ON st.user_id_ = su.id_
         LEFT JOIN (
         SELECT v.student_id_, COUNT(*) num
         FROM student_visit v

+ 4 - 2
mec-biz/src/main/resources/config/mybatis/SysMusicCompareWeekDataMapper.xml

@@ -130,8 +130,10 @@
 			</if>
 		</if>
 		WHERE smcwd.monday_ = #{monday}
-			AND stu.user_id_=smcwd.user_id_ and smcwd.tenant_id_ = #{tenantId}
-			<if test="orderType==1">
+			AND stu.user_id_ = smcwd.user_id_ and smcwd.tenant_id_ = #{tenantId}
+			<if test="organId != null">
+				AND su.organ_id_ = #{organId}
+			</if>			<if test="orderType==1">
 				<if test="heardLevel==null">
 					AND smcwd.advanced_max_score_ > 0
 				</if>

+ 7 - 2
mec-biz/src/main/resources/config/mybatis/SysMusicScoreAccompanimentMapper.xml

@@ -32,6 +32,9 @@
 		<result column="render_from_" property="renderFrom" />
 		<result column="enable_evaluation_" property="enableEvaluation" />
 		<result column="client_type_" property="clientType" typeHandler="com.ym.mec.common.dal.CustomEnumTypeHandler"/>
+		<result column="play_mode_" property="playMode" typeHandler="com.ym.mec.common.dal.CustomEnumTypeHandler" />
+		<result column="muse_score_url_" property="museScoreUrl" />
+		<result column="muse_score_memo_" property="museScoreMemo" />
 	</resultMap>
 
 	<delete id="deleteBySongId">
@@ -128,7 +131,8 @@
 	
 	<!-- paged query -->
 	<select id="queryPage" resultMap="SysMusicScoreAccompaniment" parameterType="map">
-		SELECT sesa.*,ses.name_,ses.type_,ses.url_,s.name_ subject_name_,sesc.name_ categories_name_,sesc.id_ categories_id_,sesc.parent_id_ parent_categories_id_,ses.client_type_,ses.rank_ids_,ses.render_from_,ses.enable_evaluation_,ses.metronome_url_
+		SELECT sesa.*,ses.name_,ses.type_,ses.url_,s.name_ subject_name_,sesc.name_ categories_name_,sesc.id_ categories_id_,sesc.parent_id_ parent_categories_id_,ses.client_type_,ses.rank_ids_,ses.render_from_,
+		ses.enable_evaluation_,ses.metronome_url_,ses.muse_score_url_,ses.muse_score_memo_,ses.play_mode_
 		FROM sys_music_score ses
 		LEFT JOIN sys_music_score_accompaniment sesa ON ses.id_ = sesa.exam_song_id_
 		LEFT JOIN sys_music_score_categories sesc ON sesc.id_ = ses.music_score_categories_id_
@@ -163,7 +167,8 @@
 		</where>
 	</select>
 	<select id="queryAccPage" resultMap="SysMusicScoreAccompaniment">
-		SELECT sesa.*,ses.name_,ses.type_,ses.url_,s.name_ subject_name_,sesc.name_ categories_name_,sesc.id_ categories_id_,sesc.parent_id_ parent_categories_id_,ses.client_type_,ses.enable_evaluation_,ses.metronome_url_
+		SELECT sesa.*,ses.name_,ses.type_,ses.url_,s.name_ subject_name_,sesc.name_ categories_name_,sesc.id_ categories_id_,sesc.parent_id_ parent_categories_id_,ses.client_type_,
+		ses.enable_evaluation_,ses.metronome_url_,ses.muse_score_url_,ses.muse_score_memo_,ses.play_mode_
 		FROM sys_music_score ses
 		LEFT JOIN sys_music_score_accompaniment sesa ON ses.id_ = sesa.exam_song_id_
 		LEFT JOIN sys_music_score_categories sesc ON sesc.id_ = ses.music_score_categories_id_

+ 14 - 2
mec-biz/src/main/resources/config/mybatis/SysMusicScoreMapper.xml

@@ -30,6 +30,9 @@
 		<result column="render_from_" property="renderFrom" />
 		<result column="enable_evaluation_" property="enableEvaluation" />
 		<result column="subject_id_" property="subjectId" />
+		<result column="play_mode_" property="playMode" typeHandler="com.ym.mec.common.dal.CustomEnumTypeHandler" />
+		<result column="muse_score_url_" property="museScoreUrl" />
+		<result column="muse_score_memo_" property="museScoreMemo" />
 		<result column="client_type_" property="clientType" typeHandler="com.ym.mec.common.dal.CustomEnumTypeHandler"/>
 		<result column="update_time_" property="updateTime" />
 		<result column="create_time_" property="createTime" />
@@ -48,10 +51,10 @@
 	<!-- insert a record into the database -->
 	<insert id="insert" parameterType="com.ym.mec.biz.dal.entity.SysMusicScore" useGeneratedKeys="true" keyColumn="id" keyProperty="id">
 		INSERT INTO sys_music_score (music_score_categories_id_,name_,type_,speed_,url_,metronome_url_,midi_url_,create_user_id_,order_,
-		                             update_time_,create_time_,client_type_,rank_ids_,render_from_,enable_evaluation_,show_flag_)
+		                             update_time_,create_time_,client_type_,rank_ids_,render_from_,enable_evaluation_,show_flag_,play_mode_,muse_score_url_,muse_score_memo_)
 		VALUES(#{musicScoreCategoriesId},#{name},#{type,typeHandler=com.ym.mec.common.dal.CustomEnumTypeHandler},
 		       #{speed},#{url},#{metronomeUrl},#{midiUrl},#{createUserId},#{order},NOW(),NOW(),#{clientType,typeHandler=com.ym.mec.common.dal.CustomEnumTypeHandler},
-		       #{rankIds},#{renderFrom},#{enableEvaluation},#{showFlag})
+		       #{rankIds},#{renderFrom},#{enableEvaluation},#{showFlag},#{playMode},#{museScoreUrl},#{museScoreMemo})
 	</insert>
 
 	<!-- query a record by primary key -->
@@ -96,6 +99,15 @@
 		<if test="speed != null">
 			speed_ = #{speed},
 		</if>
+		<if test="playMode != null">
+			play_mode_ = #{playMode,typeHandler=com.ym.mec.common.dal.CustomEnumTypeHandler},
+		</if>
+		<if test="museScoreUrl != null">
+			muse_score_url_ = #{museScoreUrl},
+		</if>
+		<if test="museScoreMemo != null">
+			muse_score_memo_ = #{museScoreMemo},
+		</if>
 			update_time_ = NOW()
 	</set>
 		WHERE id_ = #{id}

+ 3 - 0
mec-common/common-core/src/main/java/com/ym/mec/common/dal/CustomEnumTypeHandler.java

@@ -50,6 +50,9 @@ public class CustomEnumTypeHandler extends BaseTypeHandler<BaseEnum> {
 			return null;
 		}
 		Object code = null;
+		if(type == null){
+			return null;
+		}
 		for (BaseEnum enumBaseInterface : type.getEnumConstants()) {
 
 			code = enumBaseInterface.getCode();
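
Note on the guard added above: it returns null before the loop ever calls type.getEnumConstants(), which would otherwise throw a NullPointerException when the handler is used without a resolvable enum class (so type is null). A minimal sketch of the same lookup shape, with simplified types (an illustration only, not the repo's BaseEnum API):

    // Sketch: simplified stand-in for CustomEnumTypeHandler's code-to-enum lookup.
    static <E extends Enum<E>> E byCode(Class<E> type, Object code) {
        if (type == null) {
            return null;  // the added guard; without it, type.getEnumConstants() below would NPE
        }
        for (E constant : type.getEnumConstants()) {
            // the real handler compares BaseEnum.getCode() with the column value
            if (String.valueOf(constant.ordinal()).equals(String.valueOf(code))) {
                return constant;
            }
        }
        return null;
    }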

+ 0 - 1
mec-im/pom.xml

@@ -104,7 +104,6 @@
     <dependency>
       <groupId>io.netty</groupId>
       <artifactId>netty-all</artifactId>
-      <version>4.1.24.Final</version>
     </dependency>
   </dependencies>
 

+ 1 - 0
mec-student/src/main/java/com/ym/mec/student/controller/CloudStudyController.java

@@ -38,6 +38,7 @@ public class CloudStudyController extends BaseController {
             return failed("获取用户信息失败");
         }
         queryInfo.setUserId(sysUser.getId());
+        queryInfo.setOrganId(sysUser.getOrganId());
         return succeed(sysMusicCompareRecordService.rankingList(queryInfo));
     }
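
Taken together with the su.organ_id_ = #{organId} filter that SysMusicCompareWeekDataMapper now wraps in <if test="organId != null"> (see above), this one-line change scopes the cloud-study ranking list to the caller's organ. A hedged fragment, reusing the names from the surrounding context; the commented line only illustrates the previous behaviour:

    queryInfo.setUserId(sysUser.getId());
    queryInfo.setOrganId(sysUser.getOrganId());   // new: organ-scoped ranking
    // leaving organId unset keeps the old platform-wide ranking, because the mapper
    // only appends the organ filter when organId != null
    return succeed(sysMusicCompareRecordService.rankingList(queryInfo));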
 

+ 2 - 2
mec-thirdparty/src/main/java/com/ym/mec/thirdparty/eseal/provider/TsignPlugin.java

@@ -58,12 +58,12 @@ public class TsignPlugin implements ESealPlugin, InitializingBean, DisposableBea
         projectconfig.setItsmApiUrl(apisUrl);
         Result result = ServiceClientManager.registClient(projectconfig, null, null);
         if (result.getErrCode() != 0) {
-            throw new ThirdpartyException("e签宝客户端注册失败:{}", result.getMsg());
+            //throw new ThirdpartyException("e签宝客户端注册失败:{}", result.getMsg());
         }
 
         serviceClient = ServiceClientManager.get(projectId);
         if (serviceClient == null) {
-            throw new ThirdpartyException("获取e签宝客户端失败");
+            //throw new ThirdpartyException("获取e签宝客户端失败");
         }
     }
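
With both throws commented out, a failed registration no longer aborts startup, but serviceClient can remain null and surface later as a NullPointerException at the first signing call. If the intent is to make registration failures non-fatal, a logged fallback keeps them visible; a sketch under that assumption (fragment; the logger is illustrative and not part of this commit):

    Result result = ServiceClientManager.registClient(projectconfig, null, null);
    if (result.getErrCode() != 0) {
        // assumption: log instead of throwing, so the failure is recorded without aborting startup
        LOGGER.warn("e签宝 client registration failed: {}", result.getMsg());
    }
    serviceClient = ServiceClientManager.get(projectId);
    // callers should treat a null serviceClient as "e-sign disabled" rather than dereferencing it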
 

+ 9 - 0
mec-thirdparty/src/main/java/com/ym/mec/thirdparty/storage/StoragePlugin.java

@@ -16,6 +16,15 @@ public interface StoragePlugin {
 	String uploadFile(String folderName, File file);
 
 	/**
+	 * Upload a file asynchronously
+	 * @param folderName target folder
+	 * @param file the file to upload
+	 * @param delLocalFile whether to delete the local file after the upload
+	 * @return the file path (URL)
+	 */
+	String asyncUploadFile(String folderName, File file, boolean delLocalFile);
+
+	/**
 	 * Download a file
 	 * @param folderName folder
 	 * @param fileName file name

+ 5 - 0
mec-thirdparty/src/main/java/com/ym/mec/thirdparty/storage/StoragePluginContext.java

@@ -24,6 +24,11 @@ public class StoragePluginContext {
 		StoragePlugin StoragePlugin = getStoragePlugin(storagePluginName);
 		return StoragePlugin.uploadFile(folderName, file);
 	}
+	
+	public String asyncUploadFile(String storagePluginName, String folderName, File file, boolean delLocalFile){
+		StoragePlugin StoragePlugin = getStoragePlugin(storagePluginName);
+		return StoragePlugin.asyncUploadFile(folderName, file, delLocalFile);
+	}
 
 	private StoragePlugin getStoragePlugin(String storagePluginName) {
 		StoragePlugin storagePlugin = mapper.get(storagePluginName);
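
A hedged usage sketch of the new context method: only StoragePluginContext.asyncUploadFile(...) itself comes from this commit; the service, method name, plugin key and folder below are hypothetical stand-ins for whatever callers pass to uploadFile today.

    import java.io.File;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.stereotype.Service;
    import com.ym.mec.thirdparty.storage.StoragePluginContext;

    @Service
    class RecordingUploadService {

        @Autowired
        private StoragePluginContext storagePluginContext;

        public String saveRecordingAsync(File wavFile) {
            // The URL is returned immediately; the PUT runs on a background thread inside
            // the selected plugin, and the local temp file is removed once it finishes.
            return storagePluginContext.asyncUploadFile(
                    "ks3",           // hypothetical plugin name key
                    "soundCompare",  // hypothetical target folder
                    wavFile,
                    true);           // delete the local file after upload
        }
    }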

+ 28 - 0
mec-thirdparty/src/main/java/com/ym/mec/thirdparty/storage/provider/AliyunOssStoragePlugin.java

@@ -3,6 +3,7 @@ package com.ym.mec.thirdparty.storage.provider;
 import java.io.File;
 import java.io.IOException;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.poi.util.IOUtils;
 import org.springframework.beans.factory.DisposableBean;
 import org.springframework.beans.factory.InitializingBean;
@@ -80,6 +81,33 @@ public class AliyunOssStoragePlugin implements StoragePlugin, InitializingBean,
 	}
 
 	@Override
+	public String asyncUploadFile(String folderName, File file, boolean delLocalFile) {
+		if (!file.exists()) {
+			throw new ThirdpartyException("需要上传的文件[{}]不存在", file.getAbsolutePath());
+		}
+
+		if (folderName.endsWith("/")) {
+			folderName = folderName.substring(0, folderName.length() - 1);
+		}
+		
+		final String dir = folderName;
+		
+		Thread thread = new Thread(new Runnable() {
+			
+			@Override
+			public void run() {
+				ossClient.putObject(bucketName, dir + "/" + file.getName(), file);
+				if(delLocalFile){
+					FileUtils.deleteQuietly(file);
+				}
+			}
+		});
+		thread.start();
+
+		return "https://" + bucketName + "." + endpoint + "/" + folderName + "/" + file.getName();
+	}
+
+	@Override
 	public byte[] getFile(String folderName, String fileName) throws IOException {
 		OSSObject ossObject = ossClient.getObject(bucketName, folderName + "/" + fileName);
 		try {
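
The asyncUploadFile above starts a new Thread per call (the KS3 provider below does the same), so a burst of uploads means a burst of short-lived threads. A bounded-pool variant of the same idea is sketched here; this is an assumption, not the committed code, and ossClient, bucketName and endpoint stand in for the plugin's existing fields.

    import java.io.File;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    class PooledUploader {
        // one shared, bounded pool instead of an unbounded number of short-lived threads
        private static final ExecutorService UPLOAD_POOL = Executors.newFixedThreadPool(4);

        String asyncUpload(String dir, File file, boolean delLocalFile) {
            UPLOAD_POOL.submit(() -> {
                // ossClient.putObject(bucketName, dir + "/" + file.getName(), file);  // same put call as in the diff
                if (delLocalFile) {
                    file.delete();  // FileUtils.deleteQuietly(file) in the original
                }
            });
            // same contract as the committed code: the URL is returned before the upload finishes
            return "https://bucket.endpoint/" + dir + "/" + file.getName();
        }
    }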

+ 31 - 0
mec-thirdparty/src/main/java/com/ym/mec/thirdparty/storage/provider/KS3StoragePlugin.java

@@ -3,6 +3,7 @@ package com.ym.mec.thirdparty.storage.provider;
 import java.io.File;
 import java.io.IOException;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.poi.util.IOUtils;
 import org.springframework.beans.factory.DisposableBean;
 import org.springframework.beans.factory.InitializingBean;
@@ -93,6 +94,36 @@ public class KS3StoragePlugin implements StoragePlugin, InitializingBean, Dispos
 	}
 
 	@Override
+	public String asyncUploadFile(String folderName, File file, boolean delLocalFile) {
+		if (!file.exists()) {
+			throw new ThirdpartyException("需要上传的文件[{}]不存在", file.getAbsolutePath());
+		}
+
+		if (folderName.endsWith("/")) {
+			folderName = folderName.substring(0, folderName.length() - 1);
+		}
+
+		PutObjectRequest request = new PutObjectRequest(bucketName, folderName + "/" + file.getName(), file);
+
+		// upload as a publicly readable object
+		request.setCannedAcl(CannedAccessControlList.PublicRead);
+		
+		Thread thread = new Thread(new Runnable() {
+			
+			@Override
+			public void run() {
+				client.putObject(request);
+				if(delLocalFile){
+					FileUtils.deleteQuietly(file);
+				}
+			}
+		});
+		thread.start();
+
+		return "https://" + bucketName + "." + endpoint + "/" + folderName + "/" + file.getName();
+	}
+
+	@Override
 	public byte[] getFile(String folderName, String fileName) throws IOException {
 		GetObjectRequest request = new GetObjectRequest(bucketName, folderName + "/" + fileName);
 		GetObjectResult result = client.getObject(request);
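
Two behavioural notes that apply to both async providers: the public URL is handed back before the PUT has run, so an immediate GET on it may briefly fail until the background upload lands; and any exception thrown by putObject inside the worker thread is lost, because the plain Thread has no error handling. A hedged sketch of a task that at least logs failures (an illustration under those assumptions, with the put call elided as in the other sketches):

    import java.io.File;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggingUploadTask implements Runnable {
        private static final Logger LOGGER = LoggerFactory.getLogger(LoggingUploadTask.class);

        private final File file;
        private final boolean delLocalFile;

        LoggingUploadTask(File file, boolean delLocalFile) {
            this.file = file;
            this.delLocalFile = delLocalFile;
        }

        @Override
        public void run() {
            try {
                // client.putObject(request);  // the same KS3/OSS put as in the diff
                if (delLocalFile) {
                    file.delete();  // FileUtils.deleteQuietly(file) in the original
                }
            } catch (Exception e) {
                // without this, background upload failures disappear silently
                LOGGER.error("async upload of {} failed", file.getAbsolutePath(), e);
            }
        }
    }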

+ 15 - 3
pom.xml

@@ -30,6 +30,12 @@
 				<version>${spring-boot.version}</version>
 				<type>pom</type>
 				<scope>import</scope>
+				<exclusions>
+					<exclusion>
+						<groupId>org.springframework.boot</groupId>
+        				<artifactId>spring-boot-starter-log4j2</artifactId>
+					</exclusion>
+				</exclusions>
 			</dependency>
 
 			<dependency>
@@ -38,6 +44,12 @@
 				<version>${spring-cloud.version}</version>
 				<type>pom</type>
 				<scope>import</scope>
+				<exclusions>
+					<exclusion>
+						<groupId>org.springframework.boot</groupId>
+        				<artifactId>spring-boot-starter-log4j2</artifactId>
+					</exclusion>
+				</exclusions>
 			</dependency>
 
 			<dependency>
@@ -377,6 +389,6 @@
 		<module>mec-student</module>
 		<module>mec-teacher</module>
 		<module>mec-biz</module>
-	  <module>dynamic-datasource</module>
-  </modules>
-</project>
+	  	<module>dynamic-datasource</module>
+	  	<module>audio-analysis</module>
+	</modules></project>