|
@@ -14,12 +14,14 @@ import java.util.stream.Collectors;
|
|
|
|
|
|
import javax.sound.sampled.AudioFormat;
|
|
|
|
|
|
+import org.apache.commons.lang3.ArrayUtils;
|
|
|
import org.apache.commons.lang3.StringUtils;
|
|
|
-import org.apache.commons.math3.analysis.function.Gaussian;
|
|
|
import org.slf4j.Logger;
|
|
|
import org.slf4j.LoggerFactory;
|
|
|
|
|
|
+import com.yonge.audio.analysis.AudioFloatConverter;
|
|
|
import com.yonge.audio.analysis.Signals;
|
|
|
+import com.yonge.audio.analysis.detector.PercussionRhythmDetector;
|
|
|
import com.yonge.audio.analysis.detector.YINPitchDetector;
|
|
|
import com.yonge.netty.dto.NoteAnalysis.NoteErrorType;
|
|
|
import com.yonge.netty.entity.MusicXmlBasicInfo;
|
|
@@ -42,8 +44,16 @@ public class UserChannelContext {
|
|
|
private final static int MIN_FREQUECY = 43;
|
|
|
|
|
|
private final static int MAX_FREQUECY = 2000;
|
|
|
+
|
|
|
+ private AudioFormat audioFormat = new AudioFormat(44100, 16, 1, true, false);
|
|
|
+
|
|
|
+ private int bufferSize = 1024;
|
|
|
+
|
|
|
+ private int frameSize = 128;
|
|
|
|
|
|
private FastYin detector;
|
|
|
+
|
|
|
+ private AudioFloatConverter converter;
|
|
|
|
|
|
private String user;
|
|
|
|
|
@@ -81,7 +91,9 @@ public class UserChannelContext {
|
|
|
private List<SectionAnalysis> doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
|
|
|
|
|
|
private List<ChunkAnalysis> totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
|
|
|
-
|
|
|
+
|
|
|
+ private List<Float> floatSamples = new ArrayList<Float>();
|
|
|
+
|
|
|
private byte[] channelBufferBytes = new byte[0];
|
|
|
|
|
|
private double playTime;
|
|
@@ -125,15 +137,12 @@ public class UserChannelContext {
|
|
|
return result;
|
|
|
}
|
|
|
|
|
|
- public void init(MusicXmlBasicInfo musicXmlBasicInfo, float sampleRate, int bufferSize) {
|
|
|
+ public void init(MusicXmlBasicInfo musicXmlBasicInfo) {
|
|
|
this.platform = musicXmlBasicInfo.getPlatform();
|
|
|
this.subjectId = musicXmlBasicInfo.getSubjectId();
|
|
|
this.beatDuration = musicXmlBasicInfo.getBeatLength();
|
|
|
this.hardLevel = HardLevelEnum.valueOf(musicXmlBasicInfo.getHeardLevel());
|
|
|
this.evaluationCriteria = musicXmlBasicInfo.getEvaluationCriteria();
|
|
|
- if (detector == null) {
|
|
|
- detector = new FastYin(sampleRate, bufferSize);
|
|
|
- }
|
|
|
}
|
|
|
|
|
|
public void setUser(String user) {
|
|
@@ -220,6 +229,7 @@ public class UserChannelContext {
|
|
|
doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
|
|
|
doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
|
|
|
totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
|
|
|
+ floatSamples = new ArrayList<Float>();
|
|
|
recordId = null;
|
|
|
playTime = 0;
|
|
|
delayProcessed = false;
|
|
@@ -335,7 +345,27 @@ public class UserChannelContext {
|
|
|
return evaluatingSectionIndex;
|
|
|
}
|
|
|
|
|
|
- public void handle(float[] samples, AudioFormat audioFormat) {
|
|
|
+ public int getBufferSize() {
|
|
|
+ return bufferSize;
|
|
|
+ }
|
|
|
+
|
|
|
+ public void handle(byte[] datas) {
|
|
|
+
|
|
|
+ if(converter == null) {
|
|
|
+ converter = AudioFloatConverter.getConverter(audioFormat);
|
|
|
+ }
|
|
|
+
|
|
|
+ float[] samples = new float[getBufferSize()];
|
|
|
+
|
|
|
+ converter.toFloatArray(datas, samples);
|
|
|
+
|
|
|
+ for(float f : samples) {
|
|
|
+ floatSamples.add(f);
|
|
|
+ }
|
|
|
+
|
|
|
+ if (detector == null) {
|
|
|
+ detector = new FastYin(audioFormat.getSampleRate(), getBufferSize());
|
|
|
+ }
|
|
|
|
|
|
// YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length , audioFormat.getSampleRate());
|
|
|
// int playFrequency = (int) frequencyDetector.getFrequency(samples);
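For reference, a minimal, self-contained stand-in for the byte-to-float step that the new handle(byte[]) above performs, assuming the 44.1 kHz, 16-bit, little-endian, signed mono format declared by `audioFormat` (the real conversion lives in AudioFloatConverter, which this diff does not show):

    // Illustrative sketch only, not project code.
    import javax.sound.sampled.AudioFormat;

    public class PcmToFloatSketch {

        // Converts 16-bit little-endian signed PCM bytes to floats in [-1, 1).
        static float[] toFloatArray(byte[] pcm) {
            float[] out = new float[pcm.length / 2];
            for (int i = 0; i < out.length; i++) {
                int lo = pcm[2 * i] & 0xFF;        // low byte, unsigned
                int hi = pcm[2 * i + 1];           // high byte keeps the sign
                out[i] = ((hi << 8) | lo) / 32768f;
            }
            return out;
        }

        public static void main(String[] args) {
            AudioFormat format = new AudioFormat(44100, 16, 1, true, false);
            byte[] datas = new byte[2048];          // one 1024-sample chunk, as in handle()
            float[] samples = toFloatArray(datas);
            System.out.println(samples.length + " samples @ " + format.getSampleRate() + " Hz");
        }
    }

With that format, each 2048-byte payload yields exactly one 1024-sample chunk, which is what getBufferSize() assumes.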
|
|
@@ -345,23 +375,16 @@ public class UserChannelContext {
|
|
|
playFrequency = (int) detector.getPitch(samples).getPitch();
|
|
|
}
|
|
|
|
|
|
- double splDb = Signals.soundPressureLevel(samples);
|
|
|
- double power = Signals.power(samples);
|
|
|
+ double splDb = 0;
|
|
|
+ float[] energyEnvelop = Signals.energyEnvelope(samples, frameSize);
|
|
|
int amplitude = (int) Signals.norm(samples);
|
|
|
|
|
|
- int decibels = (int) Signals.decibels(samples);
|
|
|
- if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.AMPLITUDE.getCode())) {
|
|
|
- amplitude = (int) Signals.norm(samples);
|
|
|
- } else if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.DECIBELS.getCode())) {
|
|
|
- amplitude = (int) Signals.decibels(samples);
|
|
|
- amplitude = amplitude >= 60 ? amplitude : 0;
|
|
|
- }
|
|
|
- // float rms = Signals.rms(samples);
|
|
|
-
|
|
|
+ int decibels = 0;
|
|
|
+
|
|
|
double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
|
|
|
|
|
|
playTime += durationTime;
|
|
|
-
|
|
|
+
|
|
|
// 获取当前音符信息
|
|
|
MusicXmlNote musicXmlNote = getCurrentMusicNote(null, null);
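The durationTime expression above reduces to samples / sampleRate * 1000; with the 1024-sample, 16-bit mono buffers used here that is about 23.2 ms per call, which is the step by which playTime advances. A quick standalone check (assumed values, not project code):

    public class DurationTimeCheck {
        public static void main(String[] args) {
            int samples = 1024;                 // bufferSize
            float sampleRate = 44100f;
            int bytesPerSample = 16 / 8;
            // 1000 * (1024 * 2) / 44100.0 / 2 = 23.22, i.e. samples / sampleRate * 1000
            double durationMs = 1000 * (samples * 2) / sampleRate / bytesPerSample;
            System.out.println(durationMs);     // ≈ 23.22 ms per chunk
        }
    }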
|
|
|
|
|
@@ -380,32 +403,44 @@ public class UserChannelContext {
|
|
|
|
|
|
if (noteAnalysis.getMusicalNotesIndex() >= 0 && noteAnalysis.getMusicalNotesIndex() <= getTotalMusicNoteIndex(null)) {
|
|
|
|
|
|
- LOGGER.debug("user:{} delayProcessed:{} dynamicOffset:{} Frequency:{} splDb:{} power:{} amplitude:{} decibels:{} endtime:{}", user,
|
|
|
- delayProcessed, dynamicOffset, playFrequency, splDb, power, amplitude, decibels, playTime);
|
|
|
-
|
|
|
- ChunkAnalysis chunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude);
|
|
|
+ ChunkAnalysis chunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, energyEnvelop, amplitude);
|
|
|
|
|
|
if (totalChunkAnalysisList.size() > 0) {
|
|
|
if (totalChunkAnalysisList.get(totalChunkAnalysisList.size() - 1).getAmplitude() + 2 < chunkAnalysis.getAmplitude()) {
|
|
|
chunkAnalysis.setPeak(true);// 只针对打击乐
|
|
|
}
|
|
|
}
|
|
|
+
|
|
|
+ // Handle overtones (harmonics)

|
|
|
+ float basicFrequency = -1;
|
|
|
+ if(totalChunkAnalysisList.size() > 0) {
|
|
|
+ basicFrequency = totalChunkAnalysisList.get(totalChunkAnalysisList.size() - 1).getFrequency();
|
|
|
+ }
|
|
|
+ chunkAnalysis.setFrequency(handleHarmonic(basicFrequency, playFrequency));
|
|
|
+
|
|
|
totalChunkAnalysisList.add(chunkAnalysis);
|
|
|
|
|
|
- if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp())) {
|
|
|
+ LOGGER.debug("user:{} delayProcessed:{} dynamicOffset:{} Frequency:{} splDb:{} amplitude:{} decibels:{} endtime:{}", user,
|
|
|
+ delayProcessed, dynamicOffset, chunkAnalysis.getFrequency(), splDb, amplitude, decibels, playTime);
|
|
|
+
|
|
|
+ if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + getOffsetMS() + beatDuration)) {
|
|
|
|
|
|
- //musicXmlNote.setTimeStamp(musicXmlNote.getTimeStamp() + micDelayMS);
|
|
|
+ musicXmlNote.setTimeStamp(musicXmlNote.getTimeStamp() + getOffsetMS() + beatDuration);
|
|
|
+
|
|
|
+ if(musicXmlNote.getFrequency() <= 0) {
|
|
|
+ musicXmlNote.setDontEvaluating(true);
|
|
|
+ }
|
|
|
|
|
|
if (musicXmlNote.getDontEvaluating()) {
|
|
|
noteAnalysis.setIgnore(true);
|
|
|
}
|
|
|
|
|
|
List<ChunkAnalysis> nodeChunkAnalysisList = extract(musicXmlNote, noteAnalysis);
|
|
|
-
|
|
|
+
|
|
|
// 判断节奏(音符持续时间内有不间断的音高,就节奏正确)
|
|
|
if (!StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
|
|
|
noteAnalysis.setPlayFrequency(-1);
|
|
|
- noteAnalysis.setTempoStatus(computeTempoWithAmplitude2(musicXmlNote, noteAnalysis, nodeChunkAnalysisList));
|
|
|
+ noteAnalysis.setTempoStatus(computeTempoWithAmplitude(musicXmlNote, noteAnalysis, nodeChunkAnalysisList));
|
|
|
} else {
|
|
|
noteAnalysis.setPlayFrequency(computeFrequency(musicXmlNote, nodeChunkAnalysisList));
|
|
|
noteAnalysis.setTempoStatus(computeTempoWithFrequency(musicXmlNote, noteAnalysis, nodeChunkAnalysisList));
|
|
@@ -532,24 +567,7 @@ public class UserChannelContext {
|
|
|
|
|
|
double playDurationTime = 0;
|
|
|
|
|
|
- if (!StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
|
|
|
- if (noteAnalysis.getFrequency() == -1) {// 休止符
|
|
|
- if (noteAnalysis.getTempoStatus() == 0) {
|
|
|
- noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
|
|
|
- } else {
|
|
|
- noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
|
|
|
- }
|
|
|
- } else {
|
|
|
- int beatTimes = (int) chunkAnalysisList.stream().filter(t -> t.getAmplitude() > 0).count();
|
|
|
- if (beatTimes == 0) {
|
|
|
- noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
|
|
|
- } else if (noteAnalysis.getTempoStatus() == 0) {
|
|
|
- noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
|
|
|
- } else {
|
|
|
- noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
|
|
|
- }
|
|
|
- }
|
|
|
- } else {
|
|
|
+ if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
|
|
|
|
|
|
NotePlayResult notePlayResult = queryNoteFrequency(musicXmlNote, noteAnalysis.getPlayFrequency());
|
|
|
|
|
@@ -623,9 +641,9 @@ public class UserChannelContext {
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- private int computeFrequency(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList) {
|
|
|
+ private float computeFrequency(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList) {
|
|
|
|
|
|
- if (chunkAnalysisList == null || chunkAnalysisList.size() == 0) {
|
|
|
+ if (chunkAnalysisList == null || chunkAnalysisList.size() == 0 || musicXmlNote.getDontEvaluating()) {
|
|
|
return -1;
|
|
|
}
|
|
|
|
|
@@ -633,14 +651,14 @@ public class UserChannelContext {
|
|
|
|
|
|
List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
|
|
|
|
|
|
- List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency())
|
|
|
+ List<Float> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency())
|
|
|
.filter(t -> t.doubleValue() > MIN_FREQUECY && t.doubleValue() < MAX_FREQUECY).collect(Collectors.toList());
|
|
|
|
|
|
if (chunkFrequencyList.size() == 0) {
|
|
|
return -1;
|
|
|
}
|
|
|
|
|
|
- int frequency = (int) (chunkFrequencyList.stream().mapToInt(t -> t).sum() / chunkFrequencyList.size());
|
|
|
+ float frequency = chunkFrequencyList.stream().reduce(0f, (a, b) -> a + b) / chunkFrequencyList.size();
|
|
|
|
|
|
return frequency;
|
|
|
}
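The switch from int to float matters here because the old integer average discarded sub-hertz detail before dividing. A small standalone comparison with illustrative values:

    import java.util.Arrays;
    import java.util.List;

    public class AverageFrequencyCheck {
        public static void main(String[] args) {
            List<Float> chunkFrequencies = Arrays.asList(440.4f, 440.6f, 440.5f);

            // New behaviour: the fractional part survives.
            float avg = chunkFrequencies.stream().reduce(0f, (a, b) -> a + b) / chunkFrequencies.size();
            System.out.println(avg);                    // 440.5

            // Old behaviour: int frequencies were truncated before averaging.
            System.out.println((440 + 440 + 440) / 3);  // 440
        }
    }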
|
|
@@ -653,248 +671,125 @@ public class UserChannelContext {
|
|
|
*/
|
|
|
private int computeTempoWithFrequency(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> chunkAnalysisList) {
|
|
|
|
|
|
+ if (chunkAnalysisList == null || chunkAnalysisList.size() == 0 || musicXmlNote.getDontEvaluating()) {
|
|
|
+ return -1;
|
|
|
+ }
|
|
|
+
|
|
|
LOGGER.debug("---------------------TEMPO----------------------");
|
|
|
|
|
|
- double avgPower = chunkAnalysisList.stream().collect(Collectors.averagingDouble(ChunkAnalysis::getSplDb));
|
|
|
- noteAnalysis.setDBSPL(avgPower);
|
|
|
-
|
|
|
- if (noteAnalysis.getTempoStatus() == 0 || chunkAnalysisList == null || chunkAnalysisList.size() == 0) {
|
|
|
+ ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
|
|
|
+
|
|
|
+ Map<Integer, Double> frequencyRhythmMap = queryRhythmsByFrequency(musicXmlNote, noteAnalysis, chunkAnalysisList);
|
|
|
+
|
|
|
+ if(frequencyRhythmMap.size() > 1) {
|
|
|
+ LOGGER.debug("根据音高检测到[{}]个断点,分别在[{}]", frequencyRhythmMap.size(), frequencyRhythmMap.values().stream().map(value -> value + firstChunkAnalysis.getStartTime()).map(Object::toString).collect(Collectors.joining(",")));
|
|
|
return 0;
|
|
|
}
|
|
|
-
|
|
|
- reduceNoise(chunkAnalysisList, EvaluationCriteriaEnum.FREQUENCY);
|
|
|
-
|
|
|
- if (musicXmlNote.getFrequency() == -1) {// 休止符
|
|
|
- return chunkAnalysisList.stream().filter(t -> t.getFrequency() > MIN_FREQUECY).count() <= 1 ? 1 : 0;
|
|
|
- }
|
|
|
-
|
|
|
- // 将信号分堆归类
|
|
|
- Map<Integer, Integer> signalGrouping = new HashMap<Integer, Integer>();
|
|
|
- for (int i = 1; i < chunkAnalysisList.size(); i++) {
|
|
|
- if (isSamePitch(chunkAnalysisList.get(i).getFrequency(), chunkAnalysisList.get(i - 1).getFrequency())) {
|
|
|
- if (signalGrouping.get(chunkAnalysisList.get(i - 1).getFrequency()) == null) {
|
|
|
- signalGrouping.put(chunkAnalysisList.get(i - 1).getFrequency(), 1);
|
|
|
- } else {
|
|
|
- signalGrouping.put(chunkAnalysisList.get(i - 1).getFrequency(), signalGrouping.get(chunkAnalysisList.get(i - 1).getFrequency()) + 1);
|
|
|
- }
|
|
|
- } else {
|
|
|
- signalGrouping.put(chunkAnalysisList.get(i - 1).getFrequency(), 1);
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- int maxTimes = 0, avgFrequency = 0;
|
|
|
-
|
|
|
- for (Entry<Integer, Integer> entry : signalGrouping.entrySet()) {
|
|
|
- if (entry.getValue() > maxTimes) {
|
|
|
- maxTimes = entry.getValue();
|
|
|
- avgFrequency = entry.getKey();
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- LOGGER.debug("当前音符时值范围内平均音高[{}] 声压[{}]", avgFrequency, avgPower);
|
|
|
-
|
|
|
- int firstBeatIndex = -1;
|
|
|
+
|
|
|
double firstBeatTime = 0;
|
|
|
-
|
|
|
- int depthThreshold = 2;
|
|
|
- int peakNum = 0;
|
|
|
- int continueNums = 0;
|
|
|
- boolean isContinue = false;
|
|
|
- // 检测音高是否间断
|
|
|
- for (int i = 0; i < chunkAnalysisList.size(); i++) {
|
|
|
- if (firstBeatIndex == -1) {
|
|
|
- if (chunkAnalysisList.get(i).getFrequency() > MIN_FREQUECY || musicXmlNote.getFrequency() == -1) {
|
|
|
- firstBeatIndex = i;
|
|
|
- firstBeatTime = chunkAnalysisList.get(i).getStartTime();
|
|
|
- }
|
|
|
+
|
|
|
+ if(frequencyRhythmMap.size() == 1) {
|
|
|
+ // Check the onset time point
|
|
|
+ for(Entry<Integer, Double> entry : frequencyRhythmMap.entrySet()) {
|
|
|
+ firstBeatTime = entry.getValue();
|
|
|
}
|
|
|
- if (!isSamePitch(avgFrequency, chunkAnalysisList.get(i).getFrequency())) {
|
|
|
- ++continueNums;
|
|
|
- if (continueNums >= depthThreshold) {
|
|
|
- if (isContinue == false) {
|
|
|
- LOGGER.debug("范围内查询到异常音高信号,StartTime:{} CurrentFrequency:{} AvgFrequency:{}", chunkAnalysisList.get(i).getStartTime(), chunkAnalysisList.get(i).getFrequency(), avgFrequency);
|
|
|
- peakNum++;
|
|
|
- isContinue = true;
|
|
|
- }
|
|
|
- }
|
|
|
- } else {
|
|
|
- continueNums = 0;
|
|
|
- isContinue = false;
|
|
|
+
|
|
|
+ if ((firstBeatTime - firstChunkAnalysis.getStartTime()) * 100 / musicXmlNote.getDuration() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(),
|
|
|
+ musicXmlNote.getDuration()) * 2) {
|
|
|
+ LOGGER.debug("节奏错误原因:进入时间点[{}]太晚", firstBeatTime);
|
|
|
+ return 0;
|
|
|
}
|
|
|
- }
|
|
|
-
|
|
|
- if (peakNum > 0) {
|
|
|
- LOGGER.debug("已找到节奏起始点,根据音高又检测到[{}]个断点", peakNum);
|
|
|
+ LOGGER.debug("找到节奏点StartTime:{}", firstBeatTime);
|
|
|
+ } else {
|
|
|
+ LOGGER.debug("没有找到节奏点");
|
|
|
return 0;
|
|
|
}
|
|
|
+
|
|
|
+ /**
|
|
|
+ // Re-analyse the rhythm using the energy envelope
|
|
|
+ Map<Integer, Double> energyRrhythmMap = queryRhythmsByEnergyEnvelope(musicXmlNote, noteAnalysis, chunkAnalysisList);
|
|
|
|
|
|
- // 判断进入时间点
|
|
|
- if (firstBeatIndex != -1 && firstBeatIndex * 100 / (chunkAnalysisList.size() - 1) > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration())) {
|
|
|
- LOGGER.debug("节奏(音高)错误原因:进入时间点[{}]太晚", firstBeatTime);
|
|
|
+ if (energyRrhythmMap.size() > 1) {
|
|
|
+ LOGGER.debug("根据能量包络检测到[{}]个节奏点,分别在[{}]", energyRrhythmMap.size(), energyRrhythmMap.values().stream().map(value -> value + firstChunkAnalysis.getStartTime()).map(Object::toString).collect(Collectors.joining(",")));
|
|
|
return 0;
|
|
|
}
|
|
|
|
|
|
- //判断是否与上一个音延续下来的
|
|
|
-
|
|
|
- ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
|
|
|
-
|
|
|
- Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream()
|
|
|
- .filter(t -> Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(firstChunkAnalysis.getStartTime()))
|
|
|
- .reduce((first, second) -> second);
|
|
|
-
|
|
|
- ChunkAnalysis lastChunkAnalysis = null;
|
|
|
- if (chunkAnalysisOptional.isPresent()) {
|
|
|
- lastChunkAnalysis = chunkAnalysisOptional.get();
|
|
|
- }
|
|
|
+ if(energyRrhythmMap.size() == 1) {
|
|
|
+ for(Entry<Integer, Double> entry : energyRrhythmMap.entrySet()) {
|
|
|
+ firstBeatTime = entry.getValue();
|
|
|
+ }
|
|
|
+
|
|
|
+ firstBeatTime = firstChunkAnalysis.getStartTime() + firstBeatTime;
|
|
|
|
|
|
- if (lastChunkAnalysis == null) {
|
|
|
- lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, -40, 0, 0);
|
|
|
- }
|
|
|
-
|
|
|
- //只有音高节奏点才需要检测这个条件
|
|
|
- if(noteAnalysis.getTempoStatus() == 1) {
|
|
|
- if(firstChunkAnalysis.getFrequency() > MIN_FREQUECY && lastChunkAnalysis.getFrequency() > MIN_FREQUECY){
|
|
|
- if(isSamePitch(firstChunkAnalysis.getFrequency(), lastChunkAnalysis.getFrequency())){
|
|
|
- LOGGER.debug("节奏错误原因:上一个音[{}]延续下来导致的", lastChunkAnalysis.getFrequency());
|
|
|
- return 0;
|
|
|
- }
|
|
|
+ // Check the onset time point
|
|
|
+ if ((firstBeatTime - firstChunkAnalysis.getStartTime()) * 100 / musicXmlNote.getDuration() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(),
|
|
|
+ musicXmlNote.getDuration()) * 2) {
|
|
|
+ LOGGER.debug("节奏错误原因:进入时间点[{}]太晚", firstBeatTime);
|
|
|
+ return 0;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- continueNums = 0;
|
|
|
- firstBeatIndex = -1;
|
|
|
- firstBeatTime = 0;
|
|
|
- // 判断过程中声音是否有起伏
|
|
|
-
|
|
|
- //获取上一个音符的声压等级
|
|
|
- Optional<NoteAnalysis> preNoteAnalysisOptinal = doneNoteAnalysisList.stream().filter(t -> t.getIndex() == musicXmlNote.getMusicalNotesIndex() - 1).findFirst();
|
|
|
- if(preNoteAnalysisOptinal.isPresent()) {
|
|
|
- avgPower = preNoteAnalysisOptinal.get().getDBSPL();
|
|
|
- }
|
|
|
-
|
|
|
- chunkAnalysisList.add(0, lastChunkAnalysis);
|
|
|
- Map<Integer, ChunkAnalysis> peakMap = detectPeaks(chunkAnalysisList, avgPower + 2);
|
|
|
- peakNum = peakMap.size();
|
|
|
- if (peakNum > 0) {
|
|
|
- firstBeatIndex = peakMap.keySet().stream().min(Integer::compare).get();
|
|
|
- firstBeatTime = peakMap.get(firstBeatIndex).getStartTime();
|
|
|
- }
|
|
|
-
|
|
|
- if (peakNum > 1) {
|
|
|
- LOGGER.debug("根据声压检测到[{}]个断点", peakNum);
|
|
|
+ if(energyRrhythmMap.size() == 0 && frequencyRhythmMap.size() == 0) {
|
|
|
+ LOGGER.debug("节奏错误原因:没有找到节奏点");
|
|
|
return 0;
|
|
|
}
|
|
|
-
|
|
|
- // 判断进入时间点
|
|
|
- if (firstBeatIndex * 100 / (chunkAnalysisList.size() - 1) > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration())) {
|
|
|
- LOGGER.debug("节奏(声压)错误原因:进入时间点[{}]太晚", firstBeatTime);
|
|
|
- return 0;
|
|
|
+ */
|
|
|
+
|
|
|
+ if(dynamicOffset == 0) {
|
|
|
+ dynamicOffset = firstBeatTime - firstChunkAnalysis.getStartTime();
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
return 99;
|
|
|
}
|
|
|
+
|
|
|
|
|
|
- private int computeTempoWithAmplitude2(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> chunkAnalysisList) {
|
|
|
+ private int computeTempoWithAmplitude(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> chunkAnalysisList) {
|
|
|
|
|
|
- if (chunkAnalysisList == null || chunkAnalysisList.size() == 0) {
|
|
|
+ if (chunkAnalysisList == null || chunkAnalysisList.size() == 0 || musicXmlNote.getDontEvaluating()) {
|
|
|
return 0;
|
|
|
}
|
|
|
- //计算平均振幅
|
|
|
- double avgAmplitude = chunkAnalysisList.stream().collect(Collectors.averagingDouble(ChunkAnalysis::getAmplitude));
|
|
|
|
|
|
- noteAnalysis.setAmplitude(avgAmplitude);
|
|
|
- noteAnalysis.setDBSPL(chunkAnalysisList.stream().collect(Collectors.averagingDouble(ChunkAnalysis::getSplDb)));
|
|
|
-
|
|
|
- reduceNoise(chunkAnalysisList, EvaluationCriteriaEnum.AMPLITUDE);
|
|
|
+ //Map<Integer, Double> rhythmMap = queryRhythmsByEnergyEnvelope(musicXmlNote, noteAnalysis, chunkAnalysisList);
|
|
|
|
|
|
- if (musicXmlNote.getFrequency() == -1) {// 休止符
|
|
|
-
|
|
|
- LOGGER.debug("--Amplitude:{} Denominator:{}", chunkAnalysisList.stream().map(t -> t.getAmplitude()).collect(Collectors.toList()),
|
|
|
- musicXmlNote.getDenominator());
|
|
|
- return chunkAnalysisList.stream().filter(t -> t.getAmplitude() > 0).count() <= 0 ? 3 : 0;
|
|
|
+ Map<Integer, Double> rhythmMap = queryRhythmsByAmplitude(musicXmlNote, noteAnalysis, chunkAnalysisList);
|
|
|
+
|
|
|
+ if(rhythmMap.size() != 1) {
|
|
|
+ ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
|
|
|
+ LOGGER.debug("根据能量包络检测到[{}]个断点,分别在[{}]", rhythmMap.size(), rhythmMap.values().stream().map(value -> value + firstChunkAnalysis.getStartTime()).map(Object::toString).collect(Collectors.joining(",")));
|
|
|
+
|
|
|
+ if(rhythmMap.size() > 1) {
|
|
|
+ noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
|
|
|
+ }else {
|
|
|
+ noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
|
|
|
+ }
|
|
|
+ return 0;
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
|
|
|
-
|
|
|
- Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream()
|
|
|
- .filter(t -> Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(firstChunkAnalysis.getStartTime()))
|
|
|
- .reduce((first, second) -> second);
|
|
|
-
|
|
|
- ChunkAnalysis lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, -40, 0, 0);
|
|
|
|
|
|
- if (chunkAnalysisOptional.isPresent()) {
|
|
|
- lastChunkAnalysis = chunkAnalysisOptional.get();
|
|
|
- }
|
|
|
-
|
|
|
- chunkAnalysisList.add(0, lastChunkAnalysis);
|
|
|
-
|
|
|
- // 检测是否有多个波峰
|
|
|
- boolean tempo = false;
|
|
|
- int peakNum = 0;
|
|
|
- int firstBeatIndex = -1;
|
|
|
+ // Check the onset time point
|
|
|
double firstBeatTime = 0;
|
|
|
-
|
|
|
- /**
|
|
|
- boolean isContinue = false;
|
|
|
- int firstPeakIndex = -1;
|
|
|
- int firstPeakValue = 0;
|
|
|
- // int range = hardLevel.getAmplitudeThreshold();
|
|
|
- int range = 5;
|
|
|
-
|
|
|
- if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.DECIBELS.getCode())) {
|
|
|
- range = 50;
|
|
|
+ for(Entry<Integer, Double> entry : rhythmMap.entrySet()) {
|
|
|
+ firstBeatTime = entry.getValue();
|
|
|
}
|
|
|
-
|
|
|
- for (int i = 1; i < chunkAmplitudeList.size(); i++) {
|
|
|
- if (chunkAmplitudeList.get(i - 1) + range >= chunkAmplitudeList.get(i)) {
|
|
|
- isContinue = false;
|
|
|
- continue;
|
|
|
- }
|
|
|
-
|
|
|
- if (isContinue == false && chunkAmplitudeList.get(i - 1) + range < chunkAmplitudeList.get(i)) {
|
|
|
- isContinue = true;
|
|
|
- peakSize++;
|
|
|
-
|
|
|
- if (firstPeakIndex == -1) {
|
|
|
- firstPeakIndex = i;
|
|
|
- firstPeakValue = chunkAmplitudeList.get(i);
|
|
|
- }
|
|
|
- }
|
|
|
- }*/
|
|
|
|
|
|
- //获取上一个音符的振幅
|
|
|
- Optional<NoteAnalysis> preNoteAnalysisOptinal = doneNoteAnalysisList.stream().filter(t -> t.getIndex() == musicXmlNote.getMusicalNotesIndex() - 1).findFirst();
|
|
|
- if(preNoteAnalysisOptinal.isPresent()) {
|
|
|
- avgAmplitude = preNoteAnalysisOptinal.get().getDBSPL();
|
|
|
- }
|
|
|
+ firstBeatTime = firstChunkAnalysis.getStartTime() + firstBeatTime;
|
|
|
|
|
|
- Map<Integer, ChunkAnalysis> peakMap = detectPeaks(chunkAnalysisList, avgAmplitude);
|
|
|
- peakNum = peakMap.size();
|
|
|
- if (peakNum > 0) {
|
|
|
- firstBeatIndex = peakMap.keySet().stream().min(Integer::compare).get();
|
|
|
- firstBeatTime = peakMap.get(firstBeatIndex).getStartTime();
|
|
|
- }
|
|
|
-
|
|
|
- if (peakNum == 0) {
|
|
|
- tempo = lastChunkAnalysis.isPeak();
|
|
|
- } else if (peakNum == 1) {
|
|
|
- tempo = true;
|
|
|
- } else {
|
|
|
- tempo = false;
|
|
|
- LOGGER.debug("有多个波峰");
|
|
|
- }
|
|
|
-
|
|
|
- if (tempo) {
|
|
|
- // 判断进入时间点
|
|
|
- if (firstBeatIndex * 100 / (chunkAnalysisList.size() - 1) > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(),
|
|
|
- musicXmlNote.getDuration())) {
|
|
|
- LOGGER.debug("节奏错误原因:进入时间点[{}]太晚", firstBeatTime);
|
|
|
- tempo = false;
|
|
|
- }
|
|
|
+ if ((firstBeatTime - firstChunkAnalysis.getStartTime()) * 100 / musicXmlNote.getDuration() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(),
|
|
|
+ musicXmlNote.getDuration()) * 2) {
|
|
|
+ LOGGER.debug("节奏错误原因:进入时间点[{}]太晚", firstBeatTime);
|
|
|
+
|
|
|
+ noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
|
|
|
+ return 0;
|
|
|
}
|
|
|
|
|
|
-
|
|
|
- return tempo == false ? 0 : 3;
|
|
|
+ LOGGER.debug("找到节奏点StartTime:{}", firstBeatTime);
|
|
|
+
|
|
|
+ noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
|
|
|
+
|
|
|
+ dynamicOffset = firstBeatTime - firstChunkAnalysis.getStartTime();
|
|
|
+
|
|
|
+ return 3;
|
|
|
}
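The amplitude path above now treats a note as rhythmically correct only when queryRhythmsByAmplitude reports exactly one onset inside the evaluation window and that onset is not too late. A simplified, self-contained sketch of the same threshold-plus-spacing rule (constants copied from the diff, bookkeeping simplified):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class AmplitudeOnsetSketch {

        // One onset per amplitude burst, with bursts closer than 150 ms merged.
        static Map<Integer, Double> onsets(int[] amplitudes, float sampleRate, int bufferSize) {
            Map<Integer, Double> result = new LinkedHashMap<>();
            int amplitudeThreshold = 2;            // as in queryRhythmsByAmplitude
            int intervalTimeMs = 150;              // as in queryRhythmsByAmplitude
            double lastOnset = -intervalTimeMs;
            int index = 0;
            for (int i = 0; i < amplitudes.length; i++) {
                double timeMs = i * bufferSize * 1000.0 / sampleRate;
                if (amplitudes[i] >= amplitudeThreshold && timeMs - lastOnset > intervalTimeMs) {
                    result.put(++index, timeMs);
                    lastOnset = timeMs;
                }
            }
            return result;
        }

        public static void main(String[] args) {
            int[] amplitudes = {0, 0, 5, 6, 0, 0, 0, 0, 0, 7};
            // Two bursts ≈ 162 ms apart -> two onsets -> the note would be CADENCE_WRONG.
            System.out.println(onsets(amplitudes, 44100f, 1024));
        }
    }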
|
|
|
|
|
|
private double queryFirstNoteStartTime(List<ChunkAnalysis> chunkAnalysisList, MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
|
|
@@ -903,6 +798,10 @@ public class UserChannelContext {
|
|
|
LOGGER.debug("找不到数据,StartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
|
|
|
return musicXmlNote.getTimeStamp() + dynamicOffset;
|
|
|
}
|
|
|
+
|
|
|
+ if (!StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
|
|
|
+ return Math.max(chunkAnalysisList.get(0).getStartTime(), 0);
|
|
|
+ }
|
|
|
|
|
|
double onsetStartTime = 0;
|
|
|
double preNoteAvgPower = 0;
|
|
@@ -933,7 +832,7 @@ public class UserChannelContext {
|
|
|
|
|
|
if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
|
|
|
//有音高,声压才有效
|
|
|
- double preNoteAvgFrequency = -1;
|
|
|
+ float preNoteAvgFrequency = -1;
|
|
|
|
|
|
if(preNoteAnalysisOptinal.isPresent()) {
|
|
|
preNoteAvgPower = preNoteAnalysisOptinal.get().getDBSPL();
|
|
@@ -977,7 +876,7 @@ public class UserChannelContext {
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- peakMap = detectPeaks(chunkAnalysisList, preNoteAvgPower + 2);
|
|
|
+ peakMap = detectPeaks(chunkAnalysisList, preNoteAvgPower + 1);
|
|
|
|
|
|
} else {
|
|
|
|
|
@@ -1005,6 +904,7 @@ public class UserChannelContext {
|
|
|
// return chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime();
|
|
|
return Math.max(musicXmlNote.getTimeStamp() + dynamicOffset, onsetStartTime);
|
|
|
}
|
|
|
+
|
|
|
|
|
|
|
|
|
private void reduceNoise(List<ChunkAnalysis> chunkAnalysisList, EvaluationCriteriaEnum criteria) {
|
|
@@ -1035,17 +935,20 @@ public class UserChannelContext {
|
|
|
|
|
|
}
|
|
|
|
|
|
- private boolean isSamePitch(double frequency1, double frequency2) {
|
|
|
- if (frequency1 == frequency2) {
|
|
|
+ private boolean isSamePitch(float basicFrequency, float frequency) {
|
|
|
+ if (new BigDecimal(basicFrequency + "").equals(new BigDecimal(frequency + ""))) {
|
|
|
return true;
|
|
|
}
|
|
|
- return (frequency1 != -1 || frequency2 != -1) && Math.abs(YINPitchDetector.getDeviationCent(frequency1, frequency2)) < 50;
|
|
|
+
|
|
|
+ frequency = handleHarmonic(basicFrequency, frequency);
|
|
|
+
|
|
|
+ return (basicFrequency != -1 && frequency != -1) && Math.abs(YINPitchDetector.getDeviationCent(basicFrequency, frequency)) < 50;
|
|
|
}
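isSamePitch above treats anything within 50 cents (half a semitone) as the same pitch once harmonics have been folded. Assuming YINPitchDetector.getDeviationCent implements the standard interval measure 1200 * log2(f2 / f1), the boundary looks like this:

    public class CentDeviationSketch {

        // Assumed formula behind getDeviationCent: 1200 * log2(f2 / f1).
        static double deviationCent(double f1, double f2) {
            return 1200.0 * Math.log(f2 / f1) / Math.log(2);
        }

        public static void main(String[] args) {
            System.out.println(deviationCent(440, 446));     // ≈ 23.5 cents -> same pitch
            System.out.println(deviationCent(440, 466.16));  // ≈ 100 cents  -> different note (A4 vs A#4)
        }
    }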
|
|
|
|
|
|
private Map<Integer, ChunkAnalysis> detectPeaks(List<ChunkAnalysis> signalList, double threshold) {
|
|
|
Map<Integer, ChunkAnalysis> peaks = new HashMap<Integer, ChunkAnalysis>();
|
|
|
|
|
|
- movingAverageSmooth(signalList, 10);
|
|
|
+ LOGGER.debug("平均声压值:[{}}", threshold);
|
|
|
|
|
|
/**
|
|
|
int waveCrestNums = 0,waveTroughNums = 0;
|
|
@@ -1074,30 +977,18 @@ public class UserChannelContext {
|
|
|
|
|
|
int continueSignalThreshold = 2;
|
|
|
int continueNums = 0;
|
|
|
- int disconnectNums = 0;
|
|
|
- boolean isContinue = false;
|
|
|
|
|
|
if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
|
|
|
- for (int i = 0; i < signalList.size(); i++) {
|
|
|
- //如果连续2个超过平均值
|
|
|
- if(Math.abs(threshold - signalList.get(i).getSplDb()) > 0) {
|
|
|
+ for (int i = 1; i < signalList.size(); i++) {
|
|
|
+ // Count frames that are above the average level and still rising
|
|
|
+ if(threshold - signalList.get(i).getSplDb() < 0 && signalList.get(i).getSplDb() > signalList.get(i - 1).getSplDb()) {
|
|
|
++continueNums;
|
|
|
- if(continueNums >= continueSignalThreshold) {
|
|
|
- disconnectNums = 0;
|
|
|
- if(continueNums != i+1) {
|
|
|
- if(isContinue == false) {
|
|
|
- peaks.put(i, signalList.get(i));
|
|
|
- LOGGER.debug("【过程】范围内查询到声压信号,StartTime:{} CurrentSplDb:{} Threshold:{}" , signalList.get(i).getStartTime(), signalList.get(i).getSplDb(), threshold);
|
|
|
- isContinue = true;
|
|
|
- }
|
|
|
- }
|
|
|
- }
|
|
|
} else {
|
|
|
- disconnectNums++;
|
|
|
- if(disconnectNums >= continueSignalThreshold) {
|
|
|
- continueNums = 0;
|
|
|
- isContinue = false;
|
|
|
+ if(continueNums >= continueSignalThreshold) {
|
|
|
+ peaks.put(i, signalList.get(i));
|
|
|
+ LOGGER.debug("【过程】范围内查询到声压信号,StartTime:{} CurrentSplDb:{} Threshold:{}" , signalList.get(i).getStartTime(), signalList.get(i - 1).getSplDb(), threshold);
|
|
|
}
|
|
|
+ continueNums = 0;
|
|
|
}
|
|
|
}
|
|
|
} else {
|
|
@@ -1119,83 +1010,224 @@ public class UserChannelContext {
|
|
|
return peaks;
|
|
|
}
|
|
|
|
|
|
- private List<ChunkAnalysis> extract(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
|
|
|
+
|
|
|
+ private Map<Integer,Double> queryRhythmsByEnergyEnvelope(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> datas){
|
|
|
|
|
|
- LOGGER.debug("---------------------Extract Data----------------------");
|
|
|
-
|
|
|
- double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration()) / 100;
|
|
|
+ LOGGER.debug("------------利用能量包络寻找节奏点[{} - {}]------------", noteAnalysis.getStartTime(), noteAnalysis.getEndTime());
|
|
|
+
|
|
|
+
|
|
|
+ int start = (int) (noteAnalysis.getStartTime() * audioFormat.getSampleRate() / 1000);
|
|
|
+ int end = (int) (noteAnalysis.getEndTime() * audioFormat.getSampleRate() / 1000);
|
|
|
+
|
|
|
+ // Slice the signal for this note
|
|
|
+ float[] fSamples = new float[end - start];
|
|
|
+ int j = 0;
|
|
|
+ for(int i = start; i < end; i++) {
|
|
|
+ fSamples[j] = floatSamples.get(i - 1);
|
|
|
+ j++;
|
|
|
+ }
|
|
|
+
|
|
|
+ float[] energyEnvelope = Signals.energyEnvelope(fSamples, frameSize);
|
|
|
+
|
|
|
+ /**
|
|
|
+ float[] energyEnvelope = new float[datas.size() * getBufferSize() / frameSize];
|
|
|
+
|
|
|
+ for(int i = 0; i < datas.size(); i++) {
|
|
|
+ for(int j = 0; j < datas.get(i).getEnergyEnvelop().length; j++) {
|
|
|
+ energyEnvelope[i * datas.get(i).getEnergyEnvelop().length + j] = datas.get(i).getEnergyEnvelop()[j];
|
|
|
+ }
|
|
|
+ }
|
|
|
+ */
|
|
|
+ PercussionRhythmDetector percussionRhythmDetector = new PercussionRhythmDetector(energyEnvelope, audioFormat.getSampleRate(), frameSize);
|
|
|
+
|
|
|
+ Map<Integer, Double> rhythMap = percussionRhythmDetector.detect();
|
|
|
+
|
|
|
+ return rhythMap;
|
|
|
+ }
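Signals.energyEnvelope and PercussionRhythmDetector are not part of this diff; the sketch below shows the kind of per-frame energy series the detector above is assumed to consume, namely one value per non-overlapping frame of frameSize samples:

    public class EnergyEnvelopeSketch {

        // Assumed shape of Signals.energyEnvelope: frame-wise energy (sum of squares).
        static float[] energyEnvelope(float[] samples, int frameSize) {
            float[] envelope = new float[samples.length / frameSize];
            for (int frame = 0; frame < envelope.length; frame++) {
                float sum = 0;
                for (int i = 0; i < frameSize; i++) {
                    float s = samples[frame * frameSize + i];
                    sum += s * s;
                }
                envelope[frame] = sum;
            }
            return envelope;
        }

        public static void main(String[] args) {
            float[] signal = new float[512];        // 4 frames of 128 samples
            for (int i = 256; i < 512; i++) {
                signal[i] = 0.5f;                   // a crude attack in the second half
            }
            // Prints [0.0, 0.0, 32.0, 32.0]: the jump marks the onset frame.
            System.out.println(java.util.Arrays.toString(energyEnvelope(signal, 128)));
        }
    }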
|
|
|
+
|
|
|
+ private Map<Integer,Double> queryRhythmsByAmplitude(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> datas){
|
|
|
+
|
|
|
+ Map<Integer, Double> rhythMap = new HashMap<Integer, Double>();
|
|
|
|
|
|
- double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
|
|
|
- double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
|
|
|
+ reduceNoise(datas, EvaluationCriteriaEnum.AMPLITUDE);
|
|
|
|
|
|
- LOGGER.debug("在范围内寻找起始点——floatingRange:{} modified [ {} - {} ]", floatingRange, startTime, endTime);
|
|
|
+ int amplitudeThreshold = 2;
|
|
|
+ int beatContinueNum = 0;
|
|
|
+ int intervalTime = 150;
|
|
|
+ ChunkAnalysis chunkAnalysis = null;
|
|
|
+ double rhythmTime = -1;
|
|
|
+ int peakIndex = 0;
|
|
|
+ int continueNumThreshold = 0;
|
|
|
+
|
|
|
+ for (int i = 0; i < datas.size(); i++) {
|
|
|
+ chunkAnalysis = datas.get(i);
|
|
|
|
|
|
- List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream()
|
|
|
- .filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime)
|
|
|
- && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime))
|
|
|
- .collect(Collectors.toList());
|
|
|
+ if (chunkAnalysis.getAmplitude() >= amplitudeThreshold) {
|
|
|
+ beatContinueNum++;
|
|
|
+ if (beatContinueNum == 1) {
|
|
|
+ rhythmTime = i * bufferSize * 1000 / audioFormat.getSampleRate();
|
|
|
+ }
|
|
|
+
|
|
|
+ if (beatContinueNum > continueNumThreshold) {
|
|
|
+ if (rhythMap.size() == 0 || rhythmTime - rhythMap.get(peakIndex) > intervalTime) {
|
|
|
+ peakIndex++;
|
|
|
+ rhythMap.put(peakIndex, rhythmTime);
|
|
|
+ }
|
|
|
+ }
|
|
|
|
|
|
+ } else {
|
|
|
+ beatContinueNum = 0;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ return rhythMap;
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+ private Map<Integer, Double> queryRhythmsByFrequency(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> datas){
|
|
|
+ LOGGER.debug("------------利用频率寻找节奏点------------");
|
|
|
/**
|
|
|
- * 过程中找到了起始点都可以作为节奏点,后面再出现波峰波谷就表明节奏错误;否则,后面需要出现一个波峰或波谷
|
|
|
- * 是否要加上前一个音符的最后一个信号???
|
|
|
- */
|
|
|
- double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote, noteAnalysis);
|
|
|
- double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
|
|
|
+ // Fetch the previous note
|
|
|
+ Optional<NoteAnalysis> preNoteAnalysisOptinal = doneNoteAnalysisList.stream().filter(t -> t.getIndex() == musicXmlNote.getMusicalNotesIndex() - 1).findFirst();
|
|
|
+
|
|
|
+ double preNoteAvgFrequency = -1;
|
|
|
+ if(preNoteAnalysisOptinal.isPresent()) {
|
|
|
+ preNoteAvgFrequency = preNoteAnalysisOptinal.get().getPlayFrequency();
|
|
|
+ }
|
|
|
+
|
|
|
+ LOGGER.debug("上一个音符的平均音高[{}]", preNoteAvgFrequency);
|
|
|
+ */
|
|
|
|
|
|
- LOGGER.debug("在范围内寻找到起始点,调整后的信号时值范围[ {} - {} ]", correctedStartTime, correctedEndTime);
|
|
|
+ // Fetch the previous chunk
|
|
|
+ ChunkAnalysis firstChunkAnalysis = datas.get(0);
|
|
|
+
|
|
|
+ Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream()
|
|
|
+ .filter(t -> Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(firstChunkAnalysis.getStartTime()))
|
|
|
+ .reduce((first, second) -> second);
|
|
|
|
|
|
- if (correctedStartTime != startTime) {
|
|
|
- if (startTime < 0) {
|
|
|
- dynamicOffset = correctedStartTime;
|
|
|
- } else {
|
|
|
- dynamicOffset = correctedStartTime - startTime;
|
|
|
+ ChunkAnalysis lastChunkAnalysis = null;
|
|
|
+ if (chunkAnalysisOptional.isPresent()) {
|
|
|
+ lastChunkAnalysis = chunkAnalysisOptional.get();
|
|
|
+ }
|
|
|
+
|
|
|
+ if (lastChunkAnalysis == null) {
|
|
|
+ lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, new float[0], 0);
|
|
|
+ }
|
|
|
+
|
|
|
+ float preNoteAvgFrequency = lastChunkAnalysis.getFrequency();
|
|
|
+
|
|
|
+ ChunkAnalysis chunkAnalysis = null;
|
|
|
+ Map<Integer, Double> rhythMap = new HashMap<Integer, Double>();
|
|
|
+
|
|
|
+ int intervalTime = 150;
|
|
|
+ double lastestRhythmTime = Math.negateExact(intervalTime);
|
|
|
+
|
|
|
+ int silenceContinueNum = 0;
|
|
|
+ int beatContinueNum = 0;
|
|
|
+ double rhythmTime = -1;
|
|
|
+ int peakIndex = 0;
|
|
|
+ int continueNumThreshold = 0;
|
|
|
+
|
|
|
+ for(int i = 0; i < datas.size(); i++) {
|
|
|
+
|
|
|
+ chunkAnalysis = datas.get(i);
|
|
|
+
|
|
|
+ // If the note is not a rest
|
|
|
+ if(musicXmlNote.getFrequency() >= MIN_FREQUECY) {
|
|
|
+ if(chunkAnalysis.getFrequency() < MIN_FREQUECY) {
|
|
|
+
|
|
|
+ silenceContinueNum++;
|
|
|
+ beatContinueNum = 0;
|
|
|
+
|
|
|
+ if(silenceContinueNum > continueNumThreshold) {
|
|
|
+ preNoteAvgFrequency = chunkAnalysis.getFrequency();
|
|
|
+ }
|
|
|
+ }else {
|
|
|
+
|
|
|
+ silenceContinueNum = 0;
|
|
|
+
|
|
|
+ if(preNoteAvgFrequency < MIN_FREQUECY || !isSamePitch(preNoteAvgFrequency, chunkAnalysis.getFrequency())) {
|
|
|
+
|
|
|
+ if (beatContinueNum == 0) {
|
|
|
+ rhythmTime = chunkAnalysis.getStartTime();
|
|
|
+ }
|
|
|
+
|
|
|
+ beatContinueNum++;
|
|
|
+ if (beatContinueNum > continueNumThreshold) {
|
|
|
+ if (chunkAnalysis.getStartTime() - lastestRhythmTime > intervalTime) {
|
|
|
+
|
|
|
+ lastestRhythmTime = rhythmTime;
|
|
|
+ peakIndex++;
|
|
|
+
|
|
|
+ if(peakIndex == 1 || lastestRhythmTime - rhythMap.get(peakIndex - 1) > intervalTime) {
|
|
|
+ rhythMap.put(peakIndex, lastestRhythmTime);
|
|
|
+ LOGGER.debug("范围内查询到音高信号,preNoteFrequency:{} peakIndex:{} EndTime:{}", preNoteAvgFrequency, peakIndex, lastestRhythmTime);
|
|
|
+ }
|
|
|
+
|
|
|
+ preNoteAvgFrequency = chunkAnalysis.getFrequency();
|
|
|
+ beatContinueNum = 0;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }else {
|
|
|
+ beatContinueNum = 0;
|
|
|
+ }
|
|
|
+
|
|
|
+ }
|
|
|
}
|
|
|
+
|
|
|
+
|
|
|
}
|
|
|
+
|
|
|
+ return rhythMap;
|
|
|
+ }
|
|
|
+
|
|
|
+ private List<ChunkAnalysis> extract(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
|
|
|
+
|
|
|
+ LOGGER.debug("---------------------Extract Data----------------------");
|
|
|
+
|
|
|
+ int range = hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration());
|
|
|
+
|
|
|
+ double floatingRange = musicXmlNote.getDuration() * range / 100;
|
|
|
|
|
|
- chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime)
|
|
|
- && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
|
|
|
+ double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
|
|
|
+
|
|
|
+ double endTime = startTime + musicXmlNote.getDuration();
|
|
|
+
|
|
|
+ LOGGER.debug("当前音符有效信号时值[{}]偏移[{}]后的范围[ {} - {} ]", musicXmlNote.getDuration(), floatingRange, startTime, endTime);
|
|
|
+
|
|
|
+ List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream()
|
|
|
+ .filter(t -> Double.doubleToLongBits(t.getEndTime()) >= Double.doubleToLongBits(startTime)
|
|
|
+ && Double.doubleToLongBits(t.getStartTime()) <= Double.doubleToLongBits(endTime))
|
|
|
+ .collect(Collectors.toList());
|
|
|
|
|
|
// 根据完整度取部分有效信号
|
|
|
- int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration())) / 100;
|
|
|
+ int elementSize = chunkAnalysisList.size() * (100 - range) / 100;
|
|
|
|
|
|
List<ChunkAnalysis> datas = chunkAnalysisList.subList(0, elementSize);
|
|
|
|
|
|
- LOGGER.debug("根据用户的评测难度,当前音符有效的信号时值范围[ {} - {} ]", chunkAnalysisList.get(0).getStartTime(), chunkAnalysisList.get(elementSize - 1).getEndTime());
|
|
|
+ noteAnalysis.setStartTime(datas.get(0).getStartTime());
|
|
|
+ noteAnalysis.setEndTime(datas.get(elementSize - 1).getEndTime());
|
|
|
|
|
|
+ LOGGER.debug("根据用户的评测难度[{}],当前音符有效的信号时值的取值范围[ {} - {} ]", range, datas.get(0).getStartTime(), datas.get(elementSize - 1).getEndTime());
|
|
|
+
|
|
|
return datas;
|
|
|
}
|
|
|
|
|
|
- public void gaussianSmooth(List<ChunkAnalysis> signalList, int windowSize) {
|
|
|
- double sigma = windowSize / 3.0;
|
|
|
- Gaussian gaussian = new Gaussian(1.0, windowSize / 2.0, sigma);
|
|
|
- double[] kernel = new double[windowSize];
|
|
|
- for (int i = 0; i < windowSize; i++) {
|
|
|
- kernel[i] = gaussian.value(i);
|
|
|
- }
|
|
|
-
|
|
|
- for (int i = 0; i < signalList.size(); i++) {
|
|
|
- double sum = 0;
|
|
|
- double weightSum = 0;
|
|
|
- for (int j = 0; j < windowSize; j++) {
|
|
|
- int index = i + j - windowSize / 2;
|
|
|
- if (index >= 0 && index < signalList.size()) {
|
|
|
- sum += signalList.get(index).getSplDb() * kernel[j];
|
|
|
- weightSum += kernel[j];
|
|
|
- }
|
|
|
- }
|
|
|
- signalList.get(i).setSplDb(sum / weightSum);
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- public void movingAverageSmooth(List<ChunkAnalysis> signalList, int windowSize) {
|
|
|
- for (int i = 0; i < signalList.size(); i++) {
|
|
|
- int start = Math.max(0, i - windowSize / 2);
|
|
|
- int end = Math.min(signalList.size(), i + windowSize / 2);
|
|
|
- double sum = 0;
|
|
|
- for (int j = start; j < end; j++) {
|
|
|
- sum += signalList.get(j).getSplDb();
|
|
|
- }
|
|
|
- signalList.get(i).setSplDb(sum / (end - start));
|
|
|
- }
|
|
|
- }
|
|
|
+ private float handleHarmonic(float basicFrequency, float frequency) {
|
|
|
+
|
|
|
+ if (basicFrequency > frequency) {
|
|
|
+ return frequency;
|
|
|
+ }
|
|
|
+
|
|
|
+ float threshold = 0.02f;
|
|
|
+ // Fold harmonics back to the fundamental
|
|
|
+ int roundedRatio = Math.round(frequency / basicFrequency);
|
|
|
+ float ratio = frequency / basicFrequency;
|
|
|
+ if (roundedRatio >= 2 && Math.abs(ratio - roundedRatio) <= threshold) {
|
|
|
+ return frequency / ratio;
|
|
|
+ }
|
|
|
+
|
|
|
+ return frequency;
|
|
|
+ }
|
|
|
+
|
|
|
}
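A short usage sketch for the new handleHarmonic(...): when the ratio between the incoming frequency and the previous chunk's frequency lands within 0.02 of an integer of at least 2, the value is folded back to that fundamental; otherwise it is returned untouched. The method body is copied from the diff, only the driver is illustrative:

    public class HandleHarmonicSketch {

        static float handleHarmonic(float basicFrequency, float frequency) {
            if (basicFrequency > frequency) {
                return frequency;
            }
            float threshold = 0.02f;
            // Fold harmonics back to the fundamental
            int roundedRatio = Math.round(frequency / basicFrequency);
            float ratio = frequency / basicFrequency;
            if (roundedRatio >= 2 && Math.abs(ratio - roundedRatio) <= threshold) {
                return frequency / ratio;   // numerically this is basicFrequency
            }
            return frequency;
        }

        public static void main(String[] args) {
            System.out.println(handleHarmonic(440f, 1318.5f)); // ~3rd harmonic -> folded to ≈ 440.0
            System.out.println(handleHarmonic(440f, 660f));    // a fifth above -> kept at 660.0
        }
    }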
|