@@ -458,7 +458,7 @@ public class AccompanyFragment extends BaseMVPFragment<FragmentAccompanyBinding,
}

private void showLoadingAnim() {
- if (mViewBinding!=null && mViewBinding.llLoading.getVisibility() != View.VISIBLE) {
+ if (mViewBinding != null && mViewBinding.llLoading.getVisibility() != View.VISIBLE) {
currentProgressCount = 0;
mViewBinding.progress.setProgress(0);
mViewBinding.ivLoadingBack.setVisibility(View.VISIBLE);
@@ -482,39 +482,39 @@ public class AccompanyFragment extends BaseMVPFragment<FragmentAccompanyBinding,
msg = message;
if (webSocketClient == null) {
Observable.create(new ObservableOnSubscribe<String>() {
- @Override
- public void subscribe(@NonNull ObservableEmitter<String> emitter) throws Exception {
- Map<String, String> httpHeaders = new HashMap<String, String>();
- httpHeaders.put("Authorization", UserHelper.getUserToken());
- webSocketClient = new JWebSocketClient(webSocketUri, httpHeaders) {
@Override
- public void onMessage(String message) {
- emitter.onNext(message);
- }
+ public void subscribe(@NonNull ObservableEmitter<String> emitter) throws Exception {
+ Map<String, String> httpHeaders = new HashMap<String, String>();
+ httpHeaders.put("Authorization", UserHelper.getUserToken());
+ webSocketClient = new JWebSocketClient(webSocketUri, httpHeaders) {
+ @Override
+ public void onMessage(String message) {
+ emitter.onNext(message);
+ }

- @Override
- public void onError(Exception ex) {
- super.onError(ex);
- emitter.onNext("-2");
- }
+ @Override
+ public void onError(Exception ex) {
+ super.onError(ex);
+ emitter.onNext("-2");
+ }

- @Override
- public void onOpen(ServerHandshake handshakedata) {
- super.onOpen(handshakedata);
- emitter.onNext("-1");
+ @Override
+ public void onOpen(ServerHandshake handshakedata) {
+ super.onOpen(handshakedata);
+ emitter.onNext("-1");

- }
+ }

- @Override
- public void onClose(int code, String reason, boolean remote) {
- super.onClose(code, reason, remote);
- emitter.onNext("-2");
+ @Override
+ public void onClose(int code, String reason, boolean remote) {
+ super.onClose(code, reason, remote);
+ emitter.onNext("-2");
+ }
+ };
+ webSocketClient.setConnectionLostTimeout(60 * 1000);
+ webSocketClient.connect();
}
- };
- webSocketClient.setConnectionLostTimeout(60 * 1000);
- webSocketClient.connect();
- }
- }).subscribeOn(Schedulers.newThread())
+ }).subscribeOn(Schedulers.newThread())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Observer<String>() {
@Override
@@ -692,29 +692,29 @@ public class AccompanyFragment extends BaseMVPFragment<FragmentAccompanyBinding,
wavRecorder.startRecording(getContext());
} else {
Observable.create((ObservableOnSubscribe<String>) emitter -> {
- wavRecorder = MsRecorder.wav(
- new File(getVoicePath()),
- new AudioRecordConfig(),
- // AudioRecordConfig(MediaRecorder.AudioSource.MIC, AudioFormat.ENCODING_PCM_16BIT, AudioFormat.CHANNEL_IN_MONO, 44100),
- new PullTransport.Default().setOnAudioChunkPulledListener(new PullTransport.OnAudioChunkPulledListener() {
- @Override
- public void onAudioChunkPulled(AudioChunk audioChunk) {
- if (webSocketClient != null && webSocketClient.isOpen()) {
- webSocketClient.send(audioChunk.toBytes());
- if (isSendRecordStartTime) {
- isSendRecordStartTime = false;
- emitter.onNext("-2");
+ wavRecorder = MsRecorder.wav(
+ new File(getVoicePath()),
+ new AudioRecordConfig(),
+ // AudioRecordConfig(MediaRecorder.AudioSource.MIC, AudioFormat.ENCODING_PCM_16BIT, AudioFormat.CHANNEL_IN_MONO, 44100),
+ new PullTransport.Default().setOnAudioChunkPulledListener(new PullTransport.OnAudioChunkPulledListener() {
+ @Override
+ public void onAudioChunkPulled(AudioChunk audioChunk) {
+ if (webSocketClient != null && webSocketClient.isOpen()) {
+ webSocketClient.send(audioChunk.toBytes());
+ if (isSendRecordStartTime) {
+ isSendRecordStartTime = false;
+ emitter.onNext("-2");
+ }
+ } else {
+ emitter.onNext("-1");
+ }
}
- } else {
- emitter.onNext("-1");
- }
- }
- }));
- if (wavRecorder != null) {
- wavRecorder.startRecording(getContext());
- }
- emitter.onNext("1");
- }).subscribeOn(Schedulers.newThread())
+ }));
+ if (wavRecorder != null) {
+ wavRecorder.startRecording(getContext());
+ }
+ emitter.onNext("1");
+ }).subscribeOn(Schedulers.newThread())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Observer<String>() {
@Override
@@ -865,29 +865,29 @@ public class AccompanyFragment extends BaseMVPFragment<FragmentAccompanyBinding,
wavRecorder.startRecording(getContext());
} else {
Observable.create((ObservableOnSubscribe<String>) emitter -> {
- wavRecorder = MsRecorder.wav(
- new File(getVoicePath()),
- new AudioRecordConfig(),
- // AudioRecordConfig(MediaRecorder.AudioSource.MIC, AudioFormat.ENCODING_PCM_16BIT, AudioFormat.CHANNEL_IN_MONO, 44100),
- new PullTransport.Default().setOnAudioChunkPulledListener(new PullTransport.OnAudioChunkPulledListener() {
- @Override
- public void onAudioChunkPulled(AudioChunk audioChunk) {
- if (webSocketClient != null && webSocketClient.isOpen()) {
- webSocketClient.send(audioChunk.toBytes());
- if (isSoundCheckStartTime) {
- isSoundCheckStartTime = false;
- emitter.onNext("-2");
+ wavRecorder = MsRecorder.wav(
+ new File(getVoicePath()),
+ new AudioRecordConfig(),
+ // AudioRecordConfig(MediaRecorder.AudioSource.MIC, AudioFormat.ENCODING_PCM_16BIT, AudioFormat.CHANNEL_IN_MONO, 44100),
+ new PullTransport.Default().setOnAudioChunkPulledListener(new PullTransport.OnAudioChunkPulledListener() {
+ @Override
+ public void onAudioChunkPulled(AudioChunk audioChunk) {
+ if (webSocketClient != null && webSocketClient.isOpen()) {
+ webSocketClient.send(audioChunk.toBytes());
+ if (isSoundCheckStartTime) {
+ isSoundCheckStartTime = false;
+ emitter.onNext("-2");
+ }
+ } else {
+ emitter.onNext("-1");
+ }
}
- } else {
- emitter.onNext("-1");
- }
- }
- }));
- if (wavRecorder != null) {
- wavRecorder.startRecording(getContext());
- }
- emitter.onNext("1");
- }).subscribeOn(Schedulers.newThread())
+ }));
+ if (wavRecorder != null) {
+ wavRecorder.startRecording(getContext());
+ }
+ emitter.onNext("1");
+ }).subscribeOn(Schedulers.newThread())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Observer<String>() {
@Override
@@ -972,7 +972,7 @@ public class AccompanyFragment extends BaseMVPFragment<FragmentAccompanyBinding,

// Recording file storage name
private String getVoicePath() {
- return MyFileUtils.getCacheDir(getContext()) + File.separator + "wav-accompany" + ".wav";
+ return MyFileUtils.getRecordFilePath();
}

/**
@@ -1466,6 +1466,8 @@ public class AccompanyFragment extends BaseMVPFragment<FragmentAccompanyBinding,
* @param url
*/
void onDownloadAccompaniment(String url);
+
+ void openAdjustRecording(String recordId, String title, String coverImg);
}

public WebViewListener listener;
@@ -1492,7 +1494,7 @@ public class AccompanyFragment extends BaseMVPFragment<FragmentAccompanyBinding,
String videoUrl = content.getString("video");
/* Share panel with custom buttons added */
mShareAction = new ShareAction(getActivity()).setDisplayList(
- SHARE_MEDIA.WEIXIN, SHARE_MEDIA.WEIXIN_CIRCLE, SHARE_MEDIA.SINA)
+ SHARE_MEDIA.WEIXIN, SHARE_MEDIA.WEIXIN_CIRCLE, SHARE_MEDIA.SINA)
.setShareboardclickCallback(new ShareBoardlistener() {
@Override
public void onclick(SnsPlatform snsPlatform, SHARE_MEDIA share_media) {
@@ -1512,7 +1514,7 @@ public class AccompanyFragment extends BaseMVPFragment<FragmentAccompanyBinding,
String shareUrl = WebParamsUtils.getShareUrl(content);
/* Share panel with custom buttons added */
mShareAction = new ShareAction(getActivity()).setDisplayList(
- SHARE_MEDIA.WEIXIN, SHARE_MEDIA.WEIXIN_CIRCLE, SHARE_MEDIA.SINA)
+ SHARE_MEDIA.WEIXIN, SHARE_MEDIA.WEIXIN_CIRCLE, SHARE_MEDIA.SINA)
.setShareboardclickCallback(new ShareBoardlistener() {
@Override
public void onclick(SnsPlatform snsPlatform, SHARE_MEDIA share_media) {
@@ -2060,6 +2062,25 @@ public class AccompanyFragment extends BaseMVPFragment<FragmentAccompanyBinding,
}
}

+ @Override
+ public void openAdjustRecording(JSONObject message) {
+ if (message != null) {
+ JSONObject contentJson = message.optJSONObject("content");
+ if (contentJson != null) {
+ int recordIdByInt = contentJson.optInt("recordId", -1);
+ String recordId = String.valueOf(recordIdByInt);
+ if (recordIdByInt == -1) {
+ recordId = contentJson.optString("recordId");
+ }
+ String title = contentJson.optString("title");
+ String coverImg = contentJson.optString("coverImg");
+ if (onAccompanyListener != null) {
+ onAccompanyListener.openAdjustRecording(recordId, title, coverImg);
+ }
+ }
+ }
+ }
+
private void handleCloudFollow(String mode) {
if (mMusicTunerHelper == null) {
mMusicTunerHelper = new MusicTunerHelper(new MusicTunerHelper.OnEventListener() {
@@ -2271,61 +2292,61 @@ public class AccompanyFragment extends BaseMVPFragment<FragmentAccompanyBinding,
// Show the count-in (preparatory beats) dialog
preCountDialog = CommonDialog.init().setLayoutId(com.cooleshow.base.R.layout.dialog_student_precount);
preCountDialog.setConvertListener(new ViewConvertListener() {
- @Override
- public void convertView(ViewHolder holder, BaseDialog dialog) {
- if (count == 2) {
- iv1 = holder.getView(com.cooleshow.base.R.id.iv_3);
- iv2 = holder.getView(com.cooleshow.base.R.id.iv_4);
- iv3 = holder.getView(com.cooleshow.base.R.id.iv_1);
- iv4 = holder.getView(com.cooleshow.base.R.id.iv_2);
- iv5 = holder.getView(com.cooleshow.base.R.id.iv_5);
- iv6 = holder.getView(com.cooleshow.base.R.id.iv_6);
- iv3.setVisibility(View.INVISIBLE);
- iv4.setVisibility(View.INVISIBLE);
- iv5.setVisibility(View.INVISIBLE);
- iv6.setVisibility(View.INVISIBLE);
- } else if (count == 3) {
- iv1 = holder.getView(com.cooleshow.base.R.id.iv_3);
- iv2 = holder.getView(com.cooleshow.base.R.id.iv_4);
- iv3 = holder.getView(com.cooleshow.base.R.id.iv_5);
- iv4 = holder.getView(com.cooleshow.base.R.id.iv_2);
- iv5 = holder.getView(com.cooleshow.base.R.id.iv_1);
- iv6 = holder.getView(com.cooleshow.base.R.id.iv_6);
- iv4.setVisibility(View.INVISIBLE);
- iv5.setVisibility(View.INVISIBLE);
- iv6.setVisibility(View.INVISIBLE);
- } else if (count == 4) {
- iv1 = holder.getView(com.cooleshow.base.R.id.iv_2);
- iv2 = holder.getView(com.cooleshow.base.R.id.iv_3);
- iv3 = holder.getView(com.cooleshow.base.R.id.iv_4);
- iv4 = holder.getView(com.cooleshow.base.R.id.iv_5);
- iv5 = holder.getView(com.cooleshow.base.R.id.iv_1);
- iv6 = holder.getView(com.cooleshow.base.R.id.iv_6);
- iv5.setVisibility(View.INVISIBLE);
- iv6.setVisibility(View.INVISIBLE);
- }
- iv1.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
- iv2.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
- iv3.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
- iv4.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
- iv5.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
- iv6.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
- ImageView iv_dialog_student_precount = holder.getView(com.cooleshow.base.R.id.iv_dialog_student_precount);
- iv_dialog_student_precount.setOnClickListener(v -> {
- dialog.dismiss();
- if (soundpool != null) {
- soundpool.release();
- soundpool = null;
- }
- try {
- message.optJSONObject("content").put("status", "cancel");
- } catch (JSONException e) {
- e.printStackTrace();
+ @Override
+ public void convertView(ViewHolder holder, BaseDialog dialog) {
+ if (count == 2) {
+ iv1 = holder.getView(com.cooleshow.base.R.id.iv_3);
+ iv2 = holder.getView(com.cooleshow.base.R.id.iv_4);
+ iv3 = holder.getView(com.cooleshow.base.R.id.iv_1);
+ iv4 = holder.getView(com.cooleshow.base.R.id.iv_2);
+ iv5 = holder.getView(com.cooleshow.base.R.id.iv_5);
+ iv6 = holder.getView(com.cooleshow.base.R.id.iv_6);
+ iv3.setVisibility(View.INVISIBLE);
+ iv4.setVisibility(View.INVISIBLE);
+ iv5.setVisibility(View.INVISIBLE);
+ iv6.setVisibility(View.INVISIBLE);
+ } else if (count == 3) {
+ iv1 = holder.getView(com.cooleshow.base.R.id.iv_3);
+ iv2 = holder.getView(com.cooleshow.base.R.id.iv_4);
+ iv3 = holder.getView(com.cooleshow.base.R.id.iv_5);
+ iv4 = holder.getView(com.cooleshow.base.R.id.iv_2);
+ iv5 = holder.getView(com.cooleshow.base.R.id.iv_1);
+ iv6 = holder.getView(com.cooleshow.base.R.id.iv_6);
+ iv4.setVisibility(View.INVISIBLE);
+ iv5.setVisibility(View.INVISIBLE);
+ iv6.setVisibility(View.INVISIBLE);
+ } else if (count == 4) {
+ iv1 = holder.getView(com.cooleshow.base.R.id.iv_2);
+ iv2 = holder.getView(com.cooleshow.base.R.id.iv_3);
+ iv3 = holder.getView(com.cooleshow.base.R.id.iv_4);
+ iv4 = holder.getView(com.cooleshow.base.R.id.iv_5);
+ iv5 = holder.getView(com.cooleshow.base.R.id.iv_1);
+ iv6 = holder.getView(com.cooleshow.base.R.id.iv_6);
+ iv5.setVisibility(View.INVISIBLE);
+ iv6.setVisibility(View.INVISIBLE);
+ }
+ iv1.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
+ iv2.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
+ iv3.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
+ iv4.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
+ iv5.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
+ iv6.setImageResource(com.cooleshow.base.R.drawable.bg_play_metronome_gray_dots_shape);
+ ImageView iv_dialog_student_precount = holder.getView(com.cooleshow.base.R.id.iv_dialog_student_precount);
+ iv_dialog_student_precount.setOnClickListener(v -> {
+ dialog.dismiss();
+ if (soundpool != null) {
+ soundpool.release();
+ soundpool = null;
+ }
+ try {
+ message.optJSONObject("content").put("status", "cancel");
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+ onSendMessage(message.toString());
+ });
}
- onSendMessage(message.toString());
- });
- }
- })
+ })
.setDimAmount(0.6f)
.setOutCancel(true)
.setGravity(Gravity.CENTER)