Initial commit

sources/com/hw/videoprocessor/AudioProcessThread.java (new file, 104 lines)
@@ -0,0 +1,104 @@
package com.hw.videoprocessor;

import android.content.Context;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import com.baidu.cloud.media.player.misc.IMediaFormat;
import com.hw.videoprocessor.util.AudioUtil;
import com.hw.videoprocessor.util.CL;
import com.hw.videoprocessor.util.VideoProgressAve;
import com.hw.videoprocessor.util.VideoProgressListener;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/* loaded from: classes.dex */
public class AudioProcessThread extends Thread implements VideoProgressListener {
    private String a;
    private Integer b;
    private Integer c;
    private Float d;
    private Context e;
    private Exception f;
    private MediaMuxer g;
    private int h;
    private MediaExtractor i;
    private CountDownLatch j;
    private VideoProgressAve k;

    public AudioProcessThread(Context context, String str, MediaMuxer mediaMuxer, Integer num, Integer num2, Float f, int i, CountDownLatch countDownLatch) {
        super("VideoProcessDecodeThread");
        this.a = str;
        this.b = num;
        this.c = num2;
        this.d = f;
        this.g = mediaMuxer;
        this.e = context;
        this.h = i;
        this.i = new MediaExtractor();
        this.j = countDownLatch;
    }

    private void b() throws Exception {
        this.i.setDataSource(this.a);
        int a = VideoUtil.a(this.i, true);
        if (a >= 0) {
            this.i.selectTrack(a);
            MediaFormat trackFormat = this.i.getTrackFormat(a);
            String string = trackFormat.containsKey(IMediaFormat.KEY_MIME) ? trackFormat.getString(IMediaFormat.KEY_MIME) : "audio/mp4a-latm";
            Integer num = this.b;
            Integer valueOf = num == null ? null : Integer.valueOf(num.intValue() * 1000);
            Integer num2 = this.c;
            Integer valueOf2 = num2 != null ? Integer.valueOf(num2.intValue() * 1000) : null;
            if (!this.j.await(3L, TimeUnit.SECONDS)) {
                throw new TimeoutException("wait muxerStartLatch timeout!");
            }
            if (this.d == null && string.equals("audio/mp4a-latm")) {
                AudioUtil.a(this.i, this.g, this.h, valueOf, valueOf2, this);
            } else {
                Context context = this.e;
                MediaExtractor mediaExtractor = this.i;
                MediaMuxer mediaMuxer = this.g;
                int i = this.h;
                Float f = this.d;
                AudioUtil.a(context, mediaExtractor, mediaMuxer, i, valueOf, valueOf2, Float.valueOf(f == null ? 1.0f : f.floatValue()), this);
            }
        }
        VideoProgressAve videoProgressAve = this.k;
        if (videoProgressAve != null) {
            videoProgressAve.a(1.0f);
        }
    }

    public Exception a() {
        return this.f;
    }

    @Override // com.hw.videoprocessor.util.VideoProgressListener
    public void onProgress(float f) {
        VideoProgressAve videoProgressAve = this.k;
        if (videoProgressAve != null) {
            videoProgressAve.a(f);
        }
    }

    @Override // java.lang.Thread, java.lang.Runnable
    public void run() {
        super.run();
        try {
            try {
                b();
            } catch (Exception e) {
                this.f = e;
                CL.a(e);
            }
        } finally {
            this.i.release();
        }
    }

    public void a(VideoProgressAve videoProgressAve) {
        this.k = videoProgressAve;
    }
}

sources/com/hw/videoprocessor/IVideoEncodeThread.java (new file, 11 lines)
@@ -0,0 +1,11 @@
package com.hw.videoprocessor;

import android.view.Surface;
import java.util.concurrent.CountDownLatch;

/* loaded from: classes.dex */
public interface IVideoEncodeThread {
    CountDownLatch a();

    Surface b();
}

sources/com/hw/videoprocessor/VideoDecodeThread.java (new file, 180 lines)
@@ -0,0 +1,180 @@
package com.hw.videoprocessor;

import android.media.MediaCodec;
import android.media.MediaExtractor;
import com.hw.videoprocessor.util.FrameDropper;
import com.hw.videoprocessor.util.InputSurface;
import com.hw.videoprocessor.util.OutputSurface;
import java.util.concurrent.atomic.AtomicBoolean;

/* loaded from: classes.dex */
public class VideoDecodeThread extends Thread {
    private MediaExtractor a;
    private MediaCodec b;
    private Integer c;
    private Integer d;
    private Float e;
    private AtomicBoolean f;
    private Exception g;
    private int h;
    private IVideoEncodeThread i;
    private InputSurface j;
    private OutputSurface k;
    private Integer l;
    private Integer m;
    private boolean n;
    private FrameDropper o;

    public VideoDecodeThread(IVideoEncodeThread iVideoEncodeThread, MediaExtractor mediaExtractor, Integer num, Integer num2, Integer num3, Integer num4, Float f, boolean z, int i, AtomicBoolean atomicBoolean) {
        super("VideoProcessDecodeThread");
        this.a = mediaExtractor;
        this.c = num;
        this.d = num2;
        this.e = f;
        this.h = i;
        this.f = atomicBoolean;
        this.i = iVideoEncodeThread;
        this.l = num4;
        this.m = num3;
        this.n = z;
    }

    /* JADX WARN: Code restructure failed: missing block: B:88:0x02f5, code lost:
        r0 = r3;
     */
    /* JADX WARN: Multi-variable type inference failed */
    /* JADX WARN: Removed duplicated region for block: B:35:0x0129 */
    /* JADX WARN: Type inference failed for: r6v23 */
    /* JADX WARN: Type inference failed for: r6v9 */
    /*
        Code decompiled incorrectly, please refer to instructions dump.
        To view partially-correct code enable 'Show inconsistent code' option in preferences
    */
    private void b() throws java.io.IOException {
        /*
            Method dump skipped, instructions count: 776
            To view this dump change 'Code comments level' option to 'DEBUG'
        */
        throw new UnsupportedOperationException("Method not decompiled: com.hw.videoprocessor.VideoDecodeThread.b():void");
    }

    public Exception a() {
        return this.g;
    }

    @Override // java.lang.Thread, java.lang.Runnable
    public void run() {
        // Reconstructed from the JADX instruction dump: run b(), record the first
        // exception, and always release the surfaces and the decoder.
        super.run();
        try {
            b();
        } catch (Exception e) {
            this.g = e;
            com.hw.videoprocessor.util.CL.a(e);
        } finally {
            if (this.j != null) {
                this.j.b();
            }
            if (this.k != null) {
                this.k.c();
            }
            try {
                if (this.b != null) {
                    this.b.stop();
                    this.b.release();
                }
            } catch (Exception e) {
                if (this.g == null) {
                    this.g = e;
                }
                com.hw.videoprocessor.util.CL.a(e);
            }
        }
    }
}

sources/com/hw/videoprocessor/VideoEncodeThread.java (new file, 265 lines)
@@ -0,0 +1,265 @@
package com.hw.videoprocessor;

import android.media.MediaCodec;
import android.media.MediaCrypto;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.view.Surface;
import com.hw.videoprocessor.util.CL;
import com.hw.videoprocessor.util.VideoProgressAve;
import com.ijm.dataencryption.de.DataDecryptTool;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

/* loaded from: classes.dex */
public class VideoEncodeThread extends Thread implements IVideoEncodeThread {
    private MediaCodec a;
    private MediaMuxer b;
    private AtomicBoolean c;
    private CountDownLatch d;
    private Exception e;
    private int f;
    private int g;
    private int h;
    private int i;
    private int j;
    private MediaExtractor k;
    private int l;
    private volatile CountDownLatch m;
    private volatile Surface n;
    private VideoProgressAve o;

    public VideoEncodeThread(MediaExtractor mediaExtractor, MediaMuxer mediaMuxer, int i, int i2, int i3, int i4, int i5, int i6, AtomicBoolean atomicBoolean, CountDownLatch countDownLatch) {
        super("VideoProcessEncodeThread");
        this.b = mediaMuxer;
        this.c = atomicBoolean;
        this.d = countDownLatch;
        this.k = mediaExtractor;
        this.f = i;
        this.h = i3;
        this.g = i2;
        this.i = i4;
        this.l = i6;
        this.j = i5;
        this.m = new CountDownLatch(1);
    }

    private void a(MediaCodec.BufferInfo bufferInfo) {
        VideoProgressAve videoProgressAve = this.o;
        if (videoProgressAve == null) {
            return;
        }
        videoProgressAve.a((bufferInfo.flags & 4) > 0 ? Long.MAX_VALUE : bufferInfo.presentationTimeUs);
    }

    private void d() throws IOException {
        boolean z;
        MediaFormat trackFormat = this.k.getTrackFormat(this.l);
        int i = this.j;
        if (i <= 0) {
            i = trackFormat.containsKey("frame-rate") ? trackFormat.getInteger("frame-rate") : VideoProcessor.a;
        }
        MediaFormat createVideoFormat = MediaFormat.createVideoFormat("video/avc", this.g, this.h);
        createVideoFormat.setInteger("color-format", 2130708361);
        createVideoFormat.setInteger("frame-rate", i);
        createVideoFormat.setInteger("i-frame-interval", this.i);
        this.a = MediaCodec.createEncoderByType("video/avc");
        if (VideoUtil.a(this.a, "video/avc", createVideoFormat, 8, DataDecryptTool.DECRYPT_DB_FILE)) {
            CL.c("supportProfileHigh,enable ProfileHigh", new Object[0]);
        }
        int a = VideoUtil.a(this.a, "video/avc");
        if (a > 0 && this.f > a) {
            CL.a(this.f + " bitrate too large,set to:" + a, new Object[0]);
            this.f = (int) (((float) a) * 0.8f);
        }
        createVideoFormat.setInteger("bitrate", this.f);
        int i2 = 1;
        this.a.configure(createVideoFormat, (Surface) null, (MediaCrypto) null, 1);
        this.n = this.a.createInputSurface();
        this.a.start();
        this.m.countDown();
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int i3 = (int) (1000000.0f / i);
        int i4 = -5;
        boolean z2 = false;
        int i5 = 0;
        int i6 = -5;
        boolean z3 = false;
        long j = -1;
        while (true) {
            if (this.c.get() && !z2) {
                this.a.signalEndOfInputStream();
                z2 = true;
            }
            int dequeueOutputBuffer = this.a.dequeueOutputBuffer(bufferInfo, 2500L);
            CL.c("encode outputBufferIndex = " + dequeueOutputBuffer, new Object[0]);
            if (z2 && dequeueOutputBuffer == -1) {
                i5 += i2;
                if (i5 > 10) {
                    CL.a("INFO_TRY_AGAIN_LATER 10 times,force End!", new Object[0]);
                    return;
                }
            } else {
                i5 = 0;
            }
            if (dequeueOutputBuffer != -1) {
                if (dequeueOutputBuffer == -2) {
                    MediaFormat outputFormat = this.a.getOutputFormat();
                    if (i6 == i4) {
                        i6 = this.b.addTrack(outputFormat);
                        this.b.start();
                        this.d.countDown();
                    }
                    CL.c("encode newFormat = " + outputFormat, new Object[0]);
                } else if (dequeueOutputBuffer < 0) {
                    CL.a("unexpected result from decoder.dequeueOutputBuffer: " + dequeueOutputBuffer, new Object[0]);
                } else {
                    ByteBuffer outputBuffer = this.a.getOutputBuffer(dequeueOutputBuffer);
                    z = z2;
                    if (bufferInfo.flags == 4 && bufferInfo.presentationTimeUs < 0) {
                        bufferInfo.presentationTimeUs = 0L;
                    }
                    if (!z3 && j != -1 && bufferInfo.presentationTimeUs < (i3 / 2) + j) {
                        CL.a("video 时间戳错误,lastVideoFrameTimeUs:" + j + " info.presentationTimeUs:" + bufferInfo.presentationTimeUs + " VIDEO_FRAME_TIME_US:" + i3, new Object[0]);
                        z3 = true;
                    }
                    if (z3) {
                        bufferInfo.presentationTimeUs = i3 + j;
                        CL.a("video 时间戳错误,使用修正的时间戳:" + bufferInfo.presentationTimeUs, new Object[0]);
                        z3 = false;
                    }
                    if (bufferInfo.flags != 2) {
                        j = bufferInfo.presentationTimeUs;
                    }
                    CL.c("writeSampleData,size:" + bufferInfo.size + " time:" + (bufferInfo.presentationTimeUs / 1000), new Object[0]);
                    this.b.writeSampleData(i6, outputBuffer, bufferInfo);
                    a(bufferInfo);
                    this.a.releaseOutputBuffer(dequeueOutputBuffer, false);
                    if (bufferInfo.flags == 4) {
                        CL.c("encoderDone", new Object[0]);
                        return;
                    }
                    z2 = z;
                    i2 = 1;
                    i4 = -5;
                }
                z = z2;
                z2 = z;
                i2 = 1;
                i4 = -5;
            }
        }
    }

    @Override // com.hw.videoprocessor.IVideoEncodeThread
    public Surface b() {
        return this.n;
    }

    public Exception c() {
        return this.e;
    }

    @Override // java.lang.Thread, java.lang.Runnable
    public void run() {
        // Reconstructed from the JADX instruction dump: run d(), record the first
        // exception, and always stop and release the encoder.
        super.run();
        try {
            d();
        } catch (Exception e) {
            CL.a(e);
            this.e = e;
        } finally {
            try {
                if (this.a != null) {
                    this.a.stop();
                    this.a.release();
                }
            } catch (Exception e) {
                if (this.e == null) {
                    this.e = e;
                }
                CL.a(e);
            }
        }
    }

    @Override // com.hw.videoprocessor.IVideoEncodeThread
    public CountDownLatch a() {
        return this.m;
    }

    public void a(VideoProgressAve videoProgressAve) {
        this.o = videoProgressAve;
    }
}
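
The encoder loop above is written against raw MediaCodec literal values. For readability only (not part of the commit), these are the standard android.media constants they correspond to:

import android.media.MediaCodec;
import android.media.MediaCodecInfo;

// Named equivalents of the literals used in VideoEncodeThread.d().
final class EncoderConstants {
    static final int COLOR_FORMAT_SURFACE = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface; // 2130708361
    static final int AVC_PROFILE_HIGH = MediaCodecInfo.CodecProfileLevel.AVCProfileHigh;          // 8
    static final int TRY_AGAIN_LATER = MediaCodec.INFO_TRY_AGAIN_LATER;                           // -1
    static final int OUTPUT_FORMAT_CHANGED = MediaCodec.INFO_OUTPUT_FORMAT_CHANGED;               // -2
    static final int FLAG_END_OF_STREAM = MediaCodec.BUFFER_FLAG_END_OF_STREAM;                   // 4
    static final int FLAG_CODEC_CONFIG = MediaCodec.BUFFER_FLAG_CODEC_CONFIG;                     // 2
}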

sources/com/hw/videoprocessor/VideoProcessor.java (new file, 221 lines)
@@ -0,0 +1,221 @@
package com.hw.videoprocessor;

import android.annotation.TargetApi;
import android.content.Context;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMetadataRetriever;
import android.media.MediaMuxer;
import com.hw.videoprocessor.util.AudioUtil;
import com.hw.videoprocessor.util.CL;
import com.hw.videoprocessor.util.VideoProgressAve;
import com.hw.videoprocessor.util.VideoProgressListener;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

@TargetApi(21)
/* loaded from: classes.dex */
public class VideoProcessor {
    public static int a = 20;

    public static class Processor {
        private Context a;
        private String b;
        private String c;
        private Integer d;
        private Integer e;
        private Integer f;
        private Integer g;
        private Float h;
        private Boolean i;
        private Integer j;
        private Integer k;
        private Integer l;
        private VideoProgressListener m;
        private boolean n = true;

        public Processor(Context context) {
            this.a = context;
        }

        public Processor a(String str) {
            this.b = str;
            return this;
        }

        public Processor b(String str) {
            this.c = str;
            return this;
        }

        public Processor a(int i) {
            this.j = Integer.valueOf(i);
            return this;
        }

        public Processor a(VideoProgressListener videoProgressListener) {
            this.m = videoProgressListener;
            return this;
        }

        public void a() throws Exception {
            VideoProcessor.a(this.a, this);
        }
    }

    public static void a(Context context, Processor processor) throws Exception {
        int i;
        int i2;
        MediaMuxer mediaMuxer;
        Integer num;
        int i3;
        MediaMuxer mediaMuxer2;
        long j;
        MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
        mediaMetadataRetriever.setDataSource(processor.b);
        int parseInt = Integer.parseInt(mediaMetadataRetriever.extractMetadata(18));
        int parseInt2 = Integer.parseInt(mediaMetadataRetriever.extractMetadata(19));
        int parseInt3 = Integer.parseInt(mediaMetadataRetriever.extractMetadata(24));
        int parseInt4 = Integer.parseInt(mediaMetadataRetriever.extractMetadata(20));
        int parseInt5 = Integer.parseInt(mediaMetadataRetriever.extractMetadata(9));
        mediaMetadataRetriever.release();
        if (processor.j == null) {
            processor.j = Integer.valueOf(parseInt4);
        }
        if (processor.l == null) {
            processor.l = 1;
        }
        if (processor.d != null) {
            parseInt = processor.d.intValue();
        }
        if (processor.e != null) {
            parseInt2 = processor.e.intValue();
        }
        if (parseInt % 2 != 0) {
            parseInt++;
        }
        if (parseInt2 % 2 != 0) {
            parseInt2++;
        }
        if (parseInt3 == 90 || parseInt3 == 270) {
            i = parseInt;
            i2 = parseInt2;
        } else {
            i2 = parseInt;
            i = parseInt2;
        }
        MediaExtractor mediaExtractor = new MediaExtractor();
        mediaExtractor.setDataSource(processor.b);
        int a2 = VideoUtil.a(mediaExtractor, false);
        int a3 = VideoUtil.a(mediaExtractor, true);
        MediaMuxer mediaMuxer3 = new MediaMuxer(processor.c, 0);
        boolean booleanValue = processor.i == null ? true : processor.i.booleanValue();
        Integer num2 = processor.g;
        if (a3 >= 0) {
            MediaFormat trackFormat = mediaExtractor.getTrackFormat(a3);
            int a4 = AudioUtil.a(trackFormat);
            int integer = trackFormat.getInteger("channel-count");
            int integer2 = trackFormat.getInteger("sample-rate");
            int b = AudioUtil.b(trackFormat);
            MediaFormat createAudioFormat = MediaFormat.createAudioFormat("audio/mp4a-latm", integer2, integer);
            createAudioFormat.setInteger("bitrate", a4);
            createAudioFormat.setInteger("aac-profile", 2);
            createAudioFormat.setInteger("max-input-size", b);
            if (!booleanValue) {
                mediaMuxer2 = mediaMuxer3;
                long j2 = parseInt5 * 1000;
                long j3 = trackFormat.getLong("durationUs");
                if (processor.f != null || processor.g != null || processor.h != null) {
                    if (processor.f != null && processor.g != null) {
                        j2 = (processor.g.intValue() - processor.f.intValue()) * 1000;
                    }
                    long floatValue = processor.h != null ? (long) (j2 / processor.h.floatValue()) : j2;
                    if (floatValue >= j3) {
                        floatValue = j3;
                    }
                    createAudioFormat.setLong("durationUs", floatValue);
                    num2 = Integer.valueOf((processor.f == null ? 0 : processor.f.intValue()) + ((int) (floatValue / 1000)));
                }
            } else if (processor.f == null && processor.g == null && processor.h == null) {
                mediaMuxer2 = mediaMuxer3;
            } else {
                long j4 = trackFormat.getLong("durationUs");
                if (processor.f == null || processor.g == null) {
                    mediaMuxer2 = mediaMuxer3;
                    j = j4;
                } else {
                    mediaMuxer2 = mediaMuxer3;
                    j = (processor.g.intValue() - processor.f.intValue()) * 1000;
                }
                if (processor.h != null) {
                    j = (long) (j / processor.h.floatValue());
                }
                createAudioFormat.setLong("durationUs", j);
            }
            AudioUtil.a(createAudioFormat, 2, integer2, integer);
            mediaMuxer = mediaMuxer2;
            i3 = mediaMuxer.addTrack(createAudioFormat);
            num = num2;
        } else {
            mediaMuxer = mediaMuxer3;
            num = num2;
            i3 = 0;
        }
        mediaExtractor.selectTrack(a2);
        if (processor.f != null) {
            mediaExtractor.seekTo(processor.f.intValue() * 1000, 0);
        } else {
            mediaExtractor.seekTo(0L, 0);
        }
        VideoProgressAve videoProgressAve = new VideoProgressAve(processor.m);
        videoProgressAve.a(processor.h);
        videoProgressAve.b(processor.f == null ? 0 : processor.f.intValue());
        if (processor.g != null) {
            parseInt5 = processor.g.intValue();
        }
        videoProgressAve.a(parseInt5);
        AtomicBoolean atomicBoolean = new AtomicBoolean(false);
        CountDownLatch countDownLatch = new CountDownLatch(1);
        VideoEncodeThread videoEncodeThread = new VideoEncodeThread(mediaExtractor, mediaMuxer, processor.j.intValue(), i2, i, processor.l.intValue(), processor.k == null ? a : processor.k.intValue(), a2, atomicBoolean, countDownLatch);
        int b2 = VideoUtil.b(processor.b);
        if (b2 <= 0) {
            b2 = (int) Math.ceil(VideoUtil.a(processor.b));
        }
        VideoDecodeThread videoDecodeThread = new VideoDecodeThread(videoEncodeThread, mediaExtractor, processor.f, processor.g, Integer.valueOf(b2), Integer.valueOf(processor.k == null ? a : processor.k.intValue()), processor.h, processor.n, a2, atomicBoolean);
        AudioProcessThread audioProcessThread = new AudioProcessThread(context, processor.b, mediaMuxer, processor.f, num, booleanValue ? processor.h : null, i3, countDownLatch);
        videoEncodeThread.a(videoProgressAve);
        audioProcessThread.a(videoProgressAve);
        videoDecodeThread.start();
        videoEncodeThread.start();
        audioProcessThread.start();
        try {
            long currentTimeMillis = System.currentTimeMillis();
            videoDecodeThread.join();
            videoEncodeThread.join();
            long currentTimeMillis2 = System.currentTimeMillis();
            audioProcessThread.join();
            CL.d(String.format("编解码:%dms,音频:%dms", Long.valueOf(currentTimeMillis2 - currentTimeMillis), Long.valueOf(System.currentTimeMillis() - currentTimeMillis)), new Object[0]);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        try {
            mediaMuxer.release();
            mediaExtractor.release();
        } catch (Exception e2) {
            CL.a(e2);
        }
        if (videoEncodeThread.c() != null) {
            throw videoEncodeThread.c();
        }
        if (videoDecodeThread.a() != null) {
            throw videoDecodeThread.a();
        }
        if (audioProcessThread.a() != null) {
            throw audioProcessThread.a();
        }
    }

    public static Processor a(Context context) {
        return new Processor(context);
    }
}
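
Judging by how Processor's fields are consumed in a(Context, Processor), the obfuscated builder methods map to: a(String) sets the input path, b(String) the output path, a(int) the output bitrate, a(VideoProgressListener) a progress callback, and a() runs the transcode and blocks until the muxer is released. A minimal usage sketch under those assumptions (the file paths and bitrate are hypothetical), run off the main thread because a() joins the worker threads:

import android.content.Context;
import com.hw.videoprocessor.VideoProcessor;
import com.hw.videoprocessor.util.VideoProgressListener;

public class TranscodeSample {
    public static void transcode(final Context context) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    VideoProcessor.a(context)
                            .a("/sdcard/input.mp4")   // hypothetical input path
                            .b("/sdcard/output.mp4")  // hypothetical output path
                            .a(2000000)               // target bitrate in bps (assumption)
                            .a(new VideoProgressListener() {
                                @Override
                                public void onProgress(float progress) {
                                    // averaged encode/audio progress in [0, 1]
                                }
                            })
                            .a();                     // blocks until done or throws
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }).start();
    }
}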

sources/com/hw/videoprocessor/VideoUtil.java (new file, 83 lines)
@@ -0,0 +1,83 @@
package com.hw.videoprocessor;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import com.baidu.cloud.media.player.misc.IMediaFormat;
import com.hw.videoprocessor.util.CL;
import java.io.IOException;

/* loaded from: classes.dex */
public class VideoUtil {
    public static int a(MediaExtractor mediaExtractor, boolean z) {
        int trackCount = mediaExtractor.getTrackCount();
        for (int i = 0; i < trackCount; i++) {
            String string = mediaExtractor.getTrackFormat(i).getString(IMediaFormat.KEY_MIME);
            if (z) {
                if (string.startsWith("audio/")) {
                    return i;
                }
            } else if (string.startsWith("video/")) {
                return i;
            }
        }
        return -5;
    }

    public static int b(String str) {
        MediaExtractor mediaExtractor = new MediaExtractor();
        try {
            mediaExtractor.setDataSource(str);
            MediaFormat trackFormat = mediaExtractor.getTrackFormat(a(mediaExtractor, false));
            return trackFormat.containsKey("frame-rate") ? trackFormat.getInteger("frame-rate") : -1;
        } catch (IOException e) {
            CL.a(e);
            return -1;
        } finally {
            mediaExtractor.release();
        }
    }

    public static float a(String str) throws IOException {
        MediaExtractor mediaExtractor = new MediaExtractor();
        mediaExtractor.setDataSource(str);
        int i = 0;
        mediaExtractor.selectTrack(a(mediaExtractor, false));
        long j = 0;
        while (true) {
            long sampleTime = mediaExtractor.getSampleTime();
            if (sampleTime < 0) {
                mediaExtractor.release();
                return i / ((j / 1000.0f) / 1000.0f);
            }
            i++;
            mediaExtractor.advance();
            j = sampleTime;
        }
    }

    public static boolean a(MediaCodec mediaCodec, String str, MediaFormat mediaFormat, int i, int i2) {
        MediaCodecInfo.CodecProfileLevel[] codecProfileLevelArr = mediaCodec.getCodecInfo().getCapabilitiesForType(str).profileLevels;
        if (codecProfileLevelArr == null) {
            return false;
        }
        for (MediaCodecInfo.CodecProfileLevel codecProfileLevel : codecProfileLevelArr) {
            if (codecProfileLevel.profile == i && codecProfileLevel.level == i2) {
                mediaFormat.setInteger("profile", i);
                mediaFormat.setInteger("level", i2);
                return true;
            }
        }
        return false;
    }

    public static int a(MediaCodec mediaCodec, String str) {
        try {
            return mediaCodec.getCodecInfo().getCapabilitiesForType(str).getVideoCapabilities().getBitrateRange().getUpper().intValue();
        } catch (Exception e) {
            CL.a(e);
            return -1;
        }
    }
}

sources/com/hw/videoprocessor/util/AudioUtil.java (new file, 122 lines)
@@ -0,0 +1,122 @@
package com.hw.videoprocessor.util;

import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import com.hw.videoprocessor.VideoUtil;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

/* loaded from: classes.dex */
public class AudioUtil {
    private static final Map<Integer, Integer> a = new HashMap();

    static {
        a.put(96000, 0);
        a.put(88200, 1);
        a.put(64000, 2);
        a.put(48000, 3);
        a.put(44100, 4);
        a.put(32000, 5);
        a.put(24000, 6);
        a.put(22050, 7);
        a.put(16000, 8);
        a.put(12000, 9);
        a.put(11025, 10);
        a.put(8000, 11);
        a.put(7350, 12);
    }

    public static long a(MediaExtractor mediaExtractor, MediaMuxer mediaMuxer, int i, Integer num, Integer num2, VideoProgressListener videoProgressListener) throws IOException {
        return a(mediaExtractor, mediaMuxer, i, num, num2, 0L, videoProgressListener);
    }

    public static int b(MediaFormat mediaFormat) {
        if (mediaFormat.containsKey("max-input-size")) {
            return mediaFormat.getInteger("max-input-size");
        }
        return 100000;
    }

    public static long a(MediaExtractor mediaExtractor, MediaMuxer mediaMuxer, int i, Integer num, Integer num2, long j, VideoProgressListener videoProgressListener) throws IOException {
        int a2 = VideoUtil.a(mediaExtractor, true);
        mediaExtractor.selectTrack(a2);
        Integer num3 = num == null ? 0 : num;
        mediaExtractor.seekTo(num3.intValue(), 2);
        MediaFormat trackFormat = mediaExtractor.getTrackFormat(a2);
        long j2 = trackFormat.getLong("durationUs");
        ByteBuffer allocateDirect = ByteBuffer.allocateDirect(trackFormat.getInteger("max-input-size"));
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        long j3 = j;
        while (true) {
            long sampleTime = mediaExtractor.getSampleTime();
            if (sampleTime == -1) {
                break;
            }
            if (sampleTime >= num3.intValue()) {
                if (num2 != null && sampleTime > num2.intValue()) {
                    break;
                }
                if (videoProgressListener != null) {
                    float intValue = (sampleTime - num3.intValue()) / (num2 == null ? j2 : num2.intValue() - num3.intValue());
                    if (intValue < 0.0f) {
                        intValue = 0.0f;
                    }
                    if (intValue > 1.0f) {
                        intValue = 1.0f;
                    }
                    videoProgressListener.onProgress(intValue);
                }
                bufferInfo.presentationTimeUs = (sampleTime - num3.intValue()) + j;
                bufferInfo.flags = mediaExtractor.getSampleFlags();
                bufferInfo.size = mediaExtractor.readSampleData(allocateDirect, 0);
                if (bufferInfo.size < 0) {
                    break;
                }
                CL.c("writeAudioSampleData,time:" + (bufferInfo.presentationTimeUs / 1000.0f), new Object[0]);
                mediaMuxer.writeSampleData(i, allocateDirect, bufferInfo);
                long j4 = bufferInfo.presentationTimeUs;
                mediaExtractor.advance();
                j3 = j4;
            } else {
                mediaExtractor.advance();
            }
        }
        return j3;
    }

    /* JADX WARN: Removed duplicated region for block: B:11:0x0126 A[Catch: all -> 0x011a, TryCatch #3 {all -> 0x011a, blocks: (B:45:0x0084, B:47:0x008a, B:52:0x00b9, B:11:0x0126, B:36:0x0134, B:33:0x0150, B:18:0x0166, B:21:0x01b3, B:24:0x016f, B:26:0x0193, B:29:0x01ab, B:30:0x01a1, B:53:0x00ce, B:54:0x0098, B:56:0x00a5, B:59:0x00ac), top: B:44:0x0084 }] */
    /* JADX WARN: Removed duplicated region for block: B:148:0x0357 A[SYNTHETIC] */
    /* JADX WARN: Removed duplicated region for block: B:52:0x00b9 A[Catch: all -> 0x011a, TryCatch #3 {all -> 0x011a, blocks: (B:45:0x0084, B:47:0x008a, B:52:0x00b9, B:11:0x0126, B:36:0x0134, B:33:0x0150, B:18:0x0166, B:21:0x01b3, B:24:0x016f, B:26:0x0193, B:29:0x01ab, B:30:0x01a1, B:53:0x00ce, B:54:0x0098, B:56:0x00a5, B:59:0x00ac), top: B:44:0x0084 }] */
    /* JADX WARN: Removed duplicated region for block: B:53:0x00ce A[Catch: all -> 0x011a, TryCatch #3 {all -> 0x011a, blocks: (B:45:0x0084, B:47:0x008a, B:52:0x00b9, B:11:0x0126, B:36:0x0134, B:33:0x0150, B:18:0x0166, B:21:0x01b3, B:24:0x016f, B:26:0x0193, B:29:0x01ab, B:30:0x01a1, B:53:0x00ce, B:54:0x0098, B:56:0x00a5, B:59:0x00ac), top: B:44:0x0084 }] */
    /* JADX WARN: Removed duplicated region for block: B:92:0x035b */
    /*
        Code decompiled incorrectly, please refer to instructions dump.
        To view partially-correct code enable 'Show inconsistent code' option in preferences
    */
    public static void a(android.content.Context r33, android.media.MediaExtractor r34, android.media.MediaMuxer r35, int r36, java.lang.Integer r37, java.lang.Integer r38, java.lang.Float r39, com.hw.videoprocessor.util.VideoProgressListener r40) throws java.lang.Exception {
        /*
            Method dump skipped, instructions count: 1224
            To view this dump change 'Code comments level' option to 'DEBUG'
        */
        throw new UnsupportedOperationException("Method not decompiled: com.hw.videoprocessor.util.AudioUtil.a(android.content.Context, android.media.MediaExtractor, android.media.MediaMuxer, int, java.lang.Integer, java.lang.Integer, java.lang.Float, com.hw.videoprocessor.util.VideoProgressListener):void");
    }

    public static int a(MediaFormat mediaFormat) {
        if (mediaFormat.containsKey("bitrate")) {
            return mediaFormat.getInteger("bitrate");
        }
        return 192000;
    }

    public static void a(MediaFormat mediaFormat, int i, int i2, int i3) {
        int intValue = a.containsKey(Integer.valueOf(i2)) ? a.get(Integer.valueOf(i2)).intValue() : 4;
        ByteBuffer allocate = ByteBuffer.allocate(2);
        allocate.put(0, (byte) ((i << 3) | (intValue >> 1)));
        allocate.put(1, (byte) (((intValue & 1) << 7) | (i3 << 3)));
        mediaFormat.setByteBuffer("csd-0", allocate);
    }
}
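
The last method builds the two-byte AAC AudioSpecificConfig ("csd-0"): 5 bits of audio object type (the profile argument), 4 bits of sampling-frequency index (looked up in the static map, falling back to index 4 = 44100 Hz), 4 bits of channel configuration, and 3 zero bits. A standalone sketch of the same bit packing (the helper name is chosen here for illustration):

import java.nio.ByteBuffer;

public class AacCsdSketch {
    // Packs profile (5 bits), sample-rate index (4 bits) and channel count (4 bits)
    // into the two csd-0 bytes, mirroring AudioUtil.a(MediaFormat, int, int, int).
    static ByteBuffer buildCsd0(int profile, int sampleRateIndex, int channels) {
        ByteBuffer csd = ByteBuffer.allocate(2);
        csd.put(0, (byte) ((profile << 3) | (sampleRateIndex >> 1)));
        csd.put(1, (byte) (((sampleRateIndex & 1) << 7) | (channels << 3)));
        return csd;
    }

    public static void main(String[] args) {
        ByteBuffer csd = buildCsd0(2, 4, 2); // AAC-LC, 44100 Hz, stereo -> 0x12 0x10
        System.out.printf("csd-0 = %02x %02x%n", csd.get(0), csd.get(1));
    }
}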

sources/com/hw/videoprocessor/util/CL.java (new file, 65 lines)
@@ -0,0 +1,65 @@
package com.hw.videoprocessor.util;

import android.util.Log;

/* loaded from: classes.dex */
public class CL {
    private static boolean a = false;

    static class TagInfo {
        String a;
        String b;
        int c;

        TagInfo() {
        }
    }

    private static String a(TagInfo tagInfo, String str, Object... objArr) {
        return "[" + tagInfo.b + "():" + tagInfo.c + "]" + b(str, objArr);
    }

    private static String b(String str, Object... objArr) {
        return objArr.length == 0 ? str : String.format(str, objArr);
    }

    public static void c(String str, Object... objArr) {
        if (a) {
            TagInfo a2 = a(new Throwable().getStackTrace());
            Log.i(a2.a, a(a2, str, objArr));
        }
    }

    public static void d(String str, Object... objArr) {
        if (a) {
            TagInfo a2 = a(new Throwable().getStackTrace());
            Log.w(a2.a, a(a2, str, objArr));
        }
    }

    private static TagInfo a(StackTraceElement[] stackTraceElementArr) {
        TagInfo tagInfo = new TagInfo();
        if (stackTraceElementArr.length > 1) {
            tagInfo.a = stackTraceElementArr[1].getFileName();
            if (tagInfo.a.endsWith(".java")) {
                // "r1" in the decompiled output refers to tagInfo.a; strip the ".java" suffix.
                tagInfo.a = tagInfo.a.substring(0, tagInfo.a.length() - 5);
            }
            tagInfo.b = stackTraceElementArr[1].getMethodName();
            tagInfo.c = stackTraceElementArr[1].getLineNumber();
        }
        return tagInfo;
    }

    public static void a(String str, Object... objArr) {
        if (a) {
            TagInfo a2 = a(new Throwable().getStackTrace());
            Log.e(a2.a, a(a2, str, objArr));
        }
    }

    public static void a(Throwable th) {
        if (a) {
            Log.e(a(th.getStackTrace()).a, "", th);
        }
    }
}

sources/com/hw/videoprocessor/util/FrameDropper.java (new file, 39 lines)
@@ -0,0 +1,39 @@
package com.hw.videoprocessor.util;

/* loaded from: classes.dex */
public class FrameDropper {
    private int a;
    private int b;
    private boolean c;
    private int d;
    private int e;

    public FrameDropper(int i, int i2) {
        this.a = i;
        this.b = i2;
        if (i <= i2) {
            CL.a("原始帧率:" + i + "小于目标帧率:" + i2 + ",不支持补帧", new Object[0]);
            this.c = true;
        }
    }

    public boolean a(int i) {
        if (this.c) {
            return false;
        }
        if (i == 0) {
            this.e++;
            return false;
        }
        // "r7" in the decompiled output resolves to this.a (the source frame rate);
        // f is the target drop rate (src - dst) / src.
        float f = (this.a - this.b) / (float) this.a;
        int i2 = this.d;
        int i3 = this.e;
        boolean z = Math.abs((((float) (i2 + 1)) / ((float) (i2 + i3))) - f) < Math.abs((((float) i2) / ((float) ((i2 + i3) + 1))) - f);
        if (z) {
            this.d++;
        } else {
            this.e++;
        }
        return z;
    }
}
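
a(int) compares the achieved drop ratio against the target drop rate (src - dst) / src and drops a frame whenever dropping brings the ratio closer to the target. A small sketch of how a caller would consult it per decoded frame, assuming the floating-point drop rate as reconstructed above (the loop itself is illustrative, not taken from the library):

import com.hw.videoprocessor.util.FrameDropper;

public class FrameDropperSample {
    public static void main(String[] args) {
        // Thin 30 fps source material down to roughly 20 fps.
        FrameDropper dropper = new FrameDropper(30, 20);
        int kept = 0;
        for (int frameIndex = 0; frameIndex < 30; frameIndex++) {
            boolean drop = dropper.a(frameIndex); // true -> skip rendering this frame
            if (!drop) {
                kept++;
            }
        }
        System.out.println("kept " + kept + " of 30 frames"); // roughly 20 with these rates
    }
}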

sources/com/hw/videoprocessor/util/InputSurface.java (new file, 95 lines)
@@ -0,0 +1,95 @@
package com.hw.videoprocessor.util;

import android.annotation.TargetApi;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;

@TargetApi(18)
/* loaded from: classes.dex */
public class InputSurface {
    private EGLDisplay a;
    private EGLContext b;
    private EGLSurface c;
    private Surface d;

    public InputSurface(Surface surface) {
        if (surface == null) {
            throw new NullPointerException();
        }
        this.d = surface;
        d();
    }

    private void d() {
        this.a = EGL14.eglGetDisplay(0);
        EGLDisplay eGLDisplay = this.a;
        if (eGLDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("unable to get EGL14 display");
        }
        int[] iArr = new int[2];
        if (!EGL14.eglInitialize(eGLDisplay, iArr, 0, iArr, 1)) {
            this.a = null;
            throw new RuntimeException("unable to initialize EGL14");
        }
        EGLConfig[] eGLConfigArr = new EGLConfig[1];
        if (!EGL14.eglChooseConfig(this.a, new int[]{12324, 8, 12323, 8, 12322, 8, 12352, 4, 12610, 1, 12344}, 0, eGLConfigArr, 0, eGLConfigArr.length, new int[1], 0)) {
            throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
        }
        this.b = EGL14.eglCreateContext(this.a, eGLConfigArr[0], EGL14.EGL_NO_CONTEXT, new int[]{12440, 2, 12344}, 0);
        a("eglCreateContext");
        if (this.b == null) {
            throw new RuntimeException("null context");
        }
        this.c = EGL14.eglCreateWindowSurface(this.a, eGLConfigArr[0], this.d, new int[]{12344}, 0);
        a("eglCreateWindowSurface");
        if (this.c == null) {
            throw new RuntimeException("surface was null");
        }
    }

    public void a() {
        EGLDisplay eGLDisplay = this.a;
        EGLSurface eGLSurface = this.c;
        if (!EGL14.eglMakeCurrent(eGLDisplay, eGLSurface, eGLSurface, this.b)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }

    public void b() {
        if (EGL14.eglGetCurrentContext().equals(this.b)) {
            EGLDisplay eGLDisplay = this.a;
            EGLSurface eGLSurface = EGL14.EGL_NO_SURFACE;
            EGL14.eglMakeCurrent(eGLDisplay, eGLSurface, eGLSurface, EGL14.EGL_NO_CONTEXT);
        }
        EGL14.eglDestroySurface(this.a, this.c);
        EGL14.eglDestroyContext(this.a, this.b);
        this.d.release();
        this.a = null;
        this.b = null;
        this.c = null;
        this.d = null;
    }

    public boolean c() {
        return EGL14.eglSwapBuffers(this.a, this.c);
    }

    public void a(long j) {
        EGLExt.eglPresentationTimeANDROID(this.a, this.c, j);
    }

    private void a(String str) {
        boolean z = false;
        while (EGL14.eglGetError() != 12288) {
            z = true;
        }
        if (z) {
            throw new RuntimeException("EGL error encountered (see log)");
        }
    }
}
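
The EGL setup in d() is written against raw attribute values. For readability only (not part of the commit), these are the named android.opengl constants they correspond to:

import android.opengl.EGL14;
import android.opengl.EGLExt;

// Named equivalents of the literal EGL values used in InputSurface.
final class EglConstants {
    static final int DEFAULT_DISPLAY = EGL14.EGL_DEFAULT_DISPLAY;               // 0
    static final int RED_SIZE = EGL14.EGL_RED_SIZE;                             // 12324
    static final int GREEN_SIZE = EGL14.EGL_GREEN_SIZE;                         // 12323
    static final int BLUE_SIZE = EGL14.EGL_BLUE_SIZE;                           // 12322
    static final int RENDERABLE_TYPE = EGL14.EGL_RENDERABLE_TYPE;               // 12352
    static final int OPENGL_ES2_BIT = EGL14.EGL_OPENGL_ES2_BIT;                 // 4
    static final int RECORDABLE_ANDROID = EGLExt.EGL_RECORDABLE_ANDROID;        // 12610
    static final int CONTEXT_CLIENT_VERSION = EGL14.EGL_CONTEXT_CLIENT_VERSION; // 12440
    static final int NONE = EGL14.EGL_NONE;                                     // 12344
    static final int SUCCESS = EGL14.EGL_SUCCESS;                               // 12288
}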

sources/com/hw/videoprocessor/util/OutputSurface.java (new file, 96 lines)
@@ -0,0 +1,96 @@
package com.hw.videoprocessor.util;

import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;

@TargetApi(16)
/* loaded from: classes.dex */
public class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
    private EGL10 a;
    private SurfaceTexture e;
    private Surface f;
    private boolean h;
    private TextureRenderer i;
    private EGLDisplay b = null;
    private EGLContext c = null;
    private EGLSurface d = null;
    private final Object g = new Object();
    private int j = 0;

    public OutputSurface() {
        d();
    }

    private void d() {
        this.i = new TextureRenderer(this.j);
        this.i.b();
        this.e = new SurfaceTexture(this.i.a());
        this.e.setOnFrameAvailableListener(this);
        this.f = new Surface(this.e);
    }

    public void a() {
        // The decompiled loop here was inverted by JADX (the throw was unconditional);
        // reconstructed to the usual "wait for onFrameAvailable, throw only on timeout" pattern.
        synchronized (this.g) {
            while (!this.h) {
                try {
                    this.g.wait(5000L);
                    if (!this.h) {
                        throw new RuntimeException("Surface frame wait timed out");
                    }
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
            }
            this.h = false;
        }
        this.i.a("before updateTexImage");
        this.e.updateTexImage();
    }

    public Surface b() {
        return this.f;
    }

    public void c() {
        EGL10 egl10 = this.a;
        if (egl10 != null) {
            if (egl10.eglGetCurrentContext().equals(this.c)) {
                EGL10 egl102 = this.a;
                EGLDisplay eGLDisplay = this.b;
                EGLSurface eGLSurface = EGL10.EGL_NO_SURFACE;
                egl102.eglMakeCurrent(eGLDisplay, eGLSurface, eGLSurface, EGL10.EGL_NO_CONTEXT);
            }
            this.a.eglDestroySurface(this.b, this.d);
            this.a.eglDestroyContext(this.b, this.c);
        }
        this.f.release();
        this.b = null;
        this.c = null;
        this.d = null;
        this.a = null;
        this.i = null;
        this.f = null;
        this.e = null;
    }

    @Override // android.graphics.SurfaceTexture.OnFrameAvailableListener
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        synchronized (this.g) {
            if (this.h) {
                throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
            }
            this.h = true;
            this.g.notifyAll();
        }
    }

    public void a(boolean z) {
        this.i.a(this.e, z);
    }
}

sources/com/hw/videoprocessor/util/PcmToWavUtil.java (new file, 52 lines)
@@ -0,0 +1,52 @@
package com.hw.videoprocessor.util;

import android.media.AudioRecord;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

/* loaded from: classes.dex */
public class PcmToWavUtil {
    private int a;
    private int b;
    private int c;
    private int d;
    private int e;

    public PcmToWavUtil(int i, int i2, int i3, int i4) {
        this.b = 8000;
        this.c = 12;
        this.d = 2;
        this.e = 2;
        this.b = i;
        this.c = i2;
        this.d = i3;
        this.e = i4;
        this.a = AudioRecord.getMinBufferSize(this.b, this.c, this.e);
    }

    public void a(String str, String str2) {
        int i = this.b;
        long j = i;
        int i2 = this.d;
        long j2 = ((i * 16) * i2) / 8;
        byte[] bArr = new byte[this.a];
        try {
            FileInputStream fileInputStream = new FileInputStream(str);
            FileOutputStream fileOutputStream = new FileOutputStream(str2);
            long size = fileInputStream.getChannel().size();
            a(fileOutputStream, size, size + 36, j, i2, j2);
            while (fileInputStream.read(bArr) != -1) {
                fileOutputStream.write(bArr);
            }
            fileInputStream.close();
            fileOutputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private void a(FileOutputStream fileOutputStream, long j, long j2, long j3, int i, long j4) throws IOException {
        fileOutputStream.write(new byte[]{82, 73, 70, 70, (byte) (j2 & 255), (byte) ((j2 >> 8) & 255), (byte) ((j2 >> 16) & 255), (byte) ((j2 >> 24) & 255), 87, 65, 86, 69, 102, 109, 116, 32, 16, 0, 0, 0, 1, 0, (byte) i, 0, (byte) (j3 & 255), (byte) ((j3 >> 8) & 255), (byte) ((j3 >> 16) & 255), (byte) ((j3 >> 24) & 255), (byte) (j4 & 255), (byte) ((j4 >> 8) & 255), (byte) ((j4 >> 16) & 255), (byte) ((j4 >> 24) & 255), 4, 0, 16, 0, 100, 97, 116, 97, (byte) (j & 255), (byte) ((j >> 8) & 255), (byte) ((j >> 16) & 255), (byte) ((j >> 24) & 255)}, 0, 44);
    }
}
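
The private a(...) writes a canonical 44-byte RIFF/WAVE header for 16-bit PCM: the magic bytes are "RIFF", "WAVE", "fmt " and "data", followed by little-endian size, rate and layout fields. A labelled sketch of the same header layout (field names are the standard WAV ones; the helper name is chosen here for illustration):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class WavHeaderSketch {
    // 44-byte WAV header for 16-bit PCM, equivalent to the byte array in PcmToWavUtil.a(...).
    static byte[] header(long pcmBytes, int channels, long sampleRate) {
        long byteRate = sampleRate * 16 * channels / 8;
        ByteBuffer b = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN);
        b.put("RIFF".getBytes());         // chunk id
        b.putInt((int) (pcmBytes + 36));  // chunk size = data size + 36
        b.put("WAVE".getBytes());         // format
        b.put("fmt ".getBytes());         // sub-chunk 1 id
        b.putInt(16);                     // sub-chunk 1 size (PCM)
        b.putShort((short) 1);            // audio format: 1 = PCM
        b.putShort((short) channels);     // channel count
        b.putInt((int) sampleRate);       // sample rate
        b.putInt((int) byteRate);         // byte rate
        b.putShort((short) 4);            // block align (hard-coded to 4 in the library)
        b.putShort((short) 16);           // bits per sample
        b.put("data".getBytes());         // sub-chunk 2 id
        b.putInt((int) pcmBytes);         // data size
        return b.array();
    }
}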

sources/com/hw/videoprocessor/util/TextureRenderer.java (new file, 156 lines)
@@ -0,0 +1,156 @@
package com.hw.videoprocessor.util;

import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.Matrix;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

@TargetApi(16)
/* loaded from: classes.dex */
public class TextureRenderer {
    private static final float[] k = {-1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, -1.0f, 0.0f, 1.0f, 0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 1.0f, 1.0f};
    private int d;
    private int f;
    private int g;
    private int h;
    private int i;
    private int j;
    private float[] b = new float[16];
    private float[] c = new float[16];
    private int e = -12345;
    private FloatBuffer a = ByteBuffer.allocateDirect(k.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();

    public TextureRenderer(int i) {
        this.j = 0;
        this.j = i;
        this.a.put(k).position(0);
        Matrix.setIdentityM(this.c, 0);
    }

    public int a() {
        return this.e;
    }

    public void b() {
        this.d = a("uniform mat4 uMVPMatrix;\nuniform mat4 uSTMatrix;\nattribute vec4 aPosition;\nattribute vec4 aTextureCoord;\nvarying vec2 vTextureCoord;\nvoid main() {\n gl_Position = uMVPMatrix * aPosition;\n vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n}\n", "#extension GL_OES_EGL_image_external : require\nprecision mediump float;\nvarying vec2 vTextureCoord;\nuniform samplerExternalOES sTexture;\nvoid main() {\n gl_FragColor = texture2D(sTexture, vTextureCoord);\n}\n");
        int i = this.d;
        if (i == 0) {
            throw new RuntimeException("failed creating program");
        }
        this.h = GLES20.glGetAttribLocation(i, "aPosition");
        a("glGetAttribLocation aPosition");
        if (this.h == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        this.i = GLES20.glGetAttribLocation(this.d, "aTextureCoord");
        a("glGetAttribLocation aTextureCoord");
        if (this.i == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }
        this.f = GLES20.glGetUniformLocation(this.d, "uMVPMatrix");
        a("glGetUniformLocation uMVPMatrix");
        if (this.f == -1) {
            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
        }
        this.g = GLES20.glGetUniformLocation(this.d, "uSTMatrix");
        a("glGetUniformLocation uSTMatrix");
        if (this.g == -1) {
            throw new RuntimeException("Could not get attrib location for uSTMatrix");
        }
        int[] iArr = new int[1];
        GLES20.glGenTextures(1, iArr, 0);
        this.e = iArr[0];
        GLES20.glBindTexture(36197, this.e);
        a("glBindTexture mTextureID");
        GLES20.glTexParameterf(36197, 10241, 9728.0f);
        GLES20.glTexParameterf(36197, 10240, 9729.0f);
        GLES20.glTexParameteri(36197, 10242, 33071);
        GLES20.glTexParameteri(36197, 10243, 33071);
        a("glTexParameter");
        Matrix.setIdentityM(this.b, 0);
        int i2 = this.j;
        if (i2 != 0) {
            Matrix.rotateM(this.b, 0, i2, 0.0f, 0.0f, 1.0f);
        }
    }

    public void a(SurfaceTexture surfaceTexture, boolean z) {
        a("onDrawFrame start");
        surfaceTexture.getTransformMatrix(this.c);
        if (z) {
            float[] fArr = this.c;
            fArr[5] = -fArr[5];
            fArr[13] = 1.0f - fArr[13];
        }
        GLES20.glUseProgram(this.d);
        a("glUseProgram");
        GLES20.glActiveTexture(33984);
        GLES20.glBindTexture(36197, this.e);
        this.a.position(0);
        GLES20.glVertexAttribPointer(this.h, 3, 5126, false, 20, (Buffer) this.a);
        a("glVertexAttribPointer maPosition");
        GLES20.glEnableVertexAttribArray(this.h);
        a("glEnableVertexAttribArray maPositionHandle");
        this.a.position(3);
        GLES20.glVertexAttribPointer(this.i, 2, 5126, false, 20, (Buffer) this.a);
        a("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(this.i);
        a("glEnableVertexAttribArray maTextureHandle");
        GLES20.glUniformMatrix4fv(this.g, 1, false, this.c, 0);
        GLES20.glUniformMatrix4fv(this.f, 1, false, this.b, 0);
        GLES20.glDrawArrays(5, 0, 4);
        a("glDrawArrays");
        GLES20.glFinish();
    }

    private int a(int i, String str) {
        int glCreateShader = GLES20.glCreateShader(i);
        a("glCreateShader type=" + i);
        GLES20.glShaderSource(glCreateShader, str);
        GLES20.glCompileShader(glCreateShader);
        int[] iArr = new int[1];
        GLES20.glGetShaderiv(glCreateShader, 35713, iArr, 0);
        if (iArr[0] != 0) {
            return glCreateShader;
        }
        GLES20.glDeleteShader(glCreateShader);
        return 0;
    }

    private int a(String str, String str2) {
        int a;
        int a2 = a(35633, str);
        if (a2 == 0 || (a = a(35632, str2)) == 0) {
            return 0;
        }
        int glCreateProgram = GLES20.glCreateProgram();
        a("glCreateProgram");
        if (glCreateProgram == 0) {
            return 0;
        }
        GLES20.glAttachShader(glCreateProgram, a2);
        a("glAttachShader");
        GLES20.glAttachShader(glCreateProgram, a);
        a("glAttachShader");
        GLES20.glLinkProgram(glCreateProgram);
        int[] iArr = new int[1];
        GLES20.glGetProgramiv(glCreateProgram, 35714, iArr, 0);
        if (iArr[0] == 1) {
            return glCreateProgram;
        }
        GLES20.glDeleteProgram(glCreateProgram);
        return 0;
    }

    public void a(String str) {
        int glGetError = GLES20.glGetError();
        if (glGetError == 0) {
            return;
        }
        throw new RuntimeException(str + ": glError " + glGetError);
    }
}
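
The renderer also calls GLES20 with raw constant values. For readability only (not part of the commit), these are the standard named equivalents:

import android.opengl.GLES11Ext;
import android.opengl.GLES20;

// Named equivalents of the literal GL values used in TextureRenderer.
final class GlConstants {
    static final int TEXTURE_EXTERNAL_OES = GLES11Ext.GL_TEXTURE_EXTERNAL_OES; // 36197
    static final int TEXTURE0 = GLES20.GL_TEXTURE0;                            // 33984
    static final int FLOAT = GLES20.GL_FLOAT;                                  // 5126
    static final int TRIANGLE_STRIP = GLES20.GL_TRIANGLE_STRIP;                // 5
    static final int MIN_FILTER = GLES20.GL_TEXTURE_MIN_FILTER;                // 10241
    static final int MAG_FILTER = GLES20.GL_TEXTURE_MAG_FILTER;                // 10240
    static final int WRAP_S = GLES20.GL_TEXTURE_WRAP_S;                        // 10242
    static final int WRAP_T = GLES20.GL_TEXTURE_WRAP_T;                        // 10243
    static final int CLAMP_TO_EDGE = GLES20.GL_CLAMP_TO_EDGE;                  // 33071
    static final int NEAREST = GLES20.GL_NEAREST;                              // 9728
    static final int LINEAR = GLES20.GL_LINEAR;                                // 9729
    static final int VERTEX_SHADER = GLES20.GL_VERTEX_SHADER;                  // 35633
    static final int FRAGMENT_SHADER = GLES20.GL_FRAGMENT_SHADER;              // 35632
    static final int COMPILE_STATUS = GLES20.GL_COMPILE_STATUS;                // 35713
    static final int LINK_STATUS = GLES20.GL_LINK_STATUS;                      // 35714
}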

sources/com/hw/videoprocessor/util/VideoProgressAve.java (new file, 59 lines)
@@ -0,0 +1,59 @@
package com.hw.videoprocessor.util;

/* loaded from: classes.dex */
public class VideoProgressAve {
    private VideoProgressListener a;
    private float b;
    private float c;
    private int d;
    private int e;
    private Float f;

    public VideoProgressAve(VideoProgressListener videoProgressListener) {
        this.a = videoProgressListener;
    }

    public void a(long j) {
        if (this.a == null) {
            return;
        }
        Float f = this.f;
        if (f != null) {
            j = (long) (j * f.floatValue());
        }
        // "r3" in the decompiled output refers to this.d (the start time in ms).
        this.b = ((j / 1000.0f) - this.d) / (this.e - this.d);
        float f2 = this.b;
        if (f2 < 0.0f) {
            f2 = 0.0f;
        }
        this.b = f2;
        float f3 = this.b;
        if (f3 > 1.0f) {
            f3 = 1.0f;
        }
        this.b = f3;
        this.a.onProgress((this.b + this.c) / 2.0f);
        CL.c("mEncodeProgress:" + this.b, new Object[0]);
    }

    public void b(int i) {
        this.d = i;
    }

    public void a(float f) {
        this.c = f;
        VideoProgressListener videoProgressListener = this.a;
        if (videoProgressListener != null) {
            videoProgressListener.onProgress((this.b + this.c) / 2.0f);
        }
        CL.c("mAudioProgress:" + this.c, new Object[0]);
    }

    public void a(int i) {
        this.e = i;
    }

    public void a(Float f) {
        this.f = f;
    }
}

sources/com/hw/videoprocessor/util/VideoProgressListener.java (new file, 6 lines)
@@ -0,0 +1,6 @@
package com.hw.videoprocessor.util;

/* loaded from: classes.dex */
public interface VideoProgressListener {
    void onProgress(float f);
}