Initial commit

This commit is contained in:
2025-05-13 19:24:51 +02:00
commit a950f49678
10604 changed files with 932663 additions and 0 deletions

View File

@@ -0,0 +1,122 @@
package com.hw.videoprocessor.util;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import com.hw.videoprocessor.VideoUtil;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
/* loaded from: classes.dex */
/**
 * Helpers for copying/re-muxing AAC audio tracks and for building
 * MediaFormat metadata (AudioSpecificConfig, bitrate, buffer sizes).
 */
public class AudioUtil {
    /** Fallback when the track format does not declare "max-input-size". */
    private static final int DEFAULT_MAX_INPUT_SIZE = 100000;
    /** Fallback when the track format does not declare "bitrate". */
    private static final int DEFAULT_BITRATE = 192000;
    /** AAC sampling-frequency table: sample rate (Hz) -> 4-bit index used in the AudioSpecificConfig. */
    private static final Map<Integer, Integer> a = new HashMap<>();

    static {
        // Indices follow the MPEG-4 audio sampling-frequency table (index == array position).
        int[] rates = {96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350};
        for (int idx = 0; idx < rates.length; idx++) {
            a.put(rates[idx], idx);
        }
    }

    /**
     * Copies the audio track from the extractor into the muxer starting at a base
     * presentation time of 0.
     *
     * @param i    muxer track index to write into
     * @param num  optional start time (us); null means from the beginning
     * @param num2 optional end time (us); null means to the end of the track
     * @return presentation time (us) of the last sample written
     */
    public static long a(MediaExtractor mediaExtractor, MediaMuxer mediaMuxer, int i, Integer num, Integer num2, VideoProgressListener videoProgressListener) throws IOException {
        return a(mediaExtractor, mediaMuxer, i, num, num2, 0L, videoProgressListener);
    }

    /**
     * @return the track's declared "max-input-size", or {@link #DEFAULT_MAX_INPUT_SIZE}
     *         when the format omits the key
     */
    public static int b(MediaFormat mediaFormat) {
        if (mediaFormat.containsKey("max-input-size")) {
            return mediaFormat.getInteger("max-input-size");
        }
        return DEFAULT_MAX_INPUT_SIZE;
    }

    /**
     * Copies audio samples from {@code mediaExtractor} into {@code mediaMuxer},
     * clamped to the optional [num, num2] range (us) and re-based so the first
     * written sample starts at {@code j}.
     *
     * @param j base presentation time (us) added to every written sample
     * @return presentation time (us) of the last sample written, or {@code j} if none
     */
    public static long a(MediaExtractor mediaExtractor, MediaMuxer mediaMuxer, int i, Integer num, Integer num2, long j, VideoProgressListener videoProgressListener) throws IOException {
        int audioTrackIndex = VideoUtil.a(mediaExtractor, true);
        mediaExtractor.selectTrack(audioTrackIndex);
        Integer num3 = num == null ? 0 : num;
        mediaExtractor.seekTo(num3.intValue(), MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        MediaFormat trackFormat = mediaExtractor.getTrackFormat(audioTrackIndex);
        // NOTE(review): assumes the track declares "durationUs" — getLong throws otherwise.
        long durationUs = trackFormat.getLong("durationUs");
        // FIX: use b() so a missing "max-input-size" falls back to a sane default
        // instead of throwing (the original read the key unconditionally).
        ByteBuffer buffer = ByteBuffer.allocateDirect(b(trackFormat));
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        long lastWrittenPtsUs = j;
        while (true) {
            long sampleTime = mediaExtractor.getSampleTime();
            if (sampleTime == -1) {
                break; // end of track
            }
            if (sampleTime < num3.intValue()) {
                // Before the requested start: skip the sample.
                mediaExtractor.advance();
                continue;
            }
            if (num2 != null && sampleTime > num2.intValue()) {
                break; // past the requested end
            }
            if (videoProgressListener != null) {
                // FIX: force float division — the decompiled code divided two longs,
                // so the reported progress was always 0 or 1.
                float progress = (sampleTime - num3.intValue())
                        / (float) (num2 == null ? durationUs : num2.intValue() - num3.intValue());
                if (progress < 0.0f) {
                    progress = 0.0f;
                }
                if (progress > 1.0f) {
                    progress = 1.0f;
                }
                videoProgressListener.onProgress(progress);
            }
            bufferInfo.presentationTimeUs = (sampleTime - num3.intValue()) + j;
            bufferInfo.flags = mediaExtractor.getSampleFlags();
            bufferInfo.size = mediaExtractor.readSampleData(buffer, 0);
            if (bufferInfo.size < 0) {
                break;
            }
            CL.c("writeAudioSampleData,time:" + (bufferInfo.presentationTimeUs / 1000.0f), new Object[0]);
            mediaMuxer.writeSampleData(i, buffer, bufferInfo);
            lastWrittenPtsUs = bufferInfo.presentationTimeUs;
            mediaExtractor.advance();
        }
        return lastWrittenPtsUs;
    }

    /*
     * The decompiler could not reconstruct this method ("instructions count: 1224"
     * dump was skipped). The stub is preserved unchanged so callers fail loudly
     * rather than run wrong code.
     */
    public static void a(android.content.Context r33, android.media.MediaExtractor r34, android.media.MediaMuxer r35, int r36, java.lang.Integer r37, java.lang.Integer r38, java.lang.Float r39, com.hw.videoprocessor.util.VideoProgressListener r40) throws java.lang.Exception {
        throw new UnsupportedOperationException("Method not decompiled: com.hw.videoprocessor.util.AudioUtil.a(android.content.Context, android.media.MediaExtractor, android.media.MediaMuxer, int, java.lang.Integer, java.lang.Integer, java.lang.Float, com.hw.videoprocessor.util.VideoProgressListener):void");
    }

    /**
     * @return the track's declared "bitrate", or {@link #DEFAULT_BITRATE}
     *         when the format omits the key
     */
    public static int a(MediaFormat mediaFormat) {
        if (mediaFormat.containsKey("bitrate")) {
            return mediaFormat.getInteger("bitrate");
        }
        return DEFAULT_BITRATE;
    }

    /**
     * Builds the 2-byte AAC AudioSpecificConfig and stores it as "csd-0".
     * Layout: 5 bits audio object type | 4 bits sampling-frequency index | 4 bits
     * channel configuration | 3 bits zero.
     *
     * @param i  audio object type (e.g. 2 = AAC-LC)
     * @param i2 sample rate in Hz (unknown rates fall back to index 4 = 44100)
     * @param i3 channel count
     */
    public static void a(MediaFormat mediaFormat, int i, int i2, int i3) {
        int freqIdx = a.getOrDefault(Integer.valueOf(i2), 4);
        ByteBuffer csd = ByteBuffer.allocate(2);
        csd.put(0, (byte) ((i << 3) | (freqIdx >> 1)));
        csd.put(1, (byte) (((freqIdx & 1) << 7) | (i3 << 3)));
        mediaFormat.setByteBuffer("csd-0", csd);
    }
}

View File

@@ -0,0 +1,65 @@
package com.hw.videoprocessor.util;
import android.util.Log;
/* loaded from: classes.dex */
/**
 * Minimal logging facade that tags each message with the caller's file name,
 * method and line number. Logging is compiled in but disabled by default
 * (the {@code a} switch is never flipped in this file).
 */
public class CL {
    // Global log switch; all methods are no-ops while false.
    private static boolean a = false;

    /** Caller info extracted from a stack trace. */
    static class TagInfo {
        String a; // log tag: caller's file name without the ".java" suffix
        String b; // caller's method name
        int c;    // caller's line number

        TagInfo() {
        }
    }

    /** Formats "[method():line]message" for the given caller info. */
    private static String a(TagInfo tagInfo, String str, Object... objArr) {
        return "[" + tagInfo.b + "():" + tagInfo.c + "]" + b(str, objArr);
    }

    /** Applies String.format only when format args were actually supplied. */
    private static String b(String str, Object... objArr) {
        return objArr.length == 0 ? str : String.format(str, objArr);
    }

    /** Info-level log. */
    public static void c(String str, Object... objArr) {
        if (a) {
            TagInfo a2 = a(new Throwable().getStackTrace());
            Log.i(a2.a, a(a2, str, objArr));
        }
    }

    /** Warn-level log. */
    public static void d(String str, Object... objArr) {
        if (a) {
            TagInfo a2 = a(new Throwable().getStackTrace());
            Log.w(a2.a, a(a2, str, objArr));
        }
    }

    /**
     * Extracts tag/method/line from frame [1] of the given stack trace
     * (the immediate caller of the CL method that built the Throwable).
     */
    private static TagInfo a(StackTraceElement[] stackTraceElementArr) {
        TagInfo tagInfo = new TagInfo();
        if (stackTraceElementArr.length > 1) {
            tagInfo.a = stackTraceElementArr[1].getFileName();
            // FIX: the decompiled code referenced a phantom register ("r1");
            // strip ".java" from the file name itself. Also guard against a
            // null file name (possible when source info is stripped).
            if (tagInfo.a != null && tagInfo.a.endsWith(".java")) {
                tagInfo.a = tagInfo.a.substring(0, tagInfo.a.length() - 5);
            }
            tagInfo.b = stackTraceElementArr[1].getMethodName();
            tagInfo.c = stackTraceElementArr[1].getLineNumber();
        }
        return tagInfo;
    }

    /** Error-level log. */
    public static void a(String str, Object... objArr) {
        if (a) {
            TagInfo a2 = a(new Throwable().getStackTrace());
            Log.e(a2.a, a(a2, str, objArr));
        }
    }

    /** Error-level log of a throwable, tagged with the throwable's own top frame. */
    public static void a(Throwable th) {
        if (a) {
            Log.e(a(th.getStackTrace()).a, "", th);
        }
    }
}

View File

@@ -0,0 +1,39 @@
package com.hw.videoprocessor.util;
/* loaded from: classes.dex */
public class FrameDropper {
private int a;
private int b;
private boolean c;
private int d;
private int e;
public FrameDropper(int i, int i2) {
this.a = i;
this.b = i2;
if (i <= i2) {
CL.a("原始帧率:" + i + "小于目标帧率:" + i2 + ",不支持补帧", new Object[0]);
this.c = true;
}
}
public boolean a(int i) {
if (this.c) {
return false;
}
if (i == 0) {
this.e++;
return false;
}
float f = (r7 - this.b) / this.a;
int i2 = this.d;
int i3 = this.e;
boolean z = Math.abs((((float) (i2 + 1)) / ((float) (i2 + i3))) - f) < Math.abs((((float) i2) / ((float) ((i2 + i3) + 1))) - f);
if (z) {
this.d++;
} else {
this.e++;
}
return z;
}
}

View File

@@ -0,0 +1,95 @@
package com.hw.videoprocessor.util;
import android.annotation.TargetApi;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;
@TargetApi(18)
/* loaded from: classes.dex */
/**
 * Wraps a Surface (typically a MediaCodec encoder input surface) with an EGL
 * window surface and an OpenGL ES 2 context so GL rendering can be fed to it.
 */
public class InputSurface {
    private EGLDisplay eglDisplay;
    private EGLContext eglContext;
    private EGLSurface eglSurface;
    private Surface surface;

    /**
     * @param surface the surface to render into; must not be null
     */
    public InputSurface(Surface surface) {
        if (surface == null) {
            throw new NullPointerException();
        }
        this.surface = surface;
        d();
    }

    /** Creates the EGL display, ES2 context and window surface. Call order matters. */
    private void d() {
        eglDisplay = EGL14.eglGetDisplay(0);
        if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("unable to get EGL14 display");
        }
        int[] version = new int[2];
        if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
            eglDisplay = null;
            throw new RuntimeException("unable to initialize EGL14");
        }
        // RGB888, ES2-capable, "recordable" config (12610 = EGL_RECORDABLE_ANDROID).
        int[] configAttribs = {12324, 8, 12323, 8, 12322, 8, 12352, 4, 12610, 1, 12344};
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(eglDisplay, configAttribs, 0, configs, 0, configs.length, numConfigs, 0)) {
            throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
        }
        // 12440 = EGL_CONTEXT_CLIENT_VERSION -> request an ES2 context.
        int[] contextAttribs = {12440, 2, 12344};
        eglContext = EGL14.eglCreateContext(eglDisplay, configs[0], EGL14.EGL_NO_CONTEXT, contextAttribs, 0);
        a("eglCreateContext");
        if (eglContext == null) {
            throw new RuntimeException("null context");
        }
        eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, configs[0], surface, new int[]{12344}, 0);
        a("eglCreateWindowSurface");
        if (eglSurface == null) {
            throw new RuntimeException("surface was null");
        }
    }

    /** Makes this instance's context/surface current on the calling thread. */
    public void a() {
        if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }

    /** Detaches the context if current, then destroys all EGL objects and the surface. */
    public void b() {
        if (EGL14.eglGetCurrentContext().equals(eglContext)) {
            EGL14.eglMakeCurrent(eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
        }
        EGL14.eglDestroySurface(eglDisplay, eglSurface);
        EGL14.eglDestroyContext(eglDisplay, eglContext);
        surface.release();
        eglDisplay = null;
        eglContext = null;
        eglSurface = null;
        surface = null;
    }

    /** Posts the rendered frame to the wrapped surface. */
    public boolean c() {
        return EGL14.eglSwapBuffers(eglDisplay, eglSurface);
    }

    /** Tags the next swapped frame with the given presentation time (nanoseconds). */
    public void a(long j) {
        EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, j);
    }

    /** Drains the EGL error queue and throws if any error was pending (12288 = EGL_SUCCESS). */
    private void a(String str) {
        boolean failed = false;
        while (EGL14.eglGetError() != 12288) {
            failed = true;
        }
        if (failed) {
            throw new RuntimeException("EGL error encountered (see log)");
        }
    }
}

View File

@@ -0,0 +1,96 @@
package com.hw.videoprocessor.util;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
@TargetApi(16)
/* loaded from: classes.dex */
/**
 * Holds a SurfaceTexture/Surface pair a decoder can render into, plus a
 * TextureRenderer that draws the latest latched frame onto the current EGL
 * surface. This constructor path does not create its own EGL context, so the
 * EGL fields stay null and c() skips EGL teardown.
 */
public class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
    private EGL10 a;          // non-null only when this instance owns an EGL context
    private SurfaceTexture e; // receives decoder frames as an external texture
    private Surface f;        // the surface handed to the decoder
    private boolean h;        // "new frame available" flag, guarded by g
    private TextureRenderer i;
    private EGLDisplay b = null;
    private EGLContext c = null;
    private EGLSurface d = null;
    private final Object g = new Object(); // lock + condition for frame arrival
    private int j = 0;        // rotation (degrees) passed to the renderer

    public OutputSurface() {
        d();
    }

    /** Creates the renderer and the SurfaceTexture/Surface bound to its texture id. */
    private void d() {
        this.i = new TextureRenderer(this.j);
        this.i.b();
        this.e = new SurfaceTexture(this.i.a());
        this.e.setOnFrameAvailableListener(this);
        this.f = new Surface(this.e);
    }

    /**
     * Blocks until onFrameAvailable() signals a new frame, then latches it with
     * updateTexImage(). Times out after 5 seconds.
     *
     * FIX: the decompiled do/while threw "Surface frame wait timed out" on every
     * path, which made the two statements after the synchronized block
     * unreachable (a compile error) and broke the wait logic. The timeout must
     * fire only when the wait elapsed without a frame arriving.
     */
    public void a() {
        synchronized (this.g) {
            while (!this.h) {
                try {
                    this.g.wait(5000L);
                    if (!this.h) {
                        throw new RuntimeException("Surface frame wait timed out");
                    }
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
            }
            this.h = false;
        }
        this.i.a("before updateTexImage");
        this.e.updateTexImage();
    }

    /** @return the surface the decoder should render into */
    public Surface b() {
        return this.f;
    }

    /** Releases the EGL objects (only if this instance owns them) and the surface. */
    public void c() {
        EGL10 egl10 = this.a;
        if (egl10 != null) {
            if (egl10.eglGetCurrentContext().equals(this.c)) {
                EGL10 egl102 = this.a;
                EGLDisplay eGLDisplay = this.b;
                EGLSurface eGLSurface = EGL10.EGL_NO_SURFACE;
                egl102.eglMakeCurrent(eGLDisplay, eGLSurface, eGLSurface, EGL10.EGL_NO_CONTEXT);
            }
            this.a.eglDestroySurface(this.b, this.d);
            this.a.eglDestroyContext(this.b, this.c);
        }
        this.f.release();
        this.b = null;
        this.c = null;
        this.d = null;
        this.a = null;
        this.i = null;
        this.f = null;
        this.e = null;
    }

    @Override // android.graphics.SurfaceTexture.OnFrameAvailableListener
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        synchronized (this.g) {
            if (this.h) {
                // A second frame arrived before a() consumed the first one.
                throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
            }
            this.h = true;
            this.g.notifyAll();
        }
    }

    /** Draws the latched frame; {@code z} vertically flips the texture coordinates. */
    public void a(boolean z) {
        this.i.a(this.e, z);
    }
}

View File

@@ -0,0 +1,52 @@
package com.hw.videoprocessor.util;
import android.media.AudioRecord;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
/* loaded from: classes.dex */
/**
 * Copies a raw PCM file to a new file with a 44-byte WAV (RIFF) header prepended.
 * NOTE(review): the header hard-codes 16 bits per sample regardless of the
 * encoding passed to the constructor — confirm the source PCM is 16-bit.
 */
public class PcmToWavUtil {
    private int a; // copy buffer size (AudioRecord's min buffer for these parameters)
    private int b; // sample rate in Hz
    private int c; // channel configuration (AudioFormat.CHANNEL_IN_*)
    private int d; // channel count
    private int e; // audio encoding (AudioFormat.ENCODING_PCM_*)

    /**
     * @param i  sample rate in Hz
     * @param i2 channel configuration
     * @param i3 channel count
     * @param i4 audio encoding
     */
    public PcmToWavUtil(int i, int i2, int i3, int i4) {
        // FIX: removed the dead default-value assignments the decompiled
        // constructor made before immediately overwriting every field.
        this.b = i;
        this.c = i2;
        this.d = i3;
        this.e = i4;
        this.a = AudioRecord.getMinBufferSize(this.b, this.c, this.e);
    }

    /**
     * Writes {@code str2} = WAV header + raw PCM bytes of {@code str}.
     * IOExceptions are logged (printStackTrace) and swallowed, matching the
     * original best-effort contract.
     */
    public void a(String str, String str2) {
        int i = this.b;
        long j = i;
        int i2 = this.d;
        // Byte rate = sampleRate * bitsPerSample(16) * channels / 8.
        long j2 = ((i * 16) * i2) / 8;
        byte[] bArr = new byte[this.a];
        // FIX: try-with-resources closes both streams even on error (the original
        // leaked them on exception), and partial reads are written with their
        // actual length instead of flushing the whole stale buffer.
        try (FileInputStream fileInputStream = new FileInputStream(str);
             FileOutputStream fileOutputStream = new FileOutputStream(str2)) {
            long size = fileInputStream.getChannel().size();
            a(fileOutputStream, size, size + 36, j, i2, j2);
            int read;
            while ((read = fileInputStream.read(bArr)) != -1) {
                fileOutputStream.write(bArr, 0, read);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Writes the canonical 44-byte RIFF/WAVE header (little-endian fields).
     *
     * @param j  PCM data length in bytes ("data" chunk size)
     * @param j2 total file size minus 8 (RIFF chunk size = data + 36)
     * @param j3 sample rate
     * @param i  channel count
     * @param j4 byte rate (sampleRate * bitsPerSample * channels / 8)
     */
    private void a(FileOutputStream fileOutputStream, long j, long j2, long j3, int i, long j4) throws IOException {
        // Layout: "RIFF" size "WAVE" "fmt " 16 PCM(1) channels rate byteRate blockAlign(4) bits(16) "data" size.
        fileOutputStream.write(new byte[]{82, 73, 70, 70, (byte) (j2 & 255), (byte) ((j2 >> 8) & 255), (byte) ((j2 >> 16) & 255), (byte) ((j2 >> 24) & 255), 87, 65, 86, 69, 102, 109, 116, 32, 16, 0, 0, 0, 1, 0, (byte) i, 0, (byte) (j3 & 255), (byte) ((j3 >> 8) & 255), (byte) ((j3 >> 16) & 255), (byte) ((j3 >> 24) & 255), (byte) (j4 & 255), (byte) ((j4 >> 8) & 255), (byte) ((j4 >> 16) & 255), (byte) ((j4 >> 24) & 255), 4, 0, 16, 0, 100, 97, 116, 97, (byte) (j & 255), (byte) ((j >> 8) & 255), (byte) ((j >> 16) & 255), (byte) ((j >> 24) & 255)}, 0, 44);
    }
}

View File

@@ -0,0 +1,156 @@
package com.hw.videoprocessor.util;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.Matrix;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
@TargetApi(16)
/* loaded from: classes.dex */
/**
 * Draws a GL_TEXTURE_EXTERNAL_OES texture (fed by a SurfaceTexture) as a
 * full-screen quad, with an optional rotation (degrees, fixed at construction)
 * baked into the MVP matrix. Must be used with an EGL context current.
 */
public class TextureRenderer {
    // Triangle-strip quad, interleaved per vertex: x, y, z, u, v (4 vertices, 20-byte stride).
    private static final float[] k = {-1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, -1.0f, 0.0f, 1.0f, 0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 1.0f, 1.0f};
    private int d; // GL program handle
    private int f; // uMVPMatrix uniform location
    private int g; // uSTMatrix uniform location
    private int h; // aPosition attribute location
    private int i; // aTextureCoord attribute location
    private int j; // rotation in degrees around the z axis
    private float[] b = new float[16]; // MVP matrix
    private float[] c = new float[16]; // texture transform matrix from SurfaceTexture
    private int e = -12345; // external texture id (placeholder until b() creates the real one)
    private FloatBuffer a = ByteBuffer.allocateDirect(k.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();

    /**
     * @param i rotation in degrees applied to the quad when drawing
     */
    public TextureRenderer(int i) {
        this.j = 0;
        this.j = i;
        this.a.put(k).position(0);
        Matrix.setIdentityM(this.c, 0);
    }

    /** @return the external texture id created by {@link #b()} */
    public int a() {
        return this.e;
    }

    /**
     * Compiles/links the shaders, resolves attribute and uniform locations, and
     * creates the external texture. Call once with an EGL context current,
     * before the first draw.
     */
    public void b() {
        this.d = a("uniform mat4 uMVPMatrix;\nuniform mat4 uSTMatrix;\nattribute vec4 aPosition;\nattribute vec4 aTextureCoord;\nvarying vec2 vTextureCoord;\nvoid main() {\n gl_Position = uMVPMatrix * aPosition;\n vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n}\n", "#extension GL_OES_EGL_image_external : require\nprecision mediump float;\nvarying vec2 vTextureCoord;\nuniform samplerExternalOES sTexture;\nvoid main() {\n gl_FragColor = texture2D(sTexture, vTextureCoord);\n}\n");
        int i = this.d;
        if (i == 0) {
            throw new RuntimeException("failed creating program");
        }
        this.h = GLES20.glGetAttribLocation(i, "aPosition");
        a("glGetAttribLocation aPosition");
        if (this.h == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        this.i = GLES20.glGetAttribLocation(this.d, "aTextureCoord");
        a("glGetAttribLocation aTextureCoord");
        if (this.i == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }
        this.f = GLES20.glGetUniformLocation(this.d, "uMVPMatrix");
        a("glGetUniformLocation uMVPMatrix");
        if (this.f == -1) {
            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
        }
        this.g = GLES20.glGetUniformLocation(this.d, "uSTMatrix");
        a("glGetUniformLocation uSTMatrix");
        if (this.g == -1) {
            throw new RuntimeException("Could not get attrib location for uSTMatrix");
        }
        int[] iArr = new int[1];
        GLES20.glGenTextures(1, iArr, 0);
        this.e = iArr[0];
        // 36197 = GL_TEXTURE_EXTERNAL_OES.
        GLES20.glBindTexture(36197, this.e);
        a("glBindTexture mTextureID");
        // 10241/9728 = MIN_FILTER/NEAREST; 10240/9729 = MAG_FILTER/LINEAR.
        GLES20.glTexParameterf(36197, 10241, 9728.0f);
        GLES20.glTexParameterf(36197, 10240, 9729.0f);
        // 10242/10243 = WRAP_S/WRAP_T; 33071 = CLAMP_TO_EDGE.
        GLES20.glTexParameteri(36197, 10242, 33071);
        GLES20.glTexParameteri(36197, 10243, 33071);
        a("glTexParameter");
        Matrix.setIdentityM(this.b, 0);
        int i2 = this.j;
        if (i2 != 0) {
            // Bake the configured rotation into the MVP matrix.
            Matrix.rotateM(this.b, 0, i2, 0.0f, 0.0f, 1.0f);
        }
    }

    /**
     * Draws the texture's current frame onto the bound surface.
     *
     * @param z when true, flips the texture transform vertically
     */
    public void a(SurfaceTexture surfaceTexture, boolean z) {
        a("onDrawFrame start");
        surfaceTexture.getTransformMatrix(this.c);
        if (z) {
            // Negate the v scale and shift the v translation to mirror vertically.
            float[] fArr = this.c;
            fArr[5] = -fArr[5];
            fArr[13] = 1.0f - fArr[13];
        }
        GLES20.glUseProgram(this.d);
        a("glUseProgram");
        // 33984 = GL_TEXTURE0; 36197 = GL_TEXTURE_EXTERNAL_OES.
        GLES20.glActiveTexture(33984);
        GLES20.glBindTexture(36197, this.e);
        // Positions: 3 floats at offset 0 of the interleaved buffer (5126 = GL_FLOAT, stride 20).
        this.a.position(0);
        GLES20.glVertexAttribPointer(this.h, 3, 5126, false, 20, (Buffer) this.a);
        a("glVertexAttribPointer maPosition");
        GLES20.glEnableVertexAttribArray(this.h);
        a("glEnableVertexAttribArray maPositionHandle");
        // Texture coords: 2 floats starting at float offset 3 of the same buffer.
        this.a.position(3);
        GLES20.glVertexAttribPointer(this.i, 2, 5126, false, 20, (Buffer) this.a);
        a("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(this.i);
        a("glEnableVertexAttribArray maTextureHandle");
        GLES20.glUniformMatrix4fv(this.g, 1, false, this.c, 0);
        GLES20.glUniformMatrix4fv(this.f, 1, false, this.b, 0);
        // 5 = GL_TRIANGLE_STRIP, 4 vertices.
        GLES20.glDrawArrays(5, 0, 4);
        a("glDrawArrays");
        GLES20.glFinish();
    }

    /**
     * Compiles a shader of the given type (35633 = GL_VERTEX_SHADER,
     * 35632 = GL_FRAGMENT_SHADER); returns 0 on compile failure.
     */
    private int a(int i, String str) {
        int glCreateShader = GLES20.glCreateShader(i);
        a("glCreateShader type=" + i);
        GLES20.glShaderSource(glCreateShader, str);
        GLES20.glCompileShader(glCreateShader);
        int[] iArr = new int[1];
        // 35713 = GL_COMPILE_STATUS.
        GLES20.glGetShaderiv(glCreateShader, 35713, iArr, 0);
        if (iArr[0] != 0) {
            return glCreateShader;
        }
        GLES20.glDeleteShader(glCreateShader);
        return 0;
    }

    /**
     * Compiles and links a program from vertex/fragment shader sources;
     * returns 0 on any failure.
     */
    private int a(String str, String str2) {
        int a;
        int a2 = a(35633, str);
        if (a2 == 0 || (a = a(35632, str2)) == 0) {
            return 0;
        }
        int glCreateProgram = GLES20.glCreateProgram();
        a("glCreateProgram");
        if (glCreateProgram == 0) {
            return 0;
        }
        GLES20.glAttachShader(glCreateProgram, a2);
        a("glAttachShader");
        GLES20.glAttachShader(glCreateProgram, a);
        a("glAttachShader");
        GLES20.glLinkProgram(glCreateProgram);
        int[] iArr = new int[1];
        // 35714 = GL_LINK_STATUS.
        GLES20.glGetProgramiv(glCreateProgram, 35714, iArr, 0);
        if (iArr[0] == 1) {
            return glCreateProgram;
        }
        GLES20.glDeleteProgram(glCreateProgram);
        return 0;
    }

    /** Throws if a GL error is pending, prefixed with the given operation label. */
    public void a(String str) {
        int glGetError = GLES20.glGetError();
        if (glGetError == 0) {
            return;
        }
        throw new RuntimeException(str + ": glError " + glGetError);
    }
}

View File

@@ -0,0 +1,59 @@
package com.hw.videoprocessor.util;
/* loaded from: classes.dex */
/**
 * Combines the video-encode progress and the audio progress into one averaged
 * value reported to a single listener.
 */
public class VideoProgressAve {
    private VideoProgressListener a; // combined progress callback (may be null)
    private float b;                 // encode progress in [0, 1]
    private float c;                 // audio progress in [0, 1]
    private int d;                   // clip start time in ms
    private int e;                   // clip end time in ms
    private Float f;                 // optional speed factor applied to timestamps

    public VideoProgressAve(VideoProgressListener videoProgressListener) {
        this.a = videoProgressListener;
    }

    /**
     * Updates the encode progress from an encoder presentation time in
     * microseconds and notifies the listener with the average of the encode
     * and audio progress.
     */
    public void a(long j) {
        if (this.a == null) {
            return;
        }
        Float f = this.f;
        if (f != null) {
            // Scale the timestamp by the playback-speed factor.
            j = (long) (j * f.floatValue());
        }
        // FIX: the decompiled expression referenced a phantom register ("r3");
        // progress = (timeMs - startMs) / (endMs - startMs), clamped to [0, 1].
        this.b = ((j / 1000.0f) - this.d) / (this.e - this.d);
        if (this.b < 0.0f) {
            this.b = 0.0f;
        }
        if (this.b > 1.0f) {
            this.b = 1.0f;
        }
        this.a.onProgress((this.b + this.c) / 2.0f);
        CL.c("mEncodeProgress:" + this.b, new Object[0]);
    }

    /** Sets the clip start time in ms. */
    public void b(int i) {
        this.d = i;
    }

    /** Updates the audio progress and notifies the listener with the new average. */
    public void a(float f) {
        this.c = f;
        VideoProgressListener videoProgressListener = this.a;
        if (videoProgressListener != null) {
            videoProgressListener.onProgress((this.b + this.c) / 2.0f);
        }
        CL.c("mAudioProgress:" + this.c, new Object[0]);
    }

    /** Sets the clip end time in ms. */
    public void a(int i) {
        this.e = i;
    }

    /** Sets the speed factor applied to incoming encoder timestamps. */
    public void a(Float f) {
        this.f = f;
    }
}

View File

@@ -0,0 +1,6 @@
package com.hw.videoprocessor.util;
/* loaded from: classes.dex */
/** Callback for reporting processing progress. */
public interface VideoProgressListener {
    /**
     * @param f progress value; callers in this package clamp it to [0, 1]
     */
    void onProgress(float f);
}