| app/src/main/AndroidManifest.xml | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
| app/src/main/java/com/anyun/h264/AACEncoder.java | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
| app/src/main/java/com/anyun/h264/H264Encoder.java | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
| app/src/main/java/com/anyun/h264/JT1076ProtocolHelper.java | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
| app/src/main/java/com/anyun/h264/MainActivity.kt | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
| gradle/wrapper/gradle-wrapper.properties | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
| settings.gradle | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 |
app/src/main/AndroidManifest.xml
@@ -1,7 +1,15 @@ <?xml version="1.0" encoding="utf-8"?> <manifest xmlns:android="http://schemas.android.com/apk/res/android" android:sharedUserId="android.uid.system" xmlns:tools="http://schemas.android.com/tools"> <!-- 网络权限(用于UDP上传) --> <uses-permission android:name="android.permission.INTERNET" /> <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" /> <!-- USB摄像头权限 --> <uses-feature android:name="android.hardware.usb.host" /> <application android:allowBackup="true" android:dataExtractionRules="@xml/data_extraction_rules" app/src/main/java/com/anyun/h264/AACEncoder.java
package com.anyun.h264;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * AAC audio encoder.
 *
 * <p>Captures PCM audio from the microphone with {@link AudioRecord}, encodes it to AAC-LC
 * with {@link MediaCodec}, and uploads every encoded frame over UDP packaged according to the
 * JT/T 1076-2016 protocol (see {@link JT1076ProtocolHelper}).
 *
 * <p>NOTE(review): using {@link AudioRecord} requires the {@code RECORD_AUDIO} permission,
 * which is not visible in the manifest diff of this change — confirm it is declared.
 */
public class AACEncoder {

    private static final String TAG = "AACEncoder";

    private MediaCodec encoder;
    private AudioRecord audioRecord;
    private Thread encodeThread;
    private final AtomicBoolean isRunning = new AtomicBoolean(false);

    // Audio capture parameters (configurable via setAudioParams before initialize()).
    private int sampleRate = 16000;   // Hz
    private int channelCount = 1;     // 1 = mono, 2 = stereo
    private int bitrate = 64000;      // bits per second
    private final int audioFormat = AudioFormat.ENCODING_PCM_16BIT;

    // JT/T 1076-2016 protocol helper (UDP socket + RTP packaging).
    private final JT1076ProtocolHelper protocolHelper;

    /** Callback invoked on the encoder thread for every encoded AAC frame. */
    public interface OnFrameEncodedCallback {
        void onFrameEncoded(byte[] data);
    }

    private OnFrameEncodedCallback callback;

    public AACEncoder() {
        this.protocolHelper = new JT1076ProtocolHelper();
    }

    /** Sets sample rate (Hz), channel count (1 or 2) and target bitrate (bps). */
    public void setAudioParams(int sampleRate, int channelCount, int bitrate) {
        this.sampleRate = sampleRate;
        this.channelCount = channelCount;
        this.bitrate = bitrate;
    }

    /** Sets the UDP server address encoded frames are uploaded to. */
    public void setServerAddress(String ip, int port) {
        protocolHelper.setServerAddress(ip, port);
    }

    /** Sets the SIM card number and logical channel number used in the JT/T 1076 header. */
    public void setProtocolParams(String simCardNumber, byte logicalChannelNumber) {
        protocolHelper.setProtocolParams(simCardNumber, logicalChannelNumber);
    }

    /** Registers a callback that receives every encoded frame. */
    public void setOnFrameEncodedCallback(OnFrameEncodedCallback callback) {
        this.callback = callback;
    }

    /**
     * Initializes the AudioRecord source, the AAC encoder and the UDP socket.
     *
     * @return true on success; on failure any partially created resources are released.
     */
    public boolean initialize() {
        try {
            // 1. AudioRecord
            int channelConfig = channelCount == 1
                    ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
            int bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
            if (bufferSize == AudioRecord.ERROR_BAD_VALUE || bufferSize == AudioRecord.ERROR) {
                Log.e(TAG, "Invalid audio parameters");
                return false;
            }
            // Use a larger buffer to avoid underruns.
            bufferSize *= 4;

            audioRecord = new AudioRecord(
                    android.media.MediaRecorder.AudioSource.MIC,
                    sampleRate,
                    channelConfig,
                    audioFormat,
                    bufferSize);
            if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
                Log.e(TAG, "AudioRecord initialization failed");
                releasePartialResources();
                return false;
            }
            Log.d(TAG, "AudioRecord initialized: sampleRate=" + sampleRate
                    + ", channels=" + channelCount + ", bufferSize=" + bufferSize);

            // 2. AAC encoder
            initEncoder();

            // 3. UDP socket
            if (!protocolHelper.initializeUdpSocket()) {
                releasePartialResources();
                return false;
            }
            return true;
        } catch (Exception e) {
            Log.e(TAG, "Initialize failed", e);
            releasePartialResources();
            return false;
        }
    }

    /** Releases partially initialized capture/encoder resources after a failed initialize(). */
    private void releasePartialResources() {
        if (encoder != null) {
            try {
                encoder.release();
            } catch (Exception e) {
                Log.e(TAG, "Release encoder error", e);
            }
            encoder = null;
        }
        if (audioRecord != null) {
            try {
                audioRecord.release();
            } catch (Exception e) {
                Log.e(TAG, "Release AudioRecord error", e);
            }
            audioRecord = null;
        }
    }

    /** Creates, configures and starts the AAC-LC MediaCodec encoder. */
    private void initEncoder() throws IOException {
        MediaFormat format = MediaFormat.createAudioFormat(
                MediaFormat.MIMETYPE_AUDIO_AAC, sampleRate, channelCount);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE,
                MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 4096);

        encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        encoder.start();
        Log.d(TAG, "AAC encoder initialized");
    }

    /** Starts recording and launches the background encode thread. No-op if already running. */
    public void start() {
        if (isRunning.get()) {
            Log.w(TAG, "Encoder is already running");
            return;
        }
        if (audioRecord == null || encoder == null) {
            Log.e(TAG, "Encoder not initialized");
            return;
        }
        isRunning.set(true);

        audioRecord.startRecording();

        encodeThread = new Thread(new Runnable() {
            @Override
            public void run() {
                encodeLoop();
            }
        });
        encodeThread.start();
        Log.d(TAG, "AAC encoder started");
    }

    /** Main capture/encode loop; runs on the encode thread until stop() is called. */
    private void encodeLoop() {
        // Read roughly 40 ms of PCM per iteration:
        // sampleRate * channels * 2 bytes-per-sample / 25 reads-per-second.
        int inputBufferSize = sampleRate * channelCount * 2 / 25;
        byte[] pcmBuffer = new byte[inputBufferSize];
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

        while (isRunning.get()) {
            try {
                int bytesRead = audioRecord.read(pcmBuffer, 0, pcmBuffer.length);
                if (bytesRead < 0) {
                    Log.w(TAG, "AudioRecord read error: " + bytesRead);
                    Thread.sleep(10);
                    continue;
                }
                long timestamp = System.currentTimeMillis();
                encodeFrame(pcmBuffer, bytesRead, timestamp, bufferInfo);
            } catch (Exception e) {
                Log.e(TAG, "Encode loop error", e);
                try {
                    Thread.sleep(10);
                } catch (InterruptedException ie) {
                    break;
                }
            }
        }
        Log.d(TAG, "Encode loop exited");
    }

    /**
     * Feeds one PCM buffer into the encoder and drains all available encoded output,
     * sending each encoded frame over UDP and invoking the registered callback.
     */
    private void encodeFrame(byte[] pcmData, int dataSize, long timestamp,
                             MediaCodec.BufferInfo bufferInfo) {
        try {
            int inputBufferIndex = encoder.dequeueInputBuffer(10000);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = encoder.getInputBuffer(inputBufferIndex);
                if (inputBuffer != null) {
                    inputBuffer.clear();
                    inputBuffer.put(pcmData, 0, dataSize);
                    // MediaCodec timestamps are in microseconds.
                    encoder.queueInputBuffer(inputBufferIndex, 0, dataSize, timestamp * 1000, 0);
                }
            }

            int outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = encoder.getOutputBuffer(outputBufferIndex);
                if (outputBuffer != null && bufferInfo.size > 0) {
                    byte[] encodedData = new byte[bufferInfo.size];
                    outputBuffer.position(bufferInfo.offset);
                    outputBuffer.get(encodedData, 0, bufferInfo.size);

                    sendEncodedData(encodedData, timestamp);

                    if (callback != null) {
                        callback.onFrameEncoded(encodedData);
                    }
                }
                encoder.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Exception e) {
            Log.e(TAG, "Encode frame error", e);
        }
    }

    /**
     * Sends one encoded AAC frame, splitting it into JT/T 1076-2016 RTP packets when it
     * exceeds {@link JT1076ProtocolHelper#MAX_PACKET_SIZE}. Audio packets carry no
     * Last-I-Frame / Last-Frame interval fields, so no frame-time bookkeeping is needed here.
     */
    private void sendEncodedData(byte[] data, long timestamp) {
        try {
            int offset = 0;
            int totalPackets =
                    (int) Math.ceil((double) data.length / JT1076ProtocolHelper.MAX_PACKET_SIZE);
            for (int i = 0; i < totalPackets; i++) {
                int packetDataSize =
                        Math.min(JT1076ProtocolHelper.MAX_PACKET_SIZE, data.length - offset);
                byte[] packetData = Arrays.copyOfRange(data, offset, offset + packetDataSize);

                // Fragmentation mark: atomic / first / middle / last.
                int packetMark;
                if (totalPackets == 1) {
                    packetMark = JT1076ProtocolHelper.PACKET_MARK_ATOMIC;
                } else if (i == 0) {
                    packetMark = JT1076ProtocolHelper.PACKET_MARK_FIRST;
                } else if (i == totalPackets - 1) {
                    packetMark = JT1076ProtocolHelper.PACKET_MARK_LAST;
                } else {
                    packetMark = JT1076ProtocolHelper.PACKET_MARK_MIDDLE;
                }

                byte[] rtpPacket = protocolHelper.createAudioRtpPacket(
                        packetData, timestamp, JT1076ProtocolHelper.DATA_TYPE_AUDIO, packetMark);
                protocolHelper.sendUdpPacket(rtpPacket);

                offset += packetDataSize;
            }
        } catch (Exception e) {
            Log.e(TAG, "Send encoded data error", e);
        }
    }

    /** Stops capture and encoding, joins the encode thread, and releases all resources. */
    public void stop() {
        if (!isRunning.get()) {
            return;
        }
        isRunning.set(false);

        // Stop recording first so a blocked read() returns and the loop can exit.
        if (audioRecord != null) {
            try {
                audioRecord.stop();
            } catch (Exception e) {
                Log.e(TAG, "Stop AudioRecord error", e);
            }
        }

        if (encodeThread != null) {
            try {
                encodeThread.join(2000);
            } catch (InterruptedException e) {
                Log.e(TAG, "Wait encode thread error", e);
            }
        }

        if (encoder != null) {
            try {
                encoder.stop();
                encoder.release();
                encoder = null;
            } catch (Exception e) {
                Log.e(TAG, "Release encoder error", e);
            }
        }

        if (audioRecord != null) {
            audioRecord.release();
            audioRecord = null;
        }

        if (protocolHelper != null) {
            protocolHelper.closeUdpSocket();
        }
        Log.d(TAG, "AAC encoder stopped");
    }

    /** Releases all resources; alias for {@link #stop()}. */
    public void release() {
        stop();
    }
}
package com.anyun.h264;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;

import com.anyun.libusbcamera.UsbCamera;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * H264 video encoder.
 *
 * <p>Pulls frames from a {@link UsbCamera}, encodes them to H.264 with {@link MediaCodec},
 * optionally writes the Annex-B stream to a file (playable with VLC), and uploads encoded
 * frames over UDP packaged according to the JT/T 1076-2016 protocol.
 *
 * <p>Usage:
 * <pre>
 * H264Encoder encoder = new H264Encoder();
 * encoder.setEncoderParams(640, 480, 25, 2000000);
 *
 * // Optional: save the raw H264 stream for verification (use app external storage).
 * File outputFile = new File(context.getExternalFilesDir(null), "test.h264");
 * encoder.setOutputFile(outputFile.getAbsolutePath());
 * encoder.setEnableFileOutput(true);
 *
 * // Optional: UDP upload target.
 * encoder.setServerAddress("192.168.1.100", 8888);
 * encoder.setProtocolParams("123456789012", (byte) 1);
 *
 * int[] cameraIdRange = {0, 0};
 * int[] resolution = {640, 480};
 * if (encoder.initialize(cameraIdRange, "camera", resolution, false)) {
 *     encoder.start();
 * }
 * ...
 * encoder.stop();
 * </pre>
 */
public class H264Encoder {

    private static final String TAG = "H264Encoder";

    private UsbCamera usbCamera;
    private MediaCodec encoder;
    private Thread encodeThread;
    private final AtomicBoolean isRunning = new AtomicBoolean(false);

    // Encoder parameters (configurable via setEncoderParams before initialize()).
    private int width = 640;
    private int height = 480;
    private int frameRate = 25;
    private int bitrate = 2000000;     // 2 Mbps
    private int iFrameInterval = 1;    // I-frame interval in seconds

    // JT/T 1076-2016 protocol helper and frame-interval bookkeeping.
    private final JT1076ProtocolHelper protocolHelper;
    private long lastIFrameTime = 0;   // wall-clock time of the previous I frame (ms)
    private long lastFrameTime = 0;    // wall-clock time of the previous frame (ms)

    // Optional raw-stream file output.
    private FileOutputStream fileOutputStream;
    private String outputFilePath;
    private boolean enableFileOutput = false;
    private boolean spsPpsWritten = false;  // whether SPS/PPS have been written to the file

    // Reused output buffer for nv12ToNV21 (avoids per-frame allocation).
    private byte[] nv21Buffer = null;

    /** Callback invoked on the encoder thread for every encoded frame. */
    public interface OnFrameEncodedCallback {
        void onFrameEncoded(byte[] data, boolean isKeyFrame);
    }

    private OnFrameEncodedCallback callback;

    public H264Encoder() {
        this.usbCamera = new UsbCamera();
        this.protocolHelper = new JT1076ProtocolHelper();
    }

    /** Sets resolution, frame rate and target bitrate. Call before initialize(). */
    public void setEncoderParams(int width, int height, int frameRate, int bitrate) {
        this.width = width;
        this.height = height;
        this.frameRate = frameRate;
        this.bitrate = bitrate;
    }

    /** Sets the UDP server address encoded frames are uploaded to. */
    public void setServerAddress(String ip, int port) {
        protocolHelper.setServerAddress(ip, port);
    }

    /** Sets the SIM card number and logical channel number used in the JT/T 1076 header. */
    public void setProtocolParams(String simCardNumber, byte logicalChannelNumber) {
        protocolHelper.setProtocolParams(simCardNumber, logicalChannelNumber);
    }

    /** Registers a callback that receives every encoded frame. */
    public void setOnFrameEncodedCallback(OnFrameEncodedCallback callback) {
        this.callback = callback;
    }

    /**
     * Sets the output file path for the raw H264 stream,
     * e.g. "/sdcard/test.h264" or a path under Context.getExternalFilesDir().
     */
    public void setOutputFile(String filePath) {
        this.outputFilePath = filePath;
    }

    /** Enables or disables writing the encoded stream to the output file. */
    public void setEnableFileOutput(boolean enable) {
        this.enableFileOutput = enable;
    }

    /**
     * Initializes the camera (with retries), the H264 encoder, the UDP socket and,
     * if enabled, the file output.
     *
     * @param cameraIdRange camera id range passed through to UsbCamera.prepareCamera
     * @param cameraName    camera name passed through to UsbCamera.prepareCamera
     * @param resolution    requested {width, height}; the actual resolution chosen by the
     *                      camera is written back into this encoder's width/height
     * @param ayCamera      flag passed through to UsbCamera.prepareCamera
     * @return true on success
     */
    public boolean initialize(int[] cameraIdRange, String cameraName, int[] resolution,
                              boolean ayCamera) {
        try {
            // 1. Set up the native environment.
            usbCamera.setenv();

            // 2. prepareCamera, up to 3 attempts (1 initial + 2 retries).
            int[] actualResolution = new int[2];
            System.arraycopy(resolution, 0, actualResolution, 0, 2);

            int result = -1;
            int maxRetries = 3;
            for (int attempt = 0; attempt < maxRetries; attempt++) {
                result = usbCamera.prepareCamera(cameraIdRange, cameraName, actualResolution,
                        ayCamera);
                if (result == 0) {
                    if (attempt > 0) {
                        Log.d(TAG, "prepareCamera succeeded on attempt " + (attempt + 1));
                    }
                    break;
                }
                Log.w(TAG, "prepareCamera failed on attempt " + (attempt + 1) + ": " + result);
                if (attempt < maxRetries - 1) {
                    Log.d(TAG, "Retrying prepareCamera...");
                }
            }
            if (result != 0) {
                Log.e(TAG, "prepareCamera failed after " + maxRetries + " attempts: " + result);
                return false;
            }

            // The camera may have negotiated a different resolution; use it.
            width = actualResolution[0];
            height = actualResolution[1];
            Log.d(TAG, "Camera initialized with resolution: " + width + "x" + height);

            // 3. H264 encoder.
            initEncoder();

            // 4. UDP socket.
            if (!protocolHelper.initializeUdpSocket()) {
                return false;
            }

            // 5. File output (file is created now; SPS/PPS are written on first output).
            if (enableFileOutput && outputFilePath != null && !outputFilePath.isEmpty()) {
                if (!initFileOutput()) {
                    Log.w(TAG, "File output initialization failed, continuing without file output");
                }
            }
            return true;
        } catch (Exception e) {
            Log.e(TAG, "Initialize failed", e);
            return false;
        }
    }

    /** Creates, configures and starts the AVC MediaCodec encoder. */
    private void initEncoder() throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat(
                MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval);

        encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        encoder.start();
        Log.d(TAG, "H264 encoder initialized");
    }

    /**
     * Creates the output file (and parent directories) for the raw H264 stream.
     *
     * @return true on success
     */
    private boolean initFileOutput() {
        try {
            File file = new File(outputFilePath);
            File parentDir = file.getParentFile();
            if (parentDir != null && !parentDir.exists()) {
                boolean created = parentDir.mkdirs();
                if (!created && !parentDir.exists()) {
                    Log.e(TAG, "Failed to create parent directory: "
                            + parentDir.getAbsolutePath());
                    return false;
                }
            }
            fileOutputStream = new FileOutputStream(file);
            spsPpsWritten = false;
            Log.d(TAG, "File output initialized: " + outputFilePath);
            return true;
        } catch (Exception e) {
            Log.e(TAG, "Initialize file output failed", e);
            if (fileOutputStream != null) {
                try {
                    fileOutputStream.close();
                } catch (IOException ie) {
                    Log.e(TAG, "Close file output stream failed", ie);
                }
                fileOutputStream = null;
            }
            return false;
        }
    }

    /**
     * Writes SPS/PPS to the output file in Annex-B form, extracted from the encoder's
     * CSD buffers (csd-0 = SPS, csd-1 = PPS). CSD may arrive in AVCC form (4-byte
     * big-endian length prefix), in which case the prefix is stripped first.
     */
    private void writeSpsPpsToFile() {
        if (!enableFileOutput || fileOutputStream == null || spsPpsWritten) {
            return;
        }
        try {
            MediaFormat format = encoder.getOutputFormat();
            ByteBuffer spsBuffer = format.getByteBuffer("csd-0"); // SPS
            ByteBuffer ppsBuffer = format.getByteBuffer("csd-1"); // PPS
            if (spsBuffer != null && ppsBuffer != null) {
                byte[] sps = new byte[spsBuffer.remaining()];
                byte[] pps = new byte[ppsBuffer.remaining()];
                spsBuffer.get(sps);
                ppsBuffer.get(pps);

                byte[] nalStartCode = {0x00, 0x00, 0x00, 0x01};

                // Detect AVCC (no 00 00 00 01 start code) and skip the 4-byte length prefix.
                int spsOffset = 0;
                int ppsOffset = 0;
                int spsLength = sps.length;
                int ppsLength = pps.length;
                if (sps.length > 4 && (sps[0] != 0x00 || sps[1] != 0x00
                        || sps[2] != 0x00 || sps[3] != 0x01)) {
                    spsOffset = 4;
                    spsLength = sps.length - 4;
                }
                if (pps.length > 4 && (pps[0] != 0x00 || pps[1] != 0x00
                        || pps[2] != 0x00 || pps[3] != 0x01)) {
                    ppsOffset = 4;
                    ppsLength = pps.length - 4;
                }

                fileOutputStream.write(nalStartCode);
                fileOutputStream.write(sps, spsOffset, spsLength);
                fileOutputStream.write(nalStartCode);
                fileOutputStream.write(pps, ppsOffset, ppsLength);
                fileOutputStream.flush();
                spsPpsWritten = true;
                Log.d(TAG, "SPS/PPS written to file, SPS size: " + spsLength
                        + ", PPS size: " + ppsLength);
            } else {
                Log.w(TAG, "SPS/PPS not found in CSD, will extract from first key frame");
            }
        } catch (Exception e) {
            Log.e(TAG, "Write SPS/PPS to file error", e);
        }
    }

    /** Launches the background encode thread. No-op if already running. */
    public void start() {
        if (isRunning.get()) {
            Log.w(TAG, "Encoder is already running");
            return;
        }
        isRunning.set(true);

        encodeThread = new Thread(new Runnable() {
            @Override
            public void run() {
                encodeLoop();
            }
        });
        encodeThread.start();
        Log.d(TAG, "H264 encoder started");
    }

    /** Main capture/encode loop; runs on the encode thread until stop() is called. */
    private void encodeLoop() {
        // YUV420P layout: Y plane (w*h) + U plane (w*h/4) + V plane (w*h/4) = w*h*3/2.
        byte[] yuvBuffer = new byte[width * height * 3 / 2];
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

        while (isRunning.get()) {
            try {
                // Grab one frame from the camera.
                int processResult = usbCamera.processCamera();
                if (processResult != 0) {
                    Log.w(TAG, "processCamera returned: " + processResult);
                    Thread.sleep(10);
                    continue;
                }
                // type=1 requests YUV420P output for streaming.
                usbCamera.rgba(1, yuvBuffer);

                long timestamp = System.currentTimeMillis();
                encodeFrame(yuvBuffer, timestamp, bufferInfo);
            } catch (Exception e) {
                Log.e(TAG, "Encode loop error", e);
                try {
                    Thread.sleep(100);
                } catch (InterruptedException ie) {
                    break;
                }
            }
        }
        Log.d(TAG, "Encode loop exited");
    }

    /**
     * Feeds one YUV frame into the encoder and drains all available encoded output,
     * writing each encoded frame to the file (if enabled), sending it over UDP, and
     * invoking the registered callback.
     */
    private void encodeFrame(byte[] yuvData, long timestamp, MediaCodec.BufferInfo bufferInfo) {
        try {
            int inputBufferIndex = encoder.dequeueInputBuffer(10000);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = encoder.getInputBuffer(inputBufferIndex);
                if (inputBuffer != null) {
                    inputBuffer.clear();
                    inputBuffer.put(yuvData);
                    // MediaCodec timestamps are in microseconds.
                    encoder.queueInputBuffer(inputBufferIndex, 0, yuvData.length,
                            timestamp * 1000, 0);
                }
            }

            int outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = encoder.getOutputBuffer(outputBufferIndex);
                if (outputBuffer != null && bufferInfo.size > 0) {
                    boolean isKeyFrame =
                            (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;

                    byte[] encodedData = new byte[bufferInfo.size];
                    outputBuffer.position(bufferInfo.offset);
                    outputBuffer.get(encodedData, 0, bufferInfo.size);

                    writeToFile(encodedData, isKeyFrame);
                    sendEncodedData(encodedData, timestamp, isKeyFrame);

                    if (callback != null) {
                        callback.onFrameEncoded(encodedData, isKeyFrame);
                    }
                }
                encoder.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Exception e) {
            Log.e(TAG, "Encode frame error", e);
        }
    }

    /** Appends one encoded frame to the H264 output file (Annex-B, as emitted by MediaCodec). */
    private void writeToFile(byte[] data, boolean isKeyFrame) {
        if (!enableFileOutput || fileOutputStream == null) {
            return;
        }
        try {
            // Before the first key frame, make sure SPS/PPS are in the file.
            if (isKeyFrame && !spsPpsWritten) {
                writeSpsPpsToFile();
                // If CSD extraction failed, the key frame itself typically carries SPS/PPS.
                if (!spsPpsWritten) {
                    Log.d(TAG, "SPS/PPS will be included in key frame data");
                }
            }
            // MediaCodec output is already Annex-B (0x00000001-delimited); write as-is.
            fileOutputStream.write(data);
            fileOutputStream.flush();
        } catch (IOException e) {
            Log.e(TAG, "Write to file error", e);
        }
    }

    /**
     * Sends one encoded frame, splitting it into JT/T 1076-2016 RTP packets when it exceeds
     * {@link JT1076ProtocolHelper#MAX_PACKET_SIZE}. Also maintains the Last-I-Frame /
     * Last-Frame interval fields required by the video packet header.
     */
    private void sendEncodedData(byte[] data, long timestamp, boolean isKeyFrame) {
        try {
            long currentTime = System.currentTimeMillis();
            long lastIFrameInterval = (lastIFrameTime > 0) ? (currentTime - lastIFrameTime) : 0;
            long lastFrameInterval = (lastFrameTime > 0) ? (currentTime - lastFrameTime) : 0;
            if (isKeyFrame) {
                lastIFrameTime = currentTime;
            }
            lastFrameTime = currentTime;

            int dataType = isKeyFrame
                    ? JT1076ProtocolHelper.DATA_TYPE_I_FRAME
                    : JT1076ProtocolHelper.DATA_TYPE_P_FRAME;

            int offset = 0;
            int totalPackets =
                    (int) Math.ceil((double) data.length / JT1076ProtocolHelper.MAX_PACKET_SIZE);
            for (int i = 0; i < totalPackets; i++) {
                int packetDataSize =
                        Math.min(JT1076ProtocolHelper.MAX_PACKET_SIZE, data.length - offset);
                byte[] packetData = Arrays.copyOfRange(data, offset, offset + packetDataSize);

                // Fragmentation mark: atomic / first / middle / last.
                int packetMark;
                if (totalPackets == 1) {
                    packetMark = JT1076ProtocolHelper.PACKET_MARK_ATOMIC;
                } else if (i == 0) {
                    packetMark = JT1076ProtocolHelper.PACKET_MARK_FIRST;
                } else if (i == totalPackets - 1) {
                    packetMark = JT1076ProtocolHelper.PACKET_MARK_LAST;
                } else {
                    packetMark = JT1076ProtocolHelper.PACKET_MARK_MIDDLE;
                }

                byte[] rtpPacket = protocolHelper.createVideoRtpPacket(
                        packetData, timestamp, dataType, packetMark,
                        lastIFrameInterval, lastFrameInterval);
                protocolHelper.sendUdpPacket(rtpPacket);

                offset += packetDataSize;
            }
        } catch (Exception e) {
            Log.e(TAG, "Send encoded data error", e);
        }
    }

    /** Stops encoding, joins the encode thread, and releases camera/encoder/socket/file. */
    public void stop() {
        if (!isRunning.get()) {
            return;
        }
        isRunning.set(false);

        if (encodeThread != null) {
            try {
                encodeThread.join(2000);
            } catch (InterruptedException e) {
                Log.e(TAG, "Wait encode thread error", e);
            }
        }

        if (usbCamera != null) {
            usbCamera.stopCamera();
        }

        if (encoder != null) {
            try {
                encoder.stop();
                encoder.release();
                encoder = null;
            } catch (Exception e) {
                Log.e(TAG, "Release encoder error", e);
            }
        }

        if (protocolHelper != null) {
            protocolHelper.closeUdpSocket();
        }

        closeFileOutput();
        Log.d(TAG, "H264 encoder stopped");
    }

    /** Flushes and closes the raw-stream output file, if open. */
    private void closeFileOutput() {
        if (fileOutputStream != null) {
            try {
                fileOutputStream.flush();
                fileOutputStream.close();
                Log.d(TAG, "File output closed: " + outputFilePath);
            } catch (IOException e) {
                Log.e(TAG, "Close file output error", e);
            } finally {
                fileOutputStream = null;
                spsPpsWritten = false;
            }
        }
    }

    /** Releases all resources; alias for {@link #stop()}. */
    public void release() {
        stop();
    }

    /**
     * Converts NV12 (YYYY... UVUV) to NV21 (YYYY... VUVU) by swapping each UV pair.
     * Currently unused by this class; kept as a helper for alternate camera formats.
     * The output buffer is reused between calls and reallocated if the frame size changes.
     */
    private byte[] nv12ToNV21(byte[] nv12, int width, int height) {
        int needed = width * height * 3 / 2;
        if (nv21Buffer == null || nv21Buffer.length != needed) {
            nv21Buffer = new byte[needed];
        }
        int framesize = width * height;
        // Copy the Y plane unchanged.
        System.arraycopy(nv12, 0, nv21Buffer, 0, framesize);
        // Swap U and V in each interleaved pair (guard against odd-length input).
        for (int j = framesize; j + 1 < nv12.length; j += 2) {
            nv21Buffer[j] = nv12[j + 1];
            nv21Buffer[j + 1] = nv12[j];
        }
        return nv21Buffer;
    }
}
package com.anyun.h264;

import android.util.Log;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.regex.Pattern;

/**
 * JT/T 1076-2016 protocol helper.
 *
 * <p>Provides the shared plumbing used by the audio and video encoders: UDP sending,
 * SIM-number-to-BCD conversion, and construction of the JT/T 1076-2016 RTP-like packets.
 * Not thread-safe; each encoder instance owns its own helper.
 */
public class JT1076ProtocolHelper {

    private static final String TAG = "JT1076ProtocolHelper";

    // Frame header magic bytes ("01cd").
    public static final byte[] FRAME_HEADER = {0x30, 0x31, 0x63, 0x64};
    // Maximum payload (data body) size per packet.
    public static final int MAX_PACKET_SIZE = 950;

    // Video data types (high nibble of the data-type byte).
    public static final int DATA_TYPE_I_FRAME = 0x00;
    public static final int DATA_TYPE_P_FRAME = 0x10;
    public static final int DATA_TYPE_B_FRAME = 0x20;
    // Audio data type.
    public static final int DATA_TYPE_AUDIO = 0x30;

    // Fragmentation marks (low nibble of the data-type byte).
    public static final int PACKET_MARK_ATOMIC = 0x00;
    public static final int PACKET_MARK_FIRST = 0x01;
    public static final int PACKET_MARK_LAST = 0x02;
    public static final int PACKET_MARK_MIDDLE = 0x03;

    // Default RTP payload types.
    public static final int RTP_PAYLOAD_TYPE_VIDEO = 96;
    public static final int RTP_PAYLOAD_TYPE_AUDIO = 97;

    // Precompiled once; convertSimToBCD runs on every packet.
    private static final Pattern NON_DIGIT = Pattern.compile("[^0-9]");

    // UDP state.
    private String serverIp;
    private int serverPort;
    private DatagramSocket udpSocket;
    private InetAddress serverAddress;

    // Protocol parameters.
    private String simCardNumber = "123456789012"; // 12-digit SIM number
    private byte logicalChannelNumber = 1;
    private short sequenceNumber = 0;              // auto-incremented per packet; wraps at 16 bits

    /** Sets the UDP target address. Must be called before initializeUdpSocket(). */
    public void setServerAddress(String ip, int port) {
        this.serverIp = ip;
        this.serverPort = port;
    }

    /** Sets the SIM card number and logical channel number placed in every packet header. */
    public void setProtocolParams(String simCardNumber, byte logicalChannelNumber) {
        this.simCardNumber = simCardNumber;
        this.logicalChannelNumber = logicalChannelNumber;
    }

    /**
     * Opens the UDP socket and resolves the server address.
     *
     * @return true on success; false if the server IP is unset or resolution/bind fails
     */
    public boolean initializeUdpSocket() {
        try {
            if (serverIp == null || serverIp.isEmpty()) {
                Log.e(TAG, "Server IP not set");
                return false;
            }
            udpSocket = new DatagramSocket();
            serverAddress = InetAddress.getByName(serverIp);
            Log.d(TAG, "UDP socket initialized, target: " + serverIp + ":" + serverPort);
            return true;
        } catch (Exception e) {
            Log.e(TAG, "Initialize UDP socket failed", e);
            return false;
        }
    }

    /** Closes the UDP socket, if open. Safe to call repeatedly. */
    public void closeUdpSocket() {
        if (udpSocket != null) {
            try {
                udpSocket.close();
            } catch (Exception e) {
                Log.e(TAG, "Close UDP socket error", e);
            }
            udpSocket = null;
        }
        serverAddress = null;
    }

    /** Sends one packet to the configured server; logs and drops it if the socket is closed. */
    public void sendUdpPacket(byte[] packet) {
        try {
            if (udpSocket != null && serverAddress != null) {
                DatagramPacket datagramPacket =
                        new DatagramPacket(packet, packet.length, serverAddress, serverPort);
                udpSocket.send(datagramPacket);
            } else {
                Log.w(TAG, "UDP socket not initialized");
            }
        } catch (Exception e) {
            Log.e(TAG, "Send UDP packet error", e);
        }
    }

    /**
     * Converts a SIM number to 6 bytes of packed BCD.
     * Non-digit characters are stripped; the result is left-padded with '0' to 12 digits
     * or truncated to the first 12 digits.
     */
    public byte[] convertSimToBCD(String simNumber) {
        byte[] bcd = new byte[6];
        StringBuilder sim = new StringBuilder();
        if (simNumber != null) {
            sim.append(NON_DIGIT.matcher(simNumber).replaceAll(""));
        }
        while (sim.length() < 12) {
            sim.insert(0, '0');
        }
        if (sim.length() > 12) {
            sim.setLength(12);
        }
        String simStr = sim.toString();
        for (int i = 0; i < 6; i++) {
            int high = Character.digit(simStr.charAt(i * 2), 10);
            int low = Character.digit(simStr.charAt(i * 2 + 1), 10);
            if (high < 0) high = 0;
            if (low < 0) low = 0;
            bcd[i] = (byte) ((high << 4) | low);
        }
        return bcd;
    }

    /** Returns the next packet sequence number (post-incremented; wraps at 16 bits). */
    public short getNextSequenceNumber() {
        return sequenceNumber++;
    }

    /** Resets the packet sequence number to zero. */
    public void resetSequenceNumber() {
        sequenceNumber = 0;
    }

    /**
     * Writes the 24-byte header shared by audio and video packets:
     * frame header (4) + RTP bytes (2) + sequence (2) + SIM BCD (6) +
     * logical channel (1) + dataType|mark (1) + timestamp (8).
     * Consumes one sequence number per call.
     */
    private void putCommonHeader(ByteBuffer buffer, int payloadType, int dataType,
                                 int packetMark, long timestamp) {
        buffer.put(FRAME_HEADER);
        // RTP bytes: V=2, P=0, X=0, CC=1, then M=0 + 7-bit payload type.
        buffer.put((byte) 0x81);
        buffer.put((byte) (payloadType & 0x7F));
        buffer.putShort(getNextSequenceNumber());
        buffer.put(convertSimToBCD(simCardNumber), 0, 6);
        buffer.put(logicalChannelNumber);
        buffer.put((byte) ((dataType & 0xF0) | (packetMark & 0x0F)));
        buffer.putLong(timestamp); // milliseconds
    }

    /**
     * Creates a video packet in JT/T 1076-2016 format.
     *
     * @param dataBody           payload bytes
     * @param timestamp          frame timestamp (ms)
     * @param dataType           DATA_TYPE_I_FRAME / DATA_TYPE_P_FRAME / DATA_TYPE_B_FRAME
     * @param packetMark         fragmentation mark (PACKET_MARK_*)
     * @param lastIFrameInterval ms since the previous I frame
     * @param lastFrameInterval  ms since the previous frame
     * @param payloadType        RTP payload type (default 96)
     * @return the complete packet
     */
    public byte[] createVideoRtpPacket(byte[] dataBody, long timestamp, int dataType,
                                       int packetMark, long lastIFrameInterval,
                                       long lastFrameInterval, int payloadType) {
        // Common header (24) + I-frame interval (2) + frame interval (2) + body length (2) + body.
        int packetSize = 24 + 2 + 2 + 2 + dataBody.length;
        ByteBuffer buffer = ByteBuffer.allocate(packetSize);
        buffer.order(ByteOrder.BIG_ENDIAN);

        putCommonHeader(buffer, payloadType, dataType, packetMark, timestamp);
        buffer.putShort((short) lastIFrameInterval); // ms
        buffer.putShort((short) lastFrameInterval);  // ms
        buffer.putShort((short) dataBody.length);
        buffer.put(dataBody);
        return buffer.array();
    }

    /** Creates a video packet using the default video payload type (96). */
    public byte[] createVideoRtpPacket(byte[] dataBody, long timestamp, int dataType,
                                       int packetMark, long lastIFrameInterval,
                                       long lastFrameInterval) {
        return createVideoRtpPacket(dataBody, timestamp, dataType, packetMark,
                lastIFrameInterval, lastFrameInterval, RTP_PAYLOAD_TYPE_VIDEO);
    }

    /**
     * Creates an audio packet in JT/T 1076-2016 format.
     * Unlike video packets, audio packets carry no Last-I-Frame / Last-Frame interval fields.
     *
     * @param dataBody    payload bytes
     * @param timestamp   frame timestamp (ms)
     * @param dataType    DATA_TYPE_AUDIO
     * @param packetMark  fragmentation mark (PACKET_MARK_*)
     * @param payloadType RTP payload type (default 97)
     * @return the complete packet
     */
    public byte[] createAudioRtpPacket(byte[] dataBody, long timestamp, int dataType,
                                       int packetMark, int payloadType) {
        // Common header (24) + body length (2) + body.
        int packetSize = 24 + 2 + dataBody.length;
        ByteBuffer buffer = ByteBuffer.allocate(packetSize);
        buffer.order(ByteOrder.BIG_ENDIAN);

        putCommonHeader(buffer, payloadType, dataType, packetMark, timestamp);
        buffer.putShort((short) dataBody.length);
        buffer.put(dataBody);
        return buffer.array();
    }

    /** Creates an audio packet using the default audio payload type (97). */
    public byte[] createAudioRtpPacket(byte[] dataBody, long timestamp, int dataType,
                                       int packetMark) {
        return createAudioRtpPacket(dataBody, timestamp, dataType, packetMark,
                RTP_PAYLOAD_TYPE_AUDIO);
    }
}
@@ -1,47 +1,165 @@ package com.anyun.h264 import android.os.Bundle import android.util.Log import androidx.activity.ComponentActivity import androidx.activity.compose.setContent import androidx.activity.enableEdgeToEdge import androidx.compose.foundation.layout.fillMaxSize import androidx.compose.foundation.layout.padding import androidx.compose.material3.Scaffold import androidx.compose.material3.Text import androidx.compose.runtime.Composable import androidx.compose.foundation.layout.* import androidx.compose.material3.* import androidx.compose.runtime.* import androidx.compose.ui.Alignment import androidx.compose.ui.Modifier import androidx.compose.ui.tooling.preview.Preview import androidx.compose.ui.unit.dp import com.anyun.h264.ui.theme.MyApplicationTheme import java.io.File class MainActivity : ComponentActivity() { private var h264Encoder: H264Encoder? = null override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) enableEdgeToEdge() setContent { var isRunning by remember { mutableStateOf(false) } MyApplicationTheme { Scaffold(modifier = Modifier.fillMaxSize()) { innerPadding -> Greeting( name = "Android", modifier = Modifier.padding(innerPadding) MainScreen( modifier = Modifier.padding(innerPadding), isRunning = isRunning, onStartH264Click = { val success = startH264Encoder() if (success) { isRunning = true } }, onStopH264Click = { stopH264Encoder() isRunning = false } ) } } } } override fun onDestroy() { super.onDestroy() stopH264Encoder() } private fun startH264Encoder(): Boolean { if (h264Encoder != null) { Log.w("MainActivity", "H264Encoder is already running") return false } try { // 创建编码器 h264Encoder = H264Encoder() // 设置编码参数 h264Encoder?.setEncoderParams(640, 480, 25, 2000000) // 设置输出文件(可选) val outputFile = File(getExternalFilesDir(null), "test.h264") h264Encoder?.setOutputFile(outputFile.absolutePath) h264Encoder?.setEnableFileOutput(true) // 启用文件输出 // 设置UDP服务器地址(可选) h264Encoder?.setServerAddress("192.168.1.100", 
8888) h264Encoder?.setProtocolParams("123456789012", 1) // 初始化并启动 val cameraIdRange = intArrayOf(1, 2) val resolution = intArrayOf(640, 480) if (h264Encoder?.initialize(cameraIdRange, null, resolution, false) == true) { h264Encoder?.start() Log.d("MainActivity", "H264Encoder started successfully") Log.d("MainActivity", "Output file: ${outputFile.absolutePath}") return true } else { Log.e("MainActivity", "Failed to initialize H264Encoder") h264Encoder = null return false } } catch (e: Exception) { Log.e("MainActivity", "Failed to start H264Encoder", e) h264Encoder = null return false } } private fun stopH264Encoder() { h264Encoder?.let { encoder -> try { encoder.stop() Log.d("MainActivity", "H264Encoder stopped") } catch (e: Exception) { Log.e("MainActivity", "Failed to stop H264Encoder", e) } h264Encoder = null } } } @Composable fun Greeting(name: String, modifier: Modifier = Modifier) { Text( text = "Hello $name!", fun MainScreen( modifier: Modifier = Modifier, isRunning: Boolean, onStartH264Click: () -> Unit, onStopH264Click: () -> Unit ) { Column( modifier = modifier ) } @Preview(showBackground = true) @Composable fun GreetingPreview() { MyApplicationTheme { Greeting("Android") .fillMaxSize() .padding(16.dp), horizontalAlignment = Alignment.CenterHorizontally, verticalArrangement = Arrangement.Center ) { Text( text = "H264 编码器", style = MaterialTheme.typography.headlineMedium ) Spacer(modifier = Modifier.height(32.dp)) Button( onClick = onStartH264Click, enabled = !isRunning, modifier = Modifier .fillMaxWidth() .height(56.dp) ) { Text("启动 H264") } Spacer(modifier = Modifier.height(16.dp)) Button( onClick = onStopH264Click, enabled = isRunning, modifier = Modifier .fillMaxWidth() .height(56.dp), colors = ButtonDefaults.buttonColors( containerColor = MaterialTheme.colorScheme.error ) ) { Text("停止 H264") } Spacer(modifier = Modifier.height(32.dp)) if (isRunning) { Text( text = "编码器运行中...", style = MaterialTheme.typography.bodyMedium, color = 
MaterialTheme.colorScheme.primary ) } } } gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@ #Sat Nov 29 11:22:04 CST 2025 #Sat Nov 29 14:15:17 CST 2025 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-bin.zip distributionUrl=file:///D:/config/gradle-8.10.2-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists settings.gradle
@@ -15,6 +15,7 @@ repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS) repositories { google() mavenCentral() } }