Development board (firmware) side: open one H.264 channel with the video driver and push every encoded frame out over UDP from the encoder callback.
#include "mmf2_siso.h"
#include "module_video.h"
#include "mmf2_pro2_video_config.h"
#include "video_example_media_framework.h"
#include "sensor.h"
#include "video_boot.h"
///////////////
#include "FreeRTOS.h"
#include "task.h"
#include <stdio.h>
#include <sockets.h>
#include "avcodec.h"
#include "string.h"
#define V1_CHANNEL 0
#if USE_SENSOR == SENSOR_GC4653
#define V1_RESOLUTION VIDEO_2K
#define V1_FPS 15
#define V1_GOP 15
#else
#define V1_RESOLUTION VIDEO_VGA
#define V1_FPS 30
#define V1_GOP 30
#endif
#define V1_BPS 2*1024*1024
#define V1_RCMODE 2 // 1: CBR, 2: VBR
#define USE_H265 0
#if USE_H265
#include "sample_h265.h"
#define VIDEO_TYPE VIDEO_HEVC
#define VIDEO_CODEC AV_CODEC_ID_H265
#else
#include "sample_h264.h"
#define VIDEO_TYPE VIDEO_H264
#define VIDEO_CODEC AV_CODEC_ID_H264
#endif
#if V1_RESOLUTION == VIDEO_VGA
#define V1_WIDTH 640
#define V1_HEIGHT 480
#elif V1_RESOLUTION == VIDEO_HD
#define V1_WIDTH 1280
#define V1_HEIGHT 720
#elif V1_RESOLUTION == VIDEO_FHD
#define V1_WIDTH 1920
#define V1_HEIGHT 1080
#elif V1_RESOLUTION == VIDEO_2K
#define V1_WIDTH 2560
#define V1_HEIGHT 1440
#endif
//////////////
static mm_context_t *video_h264_ctx = NULL;
#define max 8000
static int soc = -1;
static _mutex H264_mutex;
static _sema H264_sema;
// Initial encoder parameters for the V1 channel.
static video_params_t vp = {
    .stream_id = V1_CHANNEL,
    .type = VIDEO_H264,
    .resolution = VIDEO_VGA,
    .width = V1_WIDTH,
    .height = V1_HEIGHT,
    .bps = V1_BPS,
    .fps = 30,
    .gop = 3,           // key frame every 3 frames (I P P), matching the receiver's grouping below
    .rc_mode = 1,       // 1: CBR, 2: VBR
    .direct_output = 1,
    .meta_enable = 0,
};
static xQueueHandle muxer_queue;
#define VIDEO_IDLE 0x00
#define VIDEO_START 0X01
#define VIDEO_WRITE 0X02
#define VIDEO_STOP 0x03
#define VIDEO_QUIT 0x04
static int video_status = 0;
static int video_record = VIDEO_IDLE;
#define AD_PAGE_SIZE 320 //20ms
struct sockaddr_in soc_in;
typedef struct {
    uint32_t data_addr;
    uint32_t type;
    uint32_t timestamp;
    uint32_t size;
    uint32_t channel;
} mp4_muxer_ctx;

typedef struct h264_video {
    uint32_t *data_addr;
    uint32_t size;
    uint32_t channel;
} h264;
int indexe=0;
#define max_bit_size 1024
static void video_meta_cb(void *meta)
{
    video_meta_t *m = (video_meta_t *)meta;
    printf("video meta user_buf_len=%d\r\n", m->user_buf_len);
}
// Send one encoded H.264 frame as a single UDP datagram to the Android receiver.
void send_h264(h264 h)
{
    if (soc > -1) {
        uint32_t *data_buf = h.data_addr;
        uint32_t data_len = h.size;
        if (data_len > 0) {
            int sent = sendto(soc, data_buf, data_len, 0, (struct sockaddr *)&soc_in, sizeof(soc_in));
            printf("h264_send--sent=%d------\r\n", sent);
            vTaskDelay(2);
            printf("soc----video_output_cb_ok---------%d\r\n", soc);
        } else {
            printf("soc----video_output_cb_data_len---------%d\r\n", data_len);
        }
    } else {
        printf("soc----video_output_cb_error---------%d\r\n", soc);
    }
    printf("soc----st---------%d\r\n", soc);
}
// Encoder output callback, invoked by the video driver for every encoded frame.
static void video_output_cb(void *param1, void *param2, uint32_t arg)
{
    rtw_mutex_get(&H264_mutex);
    enc2out_t *enc2out = (enc2out_t *)param1;
    if (enc2out->codec == CODEC_H264) {
        // Invalidate the D-cache before the CPU reads the encoder output buffer.
        dcache_invalidate_by_addr((uint32_t *)enc2out->enc_addr, enc2out->enc_len);
        uint32_t *en_ad = (uint32_t *)enc2out->enc_addr;
        uint32_t en_len = enc2out->enc_len;
        printf("last byte of frame==%d\r\n", ((uint8_t *)en_ad)[en_len - 1]);
        h264 h;
        h.data_addr = en_ad;
        h.size = en_len;
        h.channel = enc2out->ch;
        send_h264(h);
        // Return the buffer to the encoder once the frame has been sent.
        video_encbuf_release(enc2out->ch, enc2out->codec, enc2out->enc_len);
    } else if ((enc2out->codec & (CODEC_NV12 | CODEC_RGB | CODEC_NV16)) != 0) {
        dcache_invalidate_by_addr((uint32_t *)enc2out->enc_addr, enc2out->enc_len);
        video_encbuf_release(enc2out->ch, enc2out->codec, enc2out->enc_len);
    }
    rtw_mutex_put(&H264_mutex);
    rtw_up_sema(&H264_sema);
    rtw_down_timeout_sema(&H264_sema, portMAX_DELAY);
}
int stat=1;
static TaskHandle_t pz=NULL;
int soc_stat=1;
// Bring up the encoder and create the UDP socket used by send_h264().
void mmf_h264_video(void)
{
    int iq_addr, sensor_addr, ret;
    rtw_init_sema(&H264_sema, 0);
    rtw_mutex_init(&H264_mutex);

    //////////////////////////////////VIDEO INIT//////////////////////////////////////////
    int voe_heap_size = video_voe_presetting(1, V1_WIDTH, V1_HEIGHT, V1_BPS, 0,
                                             0, 0, 0, 0, 0,
                                             0, 0, 0, 0, 0,
                                             0, 0, 0);
    printf("voe_heap_size---------%d\r\n", voe_heap_size);
    voe_get_sensor_info(2, &iq_addr, &sensor_addr);
    video_init(iq_addr, sensor_addr);
    ret = video_open(&vp, video_output_cb, NULL);
    if (ret >= 0) {
        while (soc_stat) {
            printf("socket_init-----------\r\n");
            soc = socket(AF_INET, SOCK_DGRAM, 0); // UDP
            if (soc < 0) {
                printf("soc_init_error=%d\r\n", errno);
                vTaskDelay(1000); // wait before retrying instead of busy-spinning
                continue;
            }
            soc_stat = 0;
            printf("socket_init---ok, soc=%d\r\n", soc);
            // Destination address and port of the Android receiver.
            soc_in.sin_family = AF_INET;
            soc_in.sin_port = htons(9091);
            //soc_in.sin_addr.s_addr = inet_addr("192.168.158.9");
            //soc_in.sin_addr.s_addr = inet_addr("192.168.211.9");
            soc_in.sin_addr.s_addr = inet_addr("192.168.150.9");
        }
    }
}
static void example_deinit(void)
{
    // The stream was opened with video_open() directly (video_h264_ctx is never
    // opened in this example), so close the channel and the driver directly.
    video_close(V1_CHANNEL);
    video_deinit();
}
Android side (important): you need to know which frame type each buffer is. With an IPPP pattern the GOP is 4, i.e. a key frame every 4 frames. A P-frame cannot be decoded on its own; it depends on the preceding key frame. An I-frame contains a complete picture and can be decoded independently.
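Since the receive loop below keys off the NAL header byte, here is a minimal sketch of classifying an Annex-B H.264 buffer by its first NAL unit (the helper name isKeyFrame is mine, not from the original code). Byte 4 is the NAL header after the 00 00 00 01 start code: 0x27 (decimal 39) is the SPS this encoder prepends to every I-frame, 0x21 (decimal 33) is a non-IDR P slice.

// Hypothetical helper, not part of the original post.
// Assumes the buffer starts with a 4-byte 00 00 00 01 start code,
// which is how the board sends each encoded frame.
static boolean isKeyFrame(byte[] frame) {
    if (frame == null || frame.length < 5) {
        return false;
    }
    int nalType = frame[4] & 0x1F;   // low 5 bits of the NAL header byte
    // 7 = SPS (0x27 here), 5 = IDR slice: start of a decodable key frame.
    // 1 (0x21) = non-IDR slice, i.e. a P-frame.
    return nalType == 7 || nalType == 5;
}

The MediaCodec setup and decode loop from the post: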
// Decoder setup (snippet from inside the init method; "sh" is the SurfaceHolder used for output).
try {
    mediaCodec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 640, 480);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2 * 1024 * 1024);
    mediaFormat.setInteger(MediaFormat.KEY_LOW_LATENCY, 1); // low-latency decode, API 30+
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
    mediaCodec.configure(mediaFormat, sh.getSurface(), null, 0);
    mediaCodec.start();

    // Decode thread: pull assembled H.264 buffers from h264_queue and feed them to the codec.
    new Thread(() -> {
        while (true) {
            byte[] bytes = null;
            try {
                Thread.sleep(5);
                ByteBuffer inputBuffer = null;
                if (h264_queue.size() > 0) {
                    System.out.println("queue size-----------------" + h264_queue.size());
                    long l = System.nanoTime();
                    System.out.println("start decoding");
                    bytes = h264_queue.poll();
                    int i = mediaCodec.dequeueInputBuffer(0);
                    if (i >= 0) {
                        inputBuffer = mediaCodec.getInputBuffer(i);
                        inputBuffer.clear();
                        inputBuffer.put(bytes, 0, bytes.length);
                        mediaCodec.queueInputBuffer(i, 0, bytes.length, 0, 0);
                    } // if no input buffer is free, this group is simply dropped
                    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                    int i1;
                    while ((i1 = mediaCodec.dequeueOutputBuffer(bufferInfo, 0)) >= 0) {
                        mediaCodec.releaseOutputBuffer(i1, true); // render to the surface
                        System.out.println("decode done");
                    }
                    long l1 = System.nanoTime();
                    System.out.println("total time (ms)---" + (l1 - l) / 1000000);
                }
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
        }
    }).start();
} catch (IOException ioException) {
    ioException.printStackTrace();
}
}
// Must be created before startUdpServer() runs, otherwise the writes below throw a NullPointerException.
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
ConcurrentLinkedQueue<byte[]> h264_queue = new ConcurrentLinkedQueue<>();
boolean h64_stat = false;
long l11 = 0;
long l12 = 0;
// UDP receiver: announces itself to the board, then reassembles each group of one
// I-frame plus the following two P-frames (GOP = 3 above) into one buffer for the decode queue.
@RequiresApi(api = Build.VERSION_CODES.P)
void startUdpServer() throws IOException, InterruptedException {
    datagramSocket = new DatagramSocket(3333); // local port; the firmware above sends to 9091, so keep these consistent
    Log.d("udp-----", "udp started");
    String data1 = "1111";
    DatagramPacket datagramPacket2 = new DatagramPacket(data1.getBytes(StandardCharsets.UTF_8), data1.length(), InetAddress.getByAddress(serip), port);
    System.out.println(datagramSocket + "------------");
    datagramSocket.send(datagramPacket2);
    byte[] bytes = new byte[100000];
    DatagramPacket datagramPacket = new DatagramPacket(bytes, bytes.length);
    while (aBoolean) {
        datagramSocket.receive(datagramPacket);
        byte[] data = datagramPacket.getData();
        if (data != null && datagramPacket.getLength() != 0) {
            // Byte 4 is the NAL header after the 00 00 00 01 start code.
            if (data[4] == 39 && !h264IP_isRsv_arr[0]) { // 0x27 = SPS, start of a key frame
                l11 = System.nanoTime();
                byteArrayOutputStream.write(data, 0, datagramPacket.getLength());
                h264IP_isRsv_arr[h264IP_isRsv_arr_index] = true;
                h264IP_isRsv_arr_index++;
                h64_stat = false;
                System.out.println("iz=" + byteArrayOutputStream.size());
                Log.d("udp-----", "key-frame packet, last byte " + data[datagramPacket.getLength() - 1] + "-----" + h264IP_isRsv_arr_index + " length---" + datagramPacket.getLength());
            }
            if (data[4] == 33 && h264IP_isRsv_arr[0]) { // 0x21 = non-IDR slice, a P-frame
                if (h264IP_isRsv_arr[h264IP_isRsv_arr_index - 1] && h264IP_isRsv_arr_index < 3) {
                    byteArrayOutputStream.write(data, 0, datagramPacket.getLength());
                    h264IP_isRsv_arr[h264IP_isRsv_arr_index] = true;
                    h264IP_isRsv_arr_index++;
                    System.out.println("pz=" + byteArrayOutputStream.size());
                    Log.d("udp-----", "P-frame packet, last byte " + data[datagramPacket.getLength() - 1] + "-----" + h264IP_isRsv_arr_index + " length---" + datagramPacket.getLength());
                }
                if (h264IP_isRsv_arr_index >= 3) {
                    System.out.println(Arrays.toString(h264IP_isRsv_arr));
                    System.out.println("ready to decode");
                    System.out.println("zlen=" + byteArrayOutputStream.size());
                    // Hand the assembled I+P+P group to the decode thread.
                    h264_queue.add(byteArrayOutputStream.toByteArray());
                    l12 = System.nanoTime();
                    System.out.println("elapsed (ms)------------------" + (l12 - l11) / 1000000);
                    byteArrayOutputStream.reset();
                    h64_stat = true;
                    Arrays.fill(h264IP_isRsv_arr, false);
                    h264IP_isRsv_arr_index = 0;
                }
            }
        }
    }
}
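For reference, a usage sketch of starting the receive loop: startUdpServer() blocks in receive(), so it has to run off the UI thread. This assumes the caller already satisfies the @RequiresApi level and that the fields used above (datagramSocket, aBoolean, serip, port, h264IP_isRsv_arr) are initialized elsewhere in the class.

// Usage sketch (not from the original post): run the blocking UDP loop on a worker thread.
new Thread(() -> {
    try {
        startUdpServer();
    } catch (IOException | InterruptedException e) {
        e.printStackTrace();
    }
}).start();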