Environment setup
See my earlier posts for the environment setup; I won't repeat it here and will go straight to the code. Note that the three ends (BW21 firmware, Android client, Java server) talk to each other, so make sure the shared data paths are thread-safe.
BW21 code
#include "mmf2_siso.h"
#include "module_video.h"
#include "mmf2_pro2_video_config.h"
#include "video_example_media_framework.h"
#include "sensor.h"
#include "video_boot.h"
#include "isp_ctrl_api.h"
///////////////
#include "FreeRTOS.h"
#include "task.h"
#include <stdio.h>
#include <sockets.h>
#include <audio_api.h>
//#include "avcodec.h"
#include "string.h"
#include "rtsp/rtsp_api.h"
// audio
#define V1_CHANNEL 0
#define V1_RESOLUTION VIDEO_FHD
#define V1_FPS 30
#define V1_GOP 15
#define V1_RCMODE 2 // 1: CBR, 2: VBR
#if V1_RESOLUTION == VIDEO_VGA
#define V1_WIDTH 640
#define V1_HEIGHT 480
#elif V1_RESOLUTION == VIDEO_HD
#define V1_WIDTH 1280
#define V1_HEIGHT 720
#elif V1_RESOLUTION == VIDEO_FHD
#define V1_WIDTH 1920
#define V1_HEIGHT 1080
#elif V1_RESOLUTION == VIDEO_2K
#define V1_WIDTH 2560
#define V1_HEIGHT 1440
#endif
#define V1_BPS 1*1024*1024
typedef long long chang;
//#define V640_480_BPS 1200000
// build with -DVIDEO_EXAMPLE=on (without using any built-in example)
// or build with the -DEXAMPLE=media_video_to_storage example
//////////////
typedef struct h264_video{
uint32_t data_index;
uint32_t *data_addr;
uint32_t size;
uint32_t channel;
uint32_t codec; // keep the codec type so the consumer can release the encoder buffer after sending
}h264;
void send_h264(h264 h );
static mm_context_t *video_h264_ctx = NULL;
#define max 8000
static int soc = -1;
static _mutex H264_mutex;
static _sema H264_sema;
static _mutex audio_mutex;
static _sema audio_sema;
static video_params_t vp={
.stream_id = V1_CHANNEL,
.type = VIDEO_HEVC,
.resolution = V1_RESOLUTION,
.width = V1_WIDTH,
.height = V1_HEIGHT,
.bps = V1_BPS,
.fps = 30,
.gop =4,
.rc_mode = 2,
};
typedef struct {
uint32_t data_addr;
uint32_t type;
uint32_t timestamp;
uint32_t size;
uint32_t channel;
} mp4_muxer_ctx;
int audioa=0;
struct sockaddr_in soc_in;
#define AD_PAGE_SIZE 320 //20ms
#define TX_AD_PAGE_SIZE AD_PAGE_SIZE
#define RX_AD_PAGE_SIZE AD_PAGE_SIZE
#define DMA_AD_PAGE_NUM 4
#define MUXER_QUEUE_DEPTH (20)
#define AUDIO_BUF_QUEUE_DEPTH (20)
//initaudio
static audio_t audio_obj;
static uint8_t ad_dma_txdata[TX_AD_PAGE_SIZE * DMA_AD_PAGE_NUM]__attribute__((aligned(0x20)));
static uint8_t ad_dma_rxdata[RX_AD_PAGE_SIZE * DMA_AD_PAGE_NUM]__attribute__((aligned(0x20)));
static xQueueHandle receve_queue;
static xQueueHandle h26x_queue;
static uint8_t audio_copy_buf[AUDIO_BUF_QUEUE_DEPTH][AD_PAGE_SIZE]; // ring of copies of received audio pages, consumed by audio_thread
static uint32_t audio_copy_idx = 0;
static void audio_rx_irq(uint32_t arg, uint8_t *pbuf)
{
audio_t *obj = (audio_t *)arg;
BaseType_t xHigherPriorityTaskWoken = pdFALSE;
mp4_muxer_ctx ctx;
// copy the DMA page into a static slot so the data stays valid until
// audio_thread has consumed it (never malloc inside an ISR)
uint8_t *buf = audio_copy_buf[audio_copy_idx];
audio_copy_idx = (audio_copy_idx + 1) % AUDIO_BUF_QUEUE_DEPTH;
memcpy((void *)buf, (void *)pbuf, TX_AD_PAGE_SIZE);
ctx.data_addr = (uint32_t)buf;
ctx.channel = 1;
ctx.size = TX_AD_PAGE_SIZE;
ctx.type = 22;
xQueueSendFromISR(receve_queue, &ctx, &xHigherPriorityTaskWoken);
audio_set_rx_page(obj); // submit a new page for receive
if (xHigherPriorityTaskWoken) {
taskYIELD();
}
}
static void setting_audio_amic(void)
{
uint32_t i;
printf("Start audio loop example: Use AMic\r\n");
//Audio Init
audio_init(&audio_obj, OUTPUT_SINGLE_EDNED, MIC_SINGLE_EDNED, AUDIO_CODEC_2p8V);
audio_set_param(&audio_obj, ASR_8KHZ, WL_16BIT);
audio_set_dma_buffer(&audio_obj, ad_dma_txdata, ad_dma_rxdata, AD_PAGE_SIZE, DMA_AD_PAGE_NUM);
//Init RX dma
audio_rx_irq_handler(&audio_obj, (audio_irq_handler)audio_rx_irq, (uint32_t *)&audio_obj);
/* Use (DMA page count -1) because occur RX interrupt in first */
for (i = 0; i < (DMA_AD_PAGE_NUM - 1); i++) {
audio_set_rx_page(&audio_obj);
}
audio_mic_analog_gain(&audio_obj, ENABLE, MIC_20DB);
audio_rx_start(&audio_obj);
}
extern uint8_t encodeA(short pcm_val);
void audio_thread(void*p){
mp4_muxer_ctx ctx;
unsigned char *audio_buf = malloc(AD_PAGE_SIZE / 2);
while (1){
BaseType_t ba= xQueueReceive(receve_queue,(void *)&ctx,20);
if (ba==pdTRUE){
if (audio_buf!=NULL){
short *input_buf = (short *)ctx.data_addr;
for (int i = 0; i < AD_PAGE_SIZE / sizeof(short); i++) { //For G711 encode
audio_buf[i] = encodeA(input_buf[i]);
}
int send_to= sendto(soc, audio_buf,TX_AD_PAGE_SIZE/2, 0, (struct sockaddr *) &soc_in, sizeof(soc_in));
printf("send--to audio---------%d\r\n",send_to);
}
} else{
printf("Queue receive timed out\r\n");
continue;
}
}
}
#define buf_size 1024
typedef unsigned char byte;
byte data_bufer[1024];
static void recv_fun(void *test){
socklen_t addr_len = sizeof(soc_in);
while (1){
int d = recvfrom(soc, data_bufer, sizeof(data_bufer), 0, (struct sockaddr *)&soc_in, &addr_len); // the address length must be passed by pointer
if (d>0){
if (data_bufer[0]==12){
rtw_mutex_get(&H264_mutex);
video_ctrl(0,VIDEO_FORCE_IFRAME,1);
vTaskDelay(10);
printf("out_IFRAME-------------------------------------------------\r\n");
rtw_mutex_put(&H264_mutex);
rtw_up_sema(&H264_sema);
rtw_down_timeout_sema(&H264_sema, portMAX_DELAY);
}
}
}
}
void send_h264(h264 h ){
rtw_mutex_get(&H264_mutex);
uint32_t *data_buf=NULL;
uint32_t data_len=0;
if (soc>-1){
data_buf=h.data_addr;
data_len=h.size;
if (data_len>0){
int send_to= sendto(soc, data_buf,data_len, 0, (struct sockaddr *) &soc_in, sizeof(soc_in));
// printf("send--to---------%d\r\n",send_to);
goto exit;
}
} else{
printf("soc----video_output_cb_error---------%d\r\n",soc);
goto exit;
}
////
exit:
rtw_mutex_put(&H264_mutex);
rtw_up_sema(&H264_sema);
rtw_down_timeout_sema(&H264_sema, portMAX_DELAY);
}
void send_h264test(h264 h ){
rtw_mutex_get(&H264_mutex);
uint32_t *data_buf=NULL;
uint32_t data_len=0;
if (soc>-1){
data_buf=h.data_addr;
data_len=h.size;
if (data_len>0){
// int ex_time;
// isp_get_exposure_time(&ex_time);
// printf("ex_time---------%d\r\n",ex_time);
// uint32_t count_size=data_len+ sizeof(uint32_t);
uint32_t total_u32_count = (data_len + sizeof(uint32_t) - 1) / sizeof(uint32_t) + 1;
uint32_t *tem = (uint32_t *)malloc(total_u32_count * sizeof(uint32_t));
if (tem!=NULL){
memcpy(tem,data_buf,data_len);
tem[total_u32_count - 1] = 100;
int send_to= sendto(soc, tem,total_u32_count * sizeof(uint32_t), 0, (struct sockaddr *) &soc_in, sizeof(soc_in));
if (send_to>0){
// printf("send--ok---------%d\r\n",send_to);
} else{
printf("send--error---------%d\r\n",send_to);
video_ctrl(0,VIDEO_FORCE_IFRAME,1);
printf("out_IFRAME-------------------------------------------------\r\n");
}
free(tem);
tem=NULL;
} else{
printf("malloc...error\r\n");
}
goto exit;
}
} else{
printf("soc----video_output_cb_error---------%d\r\n",soc);
goto exit;
}
////
exit:
rtw_mutex_put(&H264_mutex);
rtw_up_sema(&H264_sema);
rtw_down_timeout_sema(&H264_sema, portMAX_DELAY);
}
static void video_output_cb(void *param1, void *param2, uint32_t arg)
{
enc2out_t *enc2out = (enc2out_t *)param1;
if((enc2out->codec & (CODEC_H264 | CODEC_HEVC | CODEC_JPEG)) != 0) {
dcache_invalidate_by_addr((uint32_t *)enc2out->enc_addr, enc2out->enc_len);
uint8_t *ptr = (uint8_t *)enc2out->enc_addr;
if (ptr[0] != 0 || ptr[1] != 0) {
printf("error--------ptr[0]==%d--ptr[1]==%d\r\n",ptr[0],ptr[1]);
video_encbuf_release(enc2out->ch, enc2out->codec, enc2out->enc_len);
video_ctrl(0,VIDEO_FORCE_IFRAME,1);
return;
}
uint32_t *en_ad=(uint32_t *)enc2out->enc_addr;
uint32_t en_len= enc2out->enc_len;
h264 h;
h.data_addr=en_ad;
h.size= en_len;
h.channel=enc2out->ch;
h.codec=enc2out->codec; // needed so h26x_thread can release this encoder buffer
if (xQueueSend(h26x_queue,&h,10)==pdTRUE){
// do NOT release here: the buffer is still referenced by h26x_thread,
// which releases it after the frame has been sent
} else{
printf("h26x_queue full, dropping frame\r\n");
video_encbuf_release(enc2out->ch, enc2out->codec, enc2out->enc_len);
}
}else if ((enc2out->codec & (CODEC_NV12 | CODEC_RGB | CODEC_NV16)) != 0) {
dcache_invalidate_by_addr((uint32_t *)enc2out->enc_addr, enc2out->enc_len);
video_encbuf_release(enc2out->ch, enc2out->codec, enc2out->enc_len);
}
}
void h26x_thread(void *d){
h264 h264_t;
while (1){
BaseType_t ba = xQueueReceive(h26x_queue, &h264_t, 10);
if (ba==pdTRUE){
send_h264test(h264_t);
video_encbuf_release(h264_t.channel, h264_t.codec, h264_t.size); // release only after the frame has been sent
}
}
}
static TaskHandle_t pz=NULL;
static TaskHandle_t au=NULL;
static TaskHandle_t h26x=NULL;
int soc_stat=1;
void mmf_h264_video(void)
{
receve_queue = xQueueCreate(AUDIO_BUF_QUEUE_DEPTH, sizeof(mp4_muxer_ctx)); // items are whole mp4_muxer_ctx structs, not pointers
h26x_queue = xQueueCreate(20, sizeof(h264));
if (h26x_queue!=NULL){
printf("h26x_queue_create----ok\r\n");
}
if (receve_queue!=NULL){
printf("receve_create----ok\r\n");
}
setting_audio_amic();
int iq_addr, sensor_addr, ret;
rtw_init_sema(&H264_sema, 0);
rtw_mutex_init(&H264_mutex);
rtw_init_sema(&audio_sema, 0);
rtw_mutex_init(&audio_mutex);
//////////////////////////////////VIDEO INIT//////////////////////////////////////////
int voe_heap_size = video_voe_presetting(1, V1_WIDTH, V1_HEIGHT, V1_BPS, 0,
0, 0, 0, 0, 0,
0, 0, 0, 0, 0,
0, 0, 0);
printf("voe_heap_size---------%d\r\n",voe_heap_size);
voe_get_sensor_info(1, &iq_addr, &sensor_addr);
video_init(iq_addr, sensor_addr);
int t2=isp_set_exposure_time(10000);
printf("t3---------%d\r\n",t2);
vTaskDelay(1000);
ret= video_open(&vp,video_output_cb,NULL);
if (ret>=0){
while (soc_stat){
printf("socket_init-----------\r\n");
soc = socket(AF_INET, SOCK_DGRAM, 0);//udp
if (soc < 0) {
printf("soc_init_error=%d\r\n", errno);
continue;
}
soc_stat=0;
printf("socket_init---ok\r\n");
printf("udp_start--------udp---------\r\n");
printf("soc==============%d\r\n", soc);
soc_in.sin_family = AF_INET;
soc_in.sin_port = htons(9091);
soc_in.sin_addr.s_addr = inet_addr("192.168.158.9"); // destination: your own machine or a remote server; without this, sendto() has no target
uint8_t s[]={99};
int in= sendto(soc, s, 1, 0, (struct sockaddr *) &soc_in, sizeof(soc_in));
printf("in===----------------------%d\r\n",in);
audioa=1;
xTaskCreate(recv_fun,"recv_fun",1024,NULL,tskIDLE_PRIORITY + 1,&pz);
xTaskCreate(audio_thread,"audio_thread",1024,NULL,tskIDLE_PRIORITY + 1,&au);
xTaskCreate(h26x_thread,"video_thread",1024,NULL,tskIDLE_PRIORITY + 1,&h26x);
}
}
}
One more note: three SDK AT-command functions are used to connect to Wi-Fi:
extern void fATW0(void *arg); // set the SSID
extern void fATW1(void *arg); // set the password
extern void fATWC(void *arg); // start the connection
Call them in this order (wifi_name / wifi_pass are your own SSID and password strings):
fATW0(wifi_name);
fATW1(wifi_pass);
fATWC("ATWC");
Android
Only the audio and video decoding code is shown here.
MediaCodec audio_g711;
private void g711_doc() throws IOException {
audio_g711=MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_AUDIO_G711_ALAW);
MediaFormat mediaFormat=MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_G711_ALAW,8000,1); // the stream is mono 8 kHz G.711 A-law
audio_g711.configure(mediaFormat, null, null,0);
audio_g711.start();
int minBufferSize = AudioTrack.getMinBufferSize(
8000, // sample rate
AudioFormat.CHANNEL_OUT_MONO, // channel configuration
AudioFormat.ENCODING_PCM_16BIT // PCM encoding
);
AudioTrack audioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC,
8000,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT,
minBufferSize,
AudioTrack.MODE_STREAM
);
audioTrack.play(); // start the track once; the loop below only writes PCM data
new Thread(()->{
while (Decoder_stat){
if (audio_g711s_queue.size()>0){
Audio_G711 poll = audio_g711s_queue.poll();
if (poll!=null){
int ind= audio_g711.dequeueInputBuffer(5000);
if (ind>=0){
ByteBuffer inputBuffer = audio_g711.getInputBuffer(ind);
inputBuffer.clear();
inputBuffer.put(poll.getData(),0,poll.getLen());
audio_g711.queueInputBuffer(ind,0,poll.getLen(),0,0);
MediaCodec.BufferInfo bufferInfo=new MediaCodec.BufferInfo();
int out_ind = audio_g711.dequeueOutputBuffer(bufferInfo, 5000);
while (out_ind>=0){
ByteBuffer outputBuffer = audio_g711.getOutputBuffer(out_ind);
byte[] decodedData = new byte[bufferInfo.size];
outputBuffer.get(decodedData);
audioTrack.write(decodedData, 0, decodedData.length);
audio_g711.releaseOutputBuffer(out_ind,false);
out_ind = audio_g711.dequeueOutputBuffer(bufferInfo, 5000);
}
}
}
}
}
}).start();
}
MediaFormat mediaFormat;
boolean Decoder_stat=false;
boolean decoder_handler=true;
@RequiresApi(api = Build.VERSION_CODES.R)
private void h265_Decoder_Thread() throws IOException {
mediaCodec =MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);
MediaCodecInfo codecInfo = mediaCodec.getCodecInfo();
Log.d(tag, "h264_Decoder_Thread() 使用解码器 "+codecInfo.getName());
mediaFormat=MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC,Video_F.fhd.getW(), Video_F.fhd.getH());
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE,30);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE,2*1024*1024);
// mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE,4*1024*1024);
mediaFormat.setInteger(MediaCodec.PARAMETER_KEY_LOW_LATENCY,1);
mediaCodec.configure(mediaFormat,sh.getSurface(),null,0);
Log.d(tag, "h264_Decoder_Thread() mediaCodec.configure(mediaFormat,sh.getSurface(),null,0)");
mediaCodec.start();
Log.d(tag, "h264_Decoder_Thread() mediaCodec.start()");
Decoder_stat=true;
new Thread(()->{
while (Decoder_stat){
Data_Video d_v=null;
try {
if (h265_queue.size()>0){
System.out.println("-------quw=="+h265_queue.size());
d_v=h265_queue.poll();
if (d_v!=null){
// System.out.println("-----------------=="+d_v.getData()[4]);
decoder_handler=true;
int i=-1;
i = mediaCodec.dequeueInputBuffer(5000);
if (i>=0){
ByteBuffer inputBuffer = mediaCodec.getInputBuffer(i);
inputBuffer.clear();
inputBuffer.put(d_v.getData(),0, d_v.getLen());
mediaCodec.queueInputBuffer(i,0, d_v.getLen(),0,0);
MediaCodec.BufferInfo bufferInfo=new MediaCodec.BufferInfo();
int i1=mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (i1>=0){
if ((bufferInfo.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
decoder_handler=false;
break;
}
mediaCodec.releaseOutputBuffer(i1,0);
i1=mediaCodec.dequeueOutputBuffer(bufferInfo,0);
}
}else {
// thread_lock.lock();
// h265_queue.clear();
// h265_I_stat=false;
// mediaCodec.flush();
// mediaCodec.stop();
// mediaCodec.configure(mediaFormat,sh.getSurface(),null,0);
// mediaCodec.start();
// thread_lock.unlock();
//Thread.sleep(50);
continue;
}
decoder_handler=false;
}
}
decoder_handler=false;
} catch (MediaCodec.CodecException e) {
if (e.isRecoverable()){
thread_lock.lock();
//mediaCodec.flush();
mediaCodec.stop();
mediaCodec.configure(mediaFormat,sh.getSurface(),null,0);
mediaCodec.start();
h265_I_stat=false;
h265_queue.clear();
byteArrayOutputStream.reset();
thread_lock.unlock();
System.out.println("----------------------------------------------------");
}
if (e.isTransient()){
System.out.println("h264_Decoder_Thread----------e.isTransient()----------"+e.getMessage());
}
// System.out.println("h264_Decoder_Thread----------e.getMessage()----------"+e.getMessage());
// System.out.println("h264_Decoder_Thread----e.getErrorCode()----------------"+e.getErrorCode());
// System.out.println("h264_Decoder_Thread----------e.getDiagnosticInfo()----------"+e.getDiagnosticInfo());
e.printStackTrace();
}catch (IllegalStateException is){
Log.d(tag, "h264_Decoder_Thread() IllegalStateException "+is.getLocalizedMessage());
is.printStackTrace();
}
}
}).start();
}
Note: the decoder must be given a keyframe (I-frame) first; otherwise it either cannot decode at all or the picture comes out corrupted.
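One simple way to enforce this on the client is to look at the HEVC NAL unit type of each incoming packet and drop everything until the first keyframe has been seen. The sketch below is only an illustration under a few assumptions (each UDP packet starts with the Annex-B start code 00 00 00 01; the names seenKeyFrame / isHevcKeyFrame and the Data_Video(byte[], int) constructor are made up for the example), not code taken from this project:

// Illustrative sketch: gate the decoder queue on the first HEVC keyframe.
// Assumes every packet starts with the Annex-B start code 00 00 00 01;
// seenKeyFrame / isHevcKeyFrame and the Data_Video(byte[], int) constructor are assumed names.
private boolean seenKeyFrame = false;

private boolean isHevcKeyFrame(byte[] pkt, int len) {
    if (len < 6 || pkt[0] != 0 || pkt[1] != 0 || pkt[2] != 0 || pkt[3] != 1) {
        return false; // not an Annex-B NAL unit
    }
    int nalType = ((pkt[4] & 0xFF) >> 1) & 0x3F; // HEVC nal_unit_type (bits 1..6 of the first header byte)
    // 16..21 are IRAP (keyframe) pictures; 32..34 are VPS/SPS/PPS,
    // which encoders typically send immediately before an IDR frame.
    return (nalType >= 16 && nalType <= 21) || (nalType >= 32 && nalType <= 34);
}

private void offerToDecoder(byte[] pkt, int len) {
    if (!seenKeyFrame) {
        if (!isHevcKeyFrame(pkt, len)) {
            return; // drop P-frames until the first keyframe arrives
        }
        seenKeyFrame = true;
    }
    h265_queue.add(new Data_Video(pkt, len));
}

Treating parameter sets (VPS/SPS/PPS) as a valid start matters because most encoders send them right in front of the IDR frame, so the very first usable packet may not itself be a picture NAL.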
JAVA_SERVER
@Override
public void startUdpServer_h26x(int port) throws Exception {
System.out.println("udp_start..........");
h26x_udp_server= new DatagramSocket(port);
while (true){
byte[] data =new byte[data_len];
h26x_receive_pack=new DatagramPacket(data,data_len);
h26x_udp_server.receive(h26x_receive_pack);
byte[] data_pack = h26x_receive_pack.getData();
int length = h26x_receive_pack.getLength();
// System.out.println(data_pack[length-4]+"=="+data_pack[length-3]+"=="+data_pack[length-2]+"=="+data_pack[length-1]);
if (data_pack != null && data_pack.length != 0 && length != 0) {
System.out.println("h26x_quer_size===="+h26x_video_que.size());
if (udpClientMap.size()>0&&length==160){
// wrap the received packet in an audio input stream
ByteArrayInputStream bais = new ByteArrayInputStream(data_pack,0,length);
AudioInputStream audioInputStream = new AudioInputStream(bais, audioFormat, length / audioFormat.getFrameSize());
// check whether the audio format matches G.711 (8 kHz, 8-bit, mono)
if (audioFormat.getSampleRate() == 8000.0f && audioFormat.getSampleSizeInBits() == 8 && audioFormat.getChannels() == 1) {
// System.out.println("audio data matches the G.711 format");
Audio_G711 audioG711 = new Audio_G711();
audioG711.setData(data_pack);
audioG711.setLen(length);
audioG711.setServer_udp(h26x_udp_server);
audioG711.setDatagramPacket(h26x_receive_pack);
audioG711s.add(audioG711);
// System.out.println(audioG711s.size());
} else {
//System.out.println("音频数据不符合 G.711 编码的特征");
}
}else {
if (audioG711s.size()>0){
audioG711s.clear();
}
}
if ((data_pack[0]==0&&data_pack[1]==0&&data_pack[2]==0&&data_pack[3]==1&&data_pack[4]==h265_I)
||(data_pack[0]==0&&data_pack[1]==0&&data_pack[2]==0&&data_pack[3]==1&&data_pack[4]==h265_P)
||(data_pack[0]==0&&data_pack[1]==0&&data_pack[2]==0&&data_pack[3]==1&&data_pack[4]==h264_I)
||(data_pack[0]==0&&data_pack[1]==0&&data_pack[2]==0&&data_pack[3]==1&&data_pack[4]==h264_P)
){
if (udpClientMap.size()>0){
if (data_pack[length-4]==100){
H26x_Video_Mode h26xVideoMode = new H26x_Video_Mode(data_pack, length);
h26xVideoMode.setServer_udp(h26x_udp_server);
h26xVideoMode.setDatagramPacket(h26x_receive_pack);
h26x_video_que.add(h26xVideoMode);
}
}else {
if (h26x_video_que.size()>0){
h26x_video_que.clear();
}
}
}
else {
if (length==160){
continue;
}
OtherData otherData = new OtherData();
otherData.setDatagramPacket(h26x_receive_pack);
otherData.setData(data_pack);
otherData.setLen(length);
otherData.setServer_udp(this.h26x_udp_server);
otherDataMap.add(otherData);
}
}
}
}
Note: since three worker threads are used here, pay attention to thread safety; each thread consumes exactly one queue.
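A minimal sketch of that layout, using one bounded java.util.concurrent queue per stream (the class, field and thread names below are illustrative and not taken from the project):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

// Sketch only: one bounded, thread-safe queue per stream and one consumer thread per queue.
class StreamQueues {
    final BlockingQueue<H26x_Video_Mode> videoQueue = new ArrayBlockingQueue<>(64);
    final BlockingQueue<Audio_G711> audioQueue = new ArrayBlockingQueue<>(64);
    final BlockingQueue<OtherData> otherQueue = new ArrayBlockingQueue<>(64);

    // The UDP receive loop only classifies a packet and offers it to the right queue;
    // offer() drops the packet instead of blocking the receive thread when the queue is full.
    void onVideoPacket(H26x_Video_Mode v) {
        videoQueue.offer(v);
    }

    // Each worker owns exactly one queue, so no additional locking is required.
    void startVideoWorker() {
        new Thread(() -> {
            try {
                while (true) {
                    H26x_Video_Mode frame = videoQueue.take(); // blocks until a frame is available
                    // forward "frame" to the connected clients here
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }, "video-worker").start();
    }
}

With a bounded BlockingQueue per stream, offer() simply drops a packet when a consumer falls behind instead of blocking the receive loop, and take() blocks the worker until data arrives, so no extra locking is needed.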