
An Example of Implementing RTMP Push from Unity3D on Android

Author: 音视频牛哥

As with RTMP or RTSP players under Unity3D, many developers struggle with how to capture and encode data in a Unity environment and push it to a streaming media server in real time with high efficiency and low latency, so as to build a low-latency publish/playback solution for Unity scenes. This article walks through the RTMP push options.

For screen capture, there are two approaches:

1. Wrap the native Android screen-capture module directly, expose an interface to Unity, and, once screen-capture permission is granted, grab the screen data and push it;

2. If you only need to push the Unity window or camera data, grab the raw data to be pushed inside Unity, then wrap the native RTMP push interface and call the native SDK to publish it. The advantage of this approach is that you can decide exactly which data to capture; you only need to feed it to the interfaces exposed by the native SDK. This article covers this second approach.

Taking the Android platform as an example, this article shows RTMP push from a Unity environment: data capture is done in Unity, while encoding and pushing are handled by calling the interfaces wrapped around the native Android RTMP live-push SDK from 大牛直播SDK (official), which implements the RTMP push efficiently. Without further ado, let's look at the result first.

The screenshot below shows the screen being captured in a Unity scene on Android, encoded and pushed to an RTMP server, and then pulled and played by a Windows player. To make the latency visible, the current time is displayed in the Unity window on the Android side; as you can see, the end-to-end delay is on the order of milliseconds:

Data capture and push

Capturing data in Unity is relatively simple; RGB24 data can be obtained quite easily:

texture_ = new Texture2D(video_width_, video_height_, TextureFormat.RGB24, false);

texture_.ReadPixels(new Rect(0, 0, video_width_, video_height_), 0, 0, false);

texture_.Apply();
  

Then call texture_.GetRawTextureData() to obtain the data.

Once you have the data, call the NT_PB_U3D_OnCaptureVideoRGB24PtrData() interface wrapped from the native SDK to deliver it.
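
The capture loop itself is not listed in the article. Below is a minimal sketch of one way to drive it: a coroutine that waits for the end of each rendered frame, reads the screen into the RGB24 texture, and hands the raw bytes to the SDK wrapper. The exact parameter list of NT_PB_U3D_OnCaptureVideoRGB24PtrData() is not shown here, so the arguments below (handle, buffer, stride, width, height) are assumptions; check the SDK's Unity wrapper for the real signature.

    // Minimal capture-loop sketch; intended as a member of the same script class.
    private IEnumerator CaptureLoop()
    {
        while (is_running)
        {
            // ReadPixels() may only be called after rendering has finished for the frame.
            yield return new WaitForEndOfFrame();

            texture_.ReadPixels(new Rect(0, 0, video_width_, video_height_), 0, 0, false);
            texture_.Apply();

            // RGB24: 3 bytes per pixel; Unity's origin is bottom-left, so a vertical
            // flip may be required depending on what the SDK expects.
            byte[] rgb24 = texture_.GetRawTextureData();

            // Assumed parameter list (handle, buffer, row stride, width, height);
            // adjust to the actual wrapper signature.
            NT_PB_U3D_OnCaptureVideoRGB24PtrData(pusher_handle_, rgb24,
                video_width_ * 3, video_width_, video_height_);
        }
    }

StartCoroutine(CaptureLoop()) can then be kicked off right after NT_PB_U3D_StartPublisher() succeeds; the loop exits once is_running is cleared in ClosePusher().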

Basic call flow

    private void Start()
    {
        game_object_ = this.gameObject.name;

        // Get the current Android activity through the Unity player class.
        AndroidJavaClass android_class = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        java_obj_cur_activity_ = android_class.GetStatic<AndroidJavaObject>("currentActivity");

        // Create the Java-side wrapper object of the push SDK.
        pusher_obj_ = new AndroidJavaObject("com.daniulive.smartpublisher.SmartPublisherUnity3d");

        NT_PB_U3D_Init();

        //NT_U3D_SetSDKClientKey("", "", 0);

        btn_encode_mode_.onClick.AddListener(OnEncodeModeBtnClicked);

        btn_pusher_.onClick.AddListener(OnPusherBtnClicked);

        btn_mute_.onClick.AddListener(OnMuteBtnClicked);
    }
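
The bodies of the NT_PB_U3D_* wrappers are not listed in the article. In the bridge pattern used above, each wrapper normally just forwards the call to the SmartPublisherUnity3d Java object created in Start(). The sketch below is illustrative only; the actual Java method names, parameter lists and the type of the pusher handle come from the SDK package and should be taken from its Unity sample.

    // Illustrative wrapper sketch (assumed method names and signatures):
    // forward calls to the Java-side SmartPublisherUnity3d object.
    private void NT_PB_U3D_Init()
    {
        // Assumed: the Java-side init takes the current activity/context.
        pusher_obj_.Call("NT_PB_U3D_Init", java_obj_cur_activity_);
    }

    private long NT_PB_U3D_Open(int audio_opt, int video_opt, int width, int height)
    {
        // Assumed: returns a native pusher handle (0 on failure).
        return pusher_obj_.Call<long>("NT_PB_U3D_Open", audio_opt, video_opt, width, height);
    }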

After the interface is initialized, call the Push() interface:

    public void Push()
    {
        if (is_running)
        {
            Debug.Log("已推送..");   
            return;
        }

        if (texture_ != null)
        {
            UnityEngine.Object.Destroy(texture_);
            texture_ = null;
        }

        video_width_ = Screen.width;
        video_height_ = Screen.height;

        scale_width_ = (video_width_ + 1) / 2;
        scale_height_ = (video_height_ + 1) / 2;

        if (scale_width_ % 2 != 0)
        {
            scale_width_ = scale_width_ + 1;
        }

        if (scale_height_ % 2 != 0)
        {
            scale_height_ = scale_height_ + 1;
        }

        texture_ = new Texture2D(video_width_, video_height_, TextureFormat.RGB24, false);

        // Get the push URL from the input field (fall back to a default test URL below)
        string url = input_url_.text.Trim();

        if (!url.StartsWith("rtmp://"))
        {
            push_url_ = "rtmp://192.168.0.199:1935/hls/stream1";
        }
        else
        {
            push_url_ = url;
        }

        OpenPusher();

        if (pusher_handle_ == 0)
            return;

        NT_PB_U3D_Set_Game_Object(pusher_handle_, game_object_);

        /* ++ Pre-push parameter configuration can be added here ++ */

        InitAndSetConfig();

        NT_PB_U3D_SetPushUrl(pusher_handle_, push_url_);
        /* -- Pre-push parameter configuration ends here -- */

        int flag = NT_PB_U3D_StartPublisher(pusher_handle_);

        if (flag == DANIULIVE_RETURN_OK)
        {
            Debug.Log("Push started successfully..");
        }
        else
        {
            Debug.LogError("Failed to start push..");
        }

        is_running = true;
    }

Call OpenPusher()

    private void OpenPusher()
    {
        if (java_obj_cur_activity_ == null)
        {
            Debug.LogError("current activity is null");
            return;
        }

        // Audio/video options passed to the native Open() call; see the SDK
        // documentation for the meaning of each value.
        int audio_opt = 1;
        int video_opt = 1;

        pusher_handle_ = NT_PB_U3D_Open(audio_opt, video_opt, video_width_, video_height_);

        if (pusher_handle_ != 0)
            Debug.Log("NT_PB_U3D_Open success");
        else
            Debug.LogError("NT_PB_U3D_Open fail");
    }

InitAndSetConfig()

    private void InitAndSetConfig()
    {
        if (is_hw_encode_)
        {
            int h264HWKbps = setHardwareEncoderKbps(true, video_width_, video_height_);

            Debug.Log("h264HWKbps: " + h264HWKbps);

            int isSupportH264HWEncoder = NT_PB_U3D_SetVideoHWEncoder(pusher_handle_, h264HWKbps);

            if (isSupportH264HWEncoder == 0)
            {
                Debug.Log("Great, it supports h.264 hardware encoder!");
            }
        }
        else {
            if (is_sw_vbr_mode_) //H.264 software encoder
            {
                int is_enable_vbr = 1;
                int video_quality = CalVideoQuality(video_width_, video_height_, true);
                int vbr_max_bitrate = CalVbrMaxKBitRate(video_width_, video_height_);

                NT_PB_U3D_SetSwVBRMode(pusher_handle_, is_enable_vbr, video_quality, vbr_max_bitrate);
                //NT_PB_U3D_SetSWVideoEncoderSpeed(pusher_handle_, 2);
            }
        }

        NT_PB_U3D_SetAudioCodecType(pusher_handle_, 1);

        NT_PB_U3D_SetFPS(pusher_handle_, 25);               // 25 fps

        NT_PB_U3D_SetGopInterval(pusher_handle_, 25*2);     // GOP length: 2 seconds at 25 fps

        //NT_PB_U3D_SetSWVideoBitRate(pusher_handle_, 600, 1200);
    }
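
setHardwareEncoderKbps(), CalVideoQuality() and CalVbrMaxKBitRate() are helper methods that the article does not list; they only map the capture resolution to an encoder bitrate or quality level. The sketch below shows one plausible mapping with placeholder numbers; the quality scale expected by NT_PB_U3D_SetSwVBRMode() is SDK-defined, so tune all values against the SDK documentation and your own content.

    // Illustrative helpers with placeholder numbers: derive bitrate/quality from resolution.
    private int setHardwareEncoderKbps(bool is_h264, int width, int height)
    {
        int pixels = width * height;

        if (pixels <= 640 * 360)   return 800;     // kbps, placeholder values
        if (pixels <= 1280 * 720)  return 1600;
        if (pixels <= 1920 * 1080) return 3000;
        return 4500;
    }

    private int CalVideoQuality(int width, int height, bool is_h264)
    {
        // Placeholder quality level; the valid range and meaning are defined by the SDK.
        return (width * height <= 1280 * 720) ? 23 : 26;
    }

    private int CalVbrMaxKBitRate(int width, int height)
    {
        // Cap the VBR bitrate somewhat above the fixed-bitrate figures above.
        return (int)(setHardwareEncoderKbps(true, width, height) * 1.5);
    }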

ClosePusher()

    private void ClosePusher()
    {
        if (texture_ != null)
        {
            UnityEngine.Object.Destroy(texture_);
            texture_ = null;
        }

        int flag = NT_PB_U3D_StopPublisher(pusher_handle_);
        
        if (flag == DANIULIVE_RETURN_OK)
        {
            Debug.Log("Publisher stopped successfully..");
        }
        else
        {
            Debug.LogError("Failed to stop publisher..");
        }

        flag = NT_PB_U3D_Close(pusher_handle_);

        if (flag == DANIULIVE_RETURN_OK)
        {
            Debug.Log("Publisher closed successfully..");
        }
        else
        {
            Debug.LogError("Failed to close publisher..");
        }

        pusher_handle_ = 0;

        NT_PB_U3D_UnInit();

        is_running = false;
    }
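
To mirror Push(), a Stop() entry point only needs to guard on is_running and call ClosePusher(); hooking it into OnApplicationQuit() keeps the native publisher from leaking when the app exits. The methods below are not part of the original article and are just a minimal sketch, including one plausible way to wire the push button as a start/stop toggle:

    public void Stop()
    {
        if (!is_running)
            return;

        ClosePusher();
    }

    private void OnPusherBtnClicked()
    {
        // Assumed behaviour: the push button toggles between start and stop.
        if (is_running)
            Stop();
        else
            Push();
    }

    private void OnApplicationQuit()
    {
        // Release the native publisher when the application exits.
        Stop();
    }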

For ease of testing, Update() refreshes the current time:

    private void Update()
    {
        // Get the current time
        hour = DateTime.Now.Hour;
        minute = DateTime.Now.Minute;
        millisecond = DateTime.Now.Millisecond;
        second = DateTime.Now.Second;
        year = DateTime.Now.Year;
        month = DateTime.Now.Month;
        day = DateTime.Now.Day;

        GameObject.Find("Canvas/Panel/LableText").GetComponent<Text>().text = string.Format("{0:D2}:{1:D2}:{2:D2}:{3:D2} " + "{4:D4}/{5:D2}/{6:D2}", hour, minute, second, millisecond, year, month, day);
    }
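
Calling GameObject.Find() and GetComponent() on every frame works, but it is relatively expensive; a common Unity pattern is to cache the Text reference once and reuse it. An optional variant of the code above (the time_label_ field is introduced here purely for illustration):

    private Text time_label_;    // cached once instead of GameObject.Find() per frame

    private void Awake()
    {
        time_label_ = GameObject.Find("Canvas/Panel/LableText").GetComponent<Text>();
    }

    private void Update()
    {
        DateTime now = DateTime.Now;

        time_label_.text = string.Format("{0:D2}:{1:D2}:{2:D2}:{3:D3} {4:D4}/{5:D2}/{6:D2}",
            now.Hour, now.Minute, now.Second, now.Millisecond, now.Year, now.Month, now.Day);
    }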

Event handling

    public void onNTSmartEvent(string param)
    {
        if (!param.Contains(","))
        {
            Debug.Log("[onNTSmartEvent] invalid parameter passed from the Android layer");
            return;
        }

        string[] strs = param.Split(',');

        // The event string is expected to carry the handle, the code and four extra parameters.
        if (strs.Length < 6)
        {
            Debug.Log("[onNTSmartEvent] unexpected parameter count: " + strs.Length);
            return;
        }

        string player_handle = strs[0];
        string code = strs[1];
        string param1 = strs[2];
        string param2 = strs[3];
        string param3 = strs[4];
        string param4 = strs[5];

        Debug.Log("[onNTSmartEvent] code: 0x" + Convert.ToString(Convert.ToInt32(code), 16));

        String publisher_event = "";

        switch (Convert.ToInt32(code))
        {
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_STARTED:
                publisher_event = "Started..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTING:
                publisher_event = "Connecting..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTION_FAILED:
                publisher_event = "Connection failed..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTED:
                publisher_event = "Connected..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_DISCONNECTED:
                publisher_event = "Disconnected..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_STOP:
                publisher_event = "Stopped..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_RECORDER_START_NEW_FILE:
                publisher_event = "Started a new recording file: " + param3;
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_ONE_RECORDER_FILE_FINISHED:
                publisher_event = "Finished one recording file: " + param3;
                break;

            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_SEND_DELAY:
                publisher_event = "Send delay: " + param1 + " frame count: " + param2;
                break;

            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CAPTURE_IMAGE:
                publisher_event = "Snapshot: " + param1 + " path: " + param3;

                if (Convert.ToInt32(param1) == 0)
                {
                    publisher_event = publisher_event + " snapshot taken successfully..";
                }
                else
                {
                    publisher_event = publisher_event + " failed to take snapshot..";
                }
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_RTSP_URL:
                publisher_event = "RTSP service URL: " + param3;
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUSH_RTSP_SERVER_RESPONSE_STATUS_CODE:
                publisher_event = "RTSP status code received, codeID: " + param1 + ", RTSP URL: " + param3;
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUSH_RTSP_SERVER_NOT_SUPPORT:
                publisher_event = "The server does not support RTSP push, pushed RTSP URL: " + param3;
                break;
        }

        Debug.Log(publisher_event);

    }

Summary

With the flow above, Unity screen or camera data on Android can be pushed over RTMP and played back with millisecond-level latency. Interested developers can use it as a reference.

This concludes the example of implementing RTMP push from Unity3D on Android. For more on RTMP push from Unity3D on Android, please see the other related articles on 脚本之家 (jb51.net)!
