How to Implement RTMP Push in Unity3D on Android


This article walks through how to implement RTMP push under Unity3D on Android. If you are not yet familiar with the topic, hopefully you will get something useful out of it.

For screen capture, there are two options:

1. Wrap the native Android screen-capture code directly, expose an interface to Unity, and after the screen-capture permission is granted, grab the screen data and push it;

2. If you only need to push the contents of a Unity window or camera, you can obtain the raw data to be pushed inside Unity, wrap the native RTMP push interface, and call the native SDK to push the data. The advantage of this approach is that you can decide exactly which data to capture; you only need to feed the data through the interface exposed by the native SDK. This article covers the concrete implementation.

Taking the Android platform as an example, this article describes RTMP push from the Unity environment on Android: data capture is done in Unity, while encoding and pushing are handled by calling the wrapper interfaces that 大牛直播SDK (DaniuLive, official) exposes on top of its native Android RTMP push SDK library, which implements RTMP push efficiently. Without further ado, let's look at the result first.

The picture below shows the screen being captured in the Unity environment on Android, encoded and pushed to an RTMP server, then pulled and played by a player on Windows. To make the latency easy to see, the current time is displayed in the Unity window on the Android side; as the picture shows, the overall latency is at the millisecond level:

[Figure: screen captured in Unity on Android, pushed over RTMP, and played back on a Windows player; the on-screen clock shows the end-to-end latency]

Data capture and push

Data capture in Unity is relatively simple; RGB24 data can be obtained easily:

// Create the capture texture once at the target resolution (RGB24, no mipmaps)
texture_ = new Texture2D(video_width_, video_height_, TextureFormat.RGB24, false);

// Per frame: copy the current screen contents into the texture
texture_.ReadPixels(new Rect(0, 0, video_width_, video_height_), 0, 0, false);
texture_.Apply();

Then call texture_.GetRawTextureData() to obtain the raw data.

After obtaining the data, call the NT_PB_U3D_OnCaptureVideoRGB24PtrData() interface wrapped over the native SDK to deliver it.
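As a minimal sketch only (not the SDK's documented usage): the per-frame loop below captures the screen at the end of each rendered frame and hands the RGB24 buffer to the native layer. The coroutine approach, the pinning of the buffer, and the parameter list of NT_PB_U3D_OnCaptureVideoRGB24PtrData() (handle, data pointer, stride assumed to be width * 3, width, height) are assumptions here; check the actual SDK wrapper for the real signature. It requires using System.Collections and using System.Runtime.InteropServices.

private IEnumerator CaptureLoop()
    {
        while (is_running)
        {
            // Wait until the frame has finished rendering before reading pixels
            yield return new WaitForEndOfFrame();

            texture_.ReadPixels(new Rect(0, 0, video_width_, video_height_), 0, 0, false);
            texture_.Apply();

            // Tightly packed RGB24, 3 bytes per pixel; note that Unity stores rows
            // bottom-up, so the native side may need to flip the image vertically
            byte[] rgb24 = texture_.GetRawTextureData();

            // Pin the managed buffer so the native code can read it safely
            GCHandle handle = GCHandle.Alloc(rgb24, GCHandleType.Pinned);
            try
            {
                // Hypothetical parameter order, for illustration only
                NT_PB_U3D_OnCaptureVideoRGB24PtrData(pusher_handle_,
                    handle.AddrOfPinnedObject(), video_width_ * 3,
                    video_width_, video_height_);
            }
            finally
            {
                handle.Free();
            }
        }
    }

Such a loop could be started with StartCoroutine(CaptureLoop()) once NT_PB_U3D_StartPublisher() returns OK, and it stops by itself after is_running is set back to false in ClosePusher().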

Basic calling flow

private void Start()
    {
        // Cache this GameObject's name so native-side events can be routed back to it
        game_object_ = this.gameObject.name;

        // Get the current Activity from the UnityPlayer class
        AndroidJavaClass android_class = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        java_obj_cur_activity_ = android_class.GetStatic<AndroidJavaObject>("currentActivity");

        // Create the Java-side wrapper object of the push SDK
        pusher_obj_ = new AndroidJavaObject("com.daniulive.smartpublisher.SmartPublisherUnity3d");

        NT_PB_U3D_Init();

        //NT_U3D_SetSDKClientKey("", "", 0);

        // Wire up the UI buttons
        btn_encode_mode_.onClick.AddListener(OnEncodeModeBtnClicked);
        btn_pusher_.onClick.AddListener(OnPusherBtnClicked);
        btn_mute_.onClick.AddListener(OnMuteBtnClicked);
    }
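A side note: the NT_PB_U3D_* calls used throughout this sample are the C# wrapper layer around the native SDK, and their implementation is not shown in the original post. Purely as an assumption for illustration, such a wrapper could forward calls to the SmartPublisherUnity3d Java object created in Start(); the Java-side method name below is a placeholder, not a confirmed SDK API:

// Hypothetical sketch of one wrapper method; the real SDK wrapper may use a
// different mechanism (for example P/Invoke into a native library) and
// different Java-side method names.
private void NT_PB_U3D_Init()
    {
        // "InitPublisher" is a placeholder method name for illustration only
        pusher_obj_.Call("InitPublisher", java_obj_cur_activity_);
    }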

After the interfaces are initialized, call the Push() interface:

public void Push()
    {
        if (is_running)
        {
            Debug.Log("已推送..");   
            return;
        }

        if (texture_ != null)
        {
            UnityEngine.Object.Destroy(texture_);
            texture_ = null;
        }

        video_width_ = Screen.width;
        video_height_ = Screen.height;

        // Pre-compute a half-resolution size, rounded up to an even value
        // (even dimensions are generally required by H.264 encoders)
        scale_width_ = (video_width_ + 1) / 2;
        scale_height_ = (video_height_ + 1) / 2;

        if (scale_width_ % 2 != 0)
        {
            scale_width_ = scale_width_ + 1;
        }

        if (scale_height_ % 2 != 0)
        {
            scale_height_ = scale_height_ + 1;
        }

        texture_ = new Texture2D(video_width_, video_height_, TextureFormat.RGB24, false);

        // Get the URL from the input field
        string url = input_url_.text.Trim();

        if (!url.StartsWith("rtmp://"))
        {
            push_url_ = "rtmp://192.168.0.199:1935/hls/stream1";
        }
        else
        {
            push_url_ = url;
        }

        OpenPusher();

        if (pusher_handle_ == 0)
            return;

        NT_PB_U3D_Set_Game_Object(pusher_handle_, game_object_);

        /* ++ Pre-push parameter configuration can be added here ++ */

        InitAndSetConfig();

        NT_PB_U3D_SetPushUrl(pusher_handle_, push_url_);
        /* -- Pre-push parameter configuration can be added here -- */

        int flag = NT_PB_U3D_StartPublisher(pusher_handle_);

        if (flag == DANIULIVE_RETURN_OK)
        {
            Debug.Log("Push started successfully..");
        }
        else
        {
            Debug.LogError("Failed to start push..");
        }

        is_running = true;
    }

Call OpenPusher()

private void OpenPusher()
    {
        if ( java_obj_cur_activity_ == null )
        {
            Debug.LogError("current activity is null");
            return;
        }

        int audio_opt = 1;
        int video_opt = 1;

        pusher_handle_ = NT_PB_U3D_Open(audio_opt, video_opt, video_width_, video_height_);

        if (pusher_handle_ != 0)
            Debug.Log("NT_PB_U3D_Open success");
        else
            Debug.LogError("NT_PB_U3D_Open fail");
    }

InitAndSetConfig()

private void InitAndSetConfig()
    {
        if (is_hw_encode_)
        {
            int h264HWKbps = setHardwareEncoderKbps(true, video_width_, video_height_);

            Debug.Log("h264HWKbps: " + h264HWKbps);

            int isSupportH264HWEncoder = NT_PB_U3D_SetVideoHWEncoder(pusher_handle_, h264HWKbps);

            if (isSupportH264HWEncoder == 0)
            {
                Debug.Log("Great, it supports h.264 hardware encoder!");
            }
        }
        else {
            if (is_sw_vbr_mode_) //H.264 software encoder
            {
                int is_enable_vbr = 1;
                int video_quality = CalVideoQuality(video_width_, video_height_, true);
                int vbr_max_bitrate = CalVbrMaxKBitRate(video_width_, video_height_);

                NT_PB_U3D_SetSwVBRMode(pusher_handle_, is_enable_vbr, video_quality, vbr_max_bitrate);
                //NT_PB_U3D_SetSWVideoEncoderSpeed(pusher_handle_, 2);
            }
        }

        NT_PB_U3D_SetAudioCodecType(pusher_handle_, 1);

        // Target frame rate: 25 fps
        NT_PB_U3D_SetFPS(pusher_handle_, 25);

        // GOP interval: one keyframe every 2 seconds at 25 fps
        NT_PB_U3D_SetGopInterval(pusher_handle_, 25*2);

        //NT_PB_U3D_SetSWVideoBitRate(pusher_handle_, 600, 1200);
    }
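The helpers setHardwareEncoderKbps(), CalVideoQuality() and CalVbrMaxKBitRate() referenced above belong to the sample project and are not shown in the original post. The sketch below only illustrates the general idea of choosing bitrate and quality from the output resolution; every threshold and value in it is an assumption, not a recommendation from the SDK:

// Illustrative only: pick an H.264 bitrate (kbps) based on the output resolution;
// the resolution thresholds and bitrates are assumed values.
private int setHardwareEncoderKbps(bool is_h264, int width, int height)
    {
        // the is_h264 flag is kept only to match the call site above
        int area = width * height;

        if (area <= 640 * 360) return 800;
        if (area <= 1280 * 720) return 1500;
        if (area <= 1920 * 1080) return 2500;
        return 4000;
    }

// Illustrative only: map resolution to a software-encoder quality level;
// the concrete numbers are assumptions.
private int CalVideoQuality(int width, int height, bool is_h264)
    {
        return (width * height <= 1280 * 720) ? 23 : 26;
    }

// Illustrative only: cap VBR at roughly 1.5x the base bitrate.
private int CalVbrMaxKBitRate(int width, int height)
    {
        return setHardwareEncoderKbps(true, width, height) * 3 / 2;
    }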

ClosePusher()

private void ClosePusher()
    {
        if (texture_ != null)
        {
            UnityEngine.Object.Destroy(texture_);
            texture_ = null;
        }

        int flag = NT_PB_U3D_StopPublisher(pusher_handle_);
        
        if (flag == DANIULIVE_RETURN_OK)
        {
            Debug.Log("Stopped successfully..");
        }
        else
        {
            Debug.LogError("Failed to stop..");
        }

        flag = NT_PB_U3D_Close(pusher_handle_);

        if (flag == DANIULIVE_RETURN_OK)
        {
            Debug.Log("Closed successfully..");
        }
        else
        {
            Debug.LogError("Failed to close..");
        }

        pusher_handle_ = 0;

        NT_PB_U3D_UnInit();

        is_running = false;
    }
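The OnPusherBtnClicked() handler registered in Start() is not included in the original post either. As a hedged sketch of how the pieces might be tied together, it could simply toggle between Push() and ClosePusher(), and it is also a good idea to release the pusher when the application quits:

// Illustrative sketch: toggle the publisher from the UI button
public void OnPusherBtnClicked()
    {
        if (is_running)
        {
            ClosePusher();
        }
        else
        {
            Push();
        }
    }

// Make sure native resources are released when the app exits
private void OnApplicationQuit()
    {
        if (is_running)
        {
            ClosePusher();
        }
    }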

To make testing easier, Update() refreshes the current time:

private void Update()
    {
        // Read the clock once per frame so all fields come from the same instant
        DateTime now = DateTime.Now;

        hour = now.Hour;
        minute = now.Minute;
        second = now.Second;
        millisecond = now.Millisecond;
        year = now.Year;
        month = now.Month;
        day = now.Day;

        // For a real project, cache this Text reference instead of calling GameObject.Find every frame
        GameObject.Find("Canvas/Panel/LableText").GetComponent<Text>().text = string.Format("{0:D2}:{1:D2}:{2:D2}:{3:D2} " + "{4:D4}/{5:D2}/{6:D2}", hour, minute, second, millisecond, year, month, day);
    }

Event handling

The native layer posts publisher events back to the GameObject registered earlier via NT_PB_U3D_Set_Game_Object() (presumably through UnitySendMessage), and Unity dispatches them to onNTSmartEvent() as a comma-separated string:

public void onNTSmartEvent(string param)
    {
        if (!param.Contains(","))
        {
            Debug.Log("[onNTSmartEvent] invalid event parameter passed from the Android layer");
            return;
        }

        string[] strs = param.Split(',');

        // Expect: handle, event code, and four event parameters
        if (strs.Length < 6)
        {
            Debug.Log("[onNTSmartEvent] unexpected parameter count: " + strs.Length);
            return;
        }

        string player_handle = strs[0];
        string code = strs[1];
        string param1 = strs[2];
        string param2 = strs[3];
        string param3 = strs[4];
        string param4 = strs[5];

        Debug.Log("[onNTSmartEvent] code: 0x" + Convert.ToString(Convert.ToInt32(code), 16));

        string publisher_event = "";

        switch (Convert.ToInt32(code))
        {
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_STARTED:
                publisher_event = "Started..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTING:
                publisher_event = "Connecting..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTION_FAILED:
                publisher_event = "Connection failed..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTED:
                publisher_event = "Connected..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_DISCONNECTED:
                publisher_event = "Disconnected..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_STOP:
                publisher_event = "Stopped..";
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_RECORDER_START_NEW_FILE:
                publisher_event = "Started a new recording file: " + param3;
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_ONE_RECORDER_FILE_FINISHED:
                publisher_event = "Finished a recording file: " + param3;
                break;

            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_SEND_DELAY:
                publisher_event = "Send delay: " + param1 + ", frame count: " + param2;
                break;

            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CAPTURE_IMAGE:
                publisher_event = "Snapshot: " + param1 + ", path: " + param3;

                if (Convert.ToInt32(param1) == 0)
                {
                    publisher_event = publisher_event + ", snapshot taken successfully..";
                }
                else
                {
                    publisher_event = publisher_event + ", failed to take snapshot..";
                }
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_RTSP_URL:
                publisher_event = "RTSP service URL: " + param3;
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUSH_RTSP_SERVER_RESPONSE_STATUS_CODE:
                publisher_event = "RTSP status code received, codeID: " + param1 + ", RTSP URL: " + param3;
                break;
            case EVENTID.EVENT_DANIULIVE_ERC_PUSH_RTSP_SERVER_NOT_SUPPORT:
                publisher_event = "The server does not support RTSP push, RTSP URL: " + param3;
                break;
        }

        Debug.Log(publisher_event);

    }

That is all for "How to Implement RTMP Push in Unity3D on Android". Thanks for reading, and hopefully it helps you get started.
