
Apps: How to integrate a video streamer module with a video recorder module?

Discussion in 'Android Development' started by vineet_davy, Jul 26, 2011.

  1. vineet_davy (Lurker, Thread Starter)
    Hi everyone. Since I am new here (and new to development), I would love to have some help from you.
    My first project is the implementation of an Android-phone-based video streamer. I am able to record video and save it to the SD card (through programming), but I am not able to stream it over a Wi-Fi network. I have the code for the streamer, but I don't know how to integrate it with the video recorder module, so I need some urgent help.
    Any help would be greatly appreciated.
    Thanks
     


  2. vineet_davy (Lurker, Thread Starter)
    My video recorder code is:

    package ob.android;

    import java.io.IOException;

    import jnix.Pipe;

    import ob.android.model.Model;
    import ob.android.model.ModelUtility;
    import ob.android.view.CameraPreview;
    import ob.android.view.ViewFactory;

    import android.app.Activity;
    import android.content.Context;
    import android.hardware.Sensor;
    import android.hardware.SensorEvent;
    import android.hardware.SensorEventListener;
    import android.hardware.SensorManager;
    import android.location.Location;
    import android.location.LocationListener;
    import android.location.LocationManager;
    import android.media.MediaRecorder;
    import android.os.Bundle;
    import android.view.Gravity;
    import android.view.Window;
    import android.view.WindowManager;
    import android.widget.FrameLayout;
    import android.widget.LinearLayout;

    import static ob.android.Constants.*;

    public class MainActivity extends Activity
    {
        static
        {
            try
            {
                System.loadLibrary("jnix");
                System.loadLibrary("streamer");
            }
            catch(Throwable e)
            {
                error(e.getMessage(), e);
                throw new RuntimeException(e);
            }
        }

        private CameraPreview cameraPreview;

        private Pipe videoPipe = new Pipe();

        private Pipe audioPipe = new Pipe();

        private MediaRecorder videoRecorder;

        private MediaRecorder audioRecorder;

        @Override
        public void onCreate(Bundle savedInstanceState)
        {
            super.onCreate(savedInstanceState);
            debug("onCreate");

            cameraPreview = new CameraPreview(this);

            LinearLayout controlLayout = new LinearLayout(this);
            controlLayout.setOrientation(LinearLayout.VERTICAL);

            final Model model = new Model("android1");
            final ModelUtility modelUtility = new ModelUtility();

            LocationManager locationManager = (LocationManager)getSystemService(Context.LOCATION_SERVICE);
            locationManager.requestLocationUpdates(
                LocationManager.GPS_PROVIDER,
                0,
                0,
                new LocationListener()
                {
                    public void onStatusChanged(String provider, int status, Bundle extras){}

                    public void onProviderEnabled(String provider){}

                    public void onProviderDisabled(String provider){}

                    public void onLocationChanged(Location location)
                    {
                        modelUtility.updateLocation(location, model);
                    }
                });

            SensorManager sensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE);
            sensorManager.registerListener(
                new SensorEventListener()
                {
                    public void onSensorChanged(SensorEvent event)
                    {
                        modelUtility.updateOrientation(
                            new Double(event.values[0]),
                            new Double(event.values[1]),
                            new Double(event.values[2]),
                            model);
                    }

                    public void onAccuracyChanged(Sensor sensor, int accuracy){}
                },
                sensorManager.getDefaultSensor(Sensor.TYPE_ORIENTATION),
                SensorManager.SENSOR_DELAY_NORMAL);

            ViewFactory viewFactory = new ViewFactory(model, this);

            controlLayout.addView(viewFactory.getGPSView());
            controlLayout.addView(viewFactory.getCompassView());
            controlLayout.addView(viewFactory.getButtonView(new Action()
            {
                public void doAction()
                {
                    MainActivity.this.startRecording();
                }
            }, new Action()
            {
                public void doAction()
                {
                    MainActivity.this.stopRecording();
                }
            }));

            LinearLayout controlLayer = new LinearLayout(this);
            controlLayer.setGravity(Gravity.RIGHT);
            controlLayer.addView(controlLayout);

            FrameLayout mainLayout = new FrameLayout(this);
            mainLayout.addView(cameraPreview);
            mainLayout.addView(controlLayer);

            requestWindowFeature(Window.FEATURE_NO_TITLE);
            getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);

            setContentView(mainLayout);
        }

        @Override
        protected void onDestroy()
        {
            super.onDestroy();
            debug("onDestroy");
        }

        @Override
        protected void onPause()
        {
            super.onPause();
            debug("onPause");
            System.exit(0);
        }

        @Override
        protected void onRestart()
        {
            super.onRestart();
            debug("onRestart");
        }

        @Override
        protected void onResume()
        {
            super.onResume();
            debug("onResume");
            try
            {
                cameraPreview.startPreview();
            }
            catch(IOException e)
            {
                error("Unable to start the camera preview", e);
                throw new RuntimeException(e);
            }
        }

        @Override
        protected void onStart()
        {
            super.onStart();
            debug("onStart");
        }

        @Override
        protected void onStop()
        {
            super.onStop();
            debug("onStop");
        }

        private void startRecording()
        {
            // Start the native streamer thread; it reads from the two pipes.
            Thread t = new Thread(new Stream(videoPipe, audioPipe));
            t.setPriority(Thread.MAX_PRIORITY);
            t.start();

            try
            {
                cameraPreview.stopPreview();

                // Note: both recorders write to files on the SD card here,
                // not into videoPipe/audioPipe that the streamer thread reads from.
                videoRecorder = new MediaRecorder();
                videoRecorder.setPreviewDisplay(cameraPreview.getHolder().getSurface());
                videoRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
                videoRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
                videoRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.MPEG_4_SP);
                videoRecorder.setVideoSize(320, 240);
                videoRecorder.setVideoFrameRate(15);
                videoRecorder.setOutputFile("/sdcard/recordtest.m4e");
                videoRecorder.prepare();
                videoRecorder.start();

                audioRecorder = new MediaRecorder();
                audioRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
                audioRecorder.setOutputFormat(MediaRecorder.OutputFormat.RAW_AMR);
                audioRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
                audioRecorder.setOutputFile("/sdcard/recordtest1.amr");
                audioRecorder.prepare();
                audioRecorder.start();
            }
            catch(Exception e)
            {
                error("Unable to start recording", e);
            }
        }

        private void stopRecording()
        {
            videoRecorder.stop();
            videoRecorder.release();
            // Stop and release the audio recorder as well.
            audioRecorder.stop();
            audioRecorder.release();
            videoPipe.closeOutput();
            audioPipe.closeOutput();
            try
            {
                cameraPreview.startPreview();
            }
            catch(IOException e)
            {
                error("Unable to start camera preview", e);
            }
        }
    }
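
    As far as I can tell from the code above, the recorders write to files on the SD card while the native streamer reads from the "input" FileDescriptor of each jnix.Pipe, so nothing in the code shown ever writes into the pipes. One idea would be a variant of startRecording() that points the MediaRecorder output at the write end of the pipe instead of a file. This is only a rough sketch: the getOutputFileDescriptor() accessor on Pipe is an assumption of mine, not the real jnix API.

    // Sketch only: route the recorders into the pipes that the native
    // streamer reads from, instead of writing to /sdcard.
    // ASSUMPTION: jnix.Pipe has a getOutputFileDescriptor() method returning
    // the java.io.FileDescriptor of its write end (hypothetical name).
    private void startRecordingIntoPipes() throws IOException
    {
        videoRecorder = new MediaRecorder();
        videoRecorder.setPreviewDisplay(cameraPreview.getHolder().getSurface());
        videoRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        videoRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
        videoRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.MPEG_4_SP);
        videoRecorder.setVideoSize(320, 240);
        videoRecorder.setVideoFrameRate(15);
        // setOutputFile(FileDescriptor) is a standard MediaRecorder overload;
        // here it receives the pipe's write end rather than an SD-card path.
        videoRecorder.setOutputFile(videoPipe.getOutputFileDescriptor());
        videoRecorder.prepare();
        videoRecorder.start();

        audioRecorder = new MediaRecorder();
        audioRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        audioRecorder.setOutputFormat(MediaRecorder.OutputFormat.RAW_AMR);
        audioRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        audioRecorder.setOutputFile(audioPipe.getOutputFileDescriptor());
        audioRecorder.prepare();
        audioRecorder.start();
    }

    One caveat with this approach: the MPEG-4 container normally needs a seekable output to finalize its headers, so writing it straight into a pipe may not give the framer a stream it can parse from the start. That may be part of why reading the recorded data live is so tricky.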



    And my video streamer code is:

    #include "ob_android_Stream.h"
    #include "config.h"
    #include "AMRAudioFileDescriptorSource.hh"

    #include <liveMedia.hh>
    #include <BasicUsageEnvironment.hh>
    #include <UsageEnvironment.hh>

    extern "C"
    {
    #include <jnix.h>
    }

    Boolean awaitConfigInfo(RTPSink *sink);
    void play();

    static char doneFlag = 0;
    UsageEnvironment *uenv;
    MPEG4VideoStreamFramer *videoSource;
    AMRAudioFileDescriptorSource *audioSource;
    RTPSink *rtpVideoSink;
    RTPSink *rtpAudioSink;
    FILE* videoFile;
    FILE* audioFile;

    void Java_ob_android_Stream_stream(JNIEnv *env, jobject obj)
    {
    jclass streamClazz = env->GetObjectClass(obj);
    if(streamClazz == NULL)
    return;

    jobject videoPipe, audioPipe;
    if(!getObjectField(env, obj, "video", "Ljnix/Pipe;", &videoPipe))
    return;
    if(!getObjectField(env, obj, "audio", "Ljnix/Pipe;", &audioPipe))
    return;

    jobject videoInputFD, audioInputFD;
    if(!getObjectField(env, videoPipe, "input", "Ljava/io/FileDescriptor;", &videoInputFD))
    return;
    if(!getObjectField(env, audioPipe, "input", "Ljava/io/FileDescriptor;", &audioInputFD))
    return;

    jint vfd, afd;
    if(!getIntField(env, videoInputFD, "descriptor", &vfd))
    return;
    if(!getIntField(env, audioInputFD, "descriptor", &afd))
    return;

    if((videoFile = fdopen(vfd, "rb")) == NULL)
    {
    throwException(env, "java/lang/RuntimeException", "Unable to open the video pipe as a file");
    return;
    }
    logDebug("Video file descriptor opened as a file");
    if((audioFile = fdopen(afd, "rb")) == NULL)
    {
    throwException(env, "java/lang/RuntimeException", "Unable to open the video pipe as a file");
    return;
    }
    logDebug("Audio file descriptor opened as a file");

    logDebug("Starting to stream");
    BasicTaskScheduler* scheduler = BasicTaskScheduler::createNew();
    logDebug("Loaded scheduler");
    uenv = BasicUsageEnvironment::createNew(*scheduler);
    logDebug("Loaded environment");
    DarwinInjector* injector = DarwinInjector::createNew(*uenv, "streamer");
    logDebug("Loaded Darwin injector");

    struct in_addr dummyDestAddress;
    dummyDestAddress.s_addr = 0;
    Groupsock rtpGroupsockVideo(*uenv, dummyDestAddress, 0, 0);
    Groupsock rtcpGroupsockVideo(*uenv, dummyDestAddress, 0, 0);
    Groupsock rtpGroupsockAudio(*uenv, dummyDestAddress, 0, 0);
    Groupsock rtcpGroupsockAudio(*uenv, dummyDestAddress, 0, 0);
    logDebug("Created group sockets");

    // Create an 'MPEG-4 Video RTP' sink from the RTP 'groupsock':
    rtpVideoSink = MPEG4ESVideoRTPSink::createNew(*uenv, &rtpGroupsockVideo, 96);
    rtpAudioSink = AMRAudioRTPSink::createNew(*uenv, &rtpGroupsockVideo, 97);

    logDebug("Created a video sink");
    logDebug("Created an audio sink");

    logDebug("Beginning to play");
    play();

    if(!awaitConfigInfo(rtpVideoSink))
    {
    *uenv << "Failed to get MPEG-4 'config' information from input file: "
    << uenv->getResultMsg() << "\n";
    exit(1);
    }

    // Create (and start) a 'RTCP instance' for this RTP sink:
    const unsigned estimatedSessionBandwidthVideo = 200; // in kbps; for RTCP b/w share
    const unsigned maxCNAMElen = 100;
    unsigned char CNAME[maxCNAMElen+1];
    gethostname((char*)CNAME, maxCNAMElen);
    CNAME[maxCNAMElen] = '\0'; // just in case
    logDebug((const char*)CNAME);
    RTCPInstance* videoRTCP =
    RTCPInstance::createNew(*uenv, &rtcpGroupsockVideo,
    estimatedSessionBandwidthVideo, CNAME,
    rtpVideoSink, NULL /* we're a server */);
    RTCPInstance* audioRTCP =
    RTCPInstance::createNew(*uenv, &rtcpGroupsockAudio,
    estimatedSessionBandwidthVideo, CNAME,
    rtpAudioSink, NULL /* we're a server */);
    // Note: This starts RTCP running automatically
    // Add these to our 'Darwin injector':
    injector->addStream(rtpVideoSink, videoRTCP);
    injector->addStream(rtpAudioSink, audioRTCP);
    if(!injector->setDestination(
    "192.168.1.100",
    "hero.sdp",
    "herosession",
    "",
    554,
    "broadcast",
    "broadcast"))
    {
    *uenv << "injector->setDestination() failed: " << uenv->getResultMsg() << "\n";
    exit(1);
    }

    *uenv << "Play this stream (from the Darwin Streaming Server) using the URL:\n"
    << "\trtsp://" << "localhost" << "/" << "test.sdp" << "\n";
    uenv->taskScheduler().doEventLoop();
    }

    void afterPlaying(void* clientData)
    {
        logDebug("...done reading from file");
        Medium::close(videoSource);
    }

    void play()
    {
        // Open the input file as a 'byte-stream file source':
        ByteStreamFileSource* videoFileSource
            = ByteStreamFileSource::createNew(*uenv, videoFile);
        if(videoFileSource == NULL)
        {
            logError("Unable to open video file");
            exit(1);
        }

        FramedSource* videoES = videoFileSource;
        // Create a framer for the Video Elementary Stream:
        videoSource = MPEG4VideoStreamFramer::createNew(*uenv, videoES);
        audioSource = AMRAudioFileDescriptorSource::createNew(*uenv, audioFile);
        // Finally, start playing:
        logDebug("Beginning to read from file...");
        rtpVideoSink->startPlaying(*videoSource, afterPlaying, rtpVideoSink);
        rtpAudioSink->startPlaying(*audioSource, afterPlaying, rtpAudioSink);
    }

    static void checkForAuxSDPLine(void* clientData)
    {
        RTPSink* sink = (RTPSink*)clientData;
        if (sink->auxSDPLine() != NULL)
        {
            // Signal the event loop that we're done:
            doneFlag = ~0;
        }
        else
        {
            // No luck yet. Try again, after a brief delay:
            int uSecsToDelay = 100000; // 100 ms
            uenv->taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)checkForAuxSDPLine, sink);
        }
    }

    Boolean awaitConfigInfo(RTPSink* sink)
    {
        // Check whether the sink's 'auxSDPLine()' is ready:
        checkForAuxSDPLine(sink);
        uenv->taskScheduler().doEventLoop(&doneFlag);
        char const* auxSDPLine = sink->auxSDPLine();
        return auxSDPLine != NULL;
    }



    I am building the streamer code as a library with the Android NDK and integrating it with the recorder module, but the app is not working: it does not even get installed. Please help. Am I making a mistake in reading the recorded file?
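
    For context, the NDK build uses an Android.mk roughly along these lines. The exact source file names, the live555 include paths, and the way the live555 libraries are linked are placeholders for the local project layout, not the real ones.

    # Rough sketch of jni/Android.mk for ndk-build; file names and live555
    # paths below are placeholders, not the actual project files.
    LOCAL_PATH := $(call my-dir)

    # libjnix.so -- the small JNI helper library loaded first by MainActivity.
    include $(CLEAR_VARS)
    LOCAL_MODULE    := jnix
    LOCAL_SRC_FILES := jnix.c
    LOCAL_LDLIBS    := -llog
    include $(BUILD_SHARED_LIBRARY)

    # libstreamer.so -- the live555-based streamer, linked against libjnix.so.
    # The liveMedia/groupsock/UsageEnvironment/BasicUsageEnvironment modules
    # must be defined elsewhere in this file (e.g. as prebuilt static libraries).
    include $(CLEAR_VARS)
    LOCAL_MODULE           := streamer
    LOCAL_SRC_FILES        := ob_android_Stream.cpp AMRAudioFileDescriptorSource.cpp
    LOCAL_C_INCLUDES       := $(LOCAL_PATH)/live/liveMedia/include \
                              $(LOCAL_PATH)/live/groupsock/include \
                              $(LOCAL_PATH)/live/UsageEnvironment/include \
                              $(LOCAL_PATH)/live/BasicUsageEnvironment/include
    LOCAL_STATIC_LIBRARIES := liveMedia groupsock UsageEnvironment BasicUsageEnvironment
    LOCAL_SHARED_LIBRARIES := jnix
    LOCAL_LDLIBS           := -llog
    include $(BUILD_SHARED_LIBRARY)

    The adb install output (an INSTALL_FAILED_* code) and logcat usually say exactly why an APK refuses to install, which should help separate a packaging or manifest problem from a problem with how the recorded data is read.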
     
