Use streams

Instead of using a viewfinder window (a Screen window) that the Camera library creates and joining it to a Screen window group in your application, you can use a Stream object.

A Stream object gives you access to a screen_buffer_t object. Using a Stream object lets you share buffers within the same application or process, as well as between different applications and processes. Your application can create a consumer stream that reads from the Stream object that the Camera library (the producer of the stream) provides. This is useful when you don't need to show the camera's image buffers on a display, when you need the GPU to render the image buffers for display, or when you want an external engine to composite the image buffers instead of using Screen's composition features.

For example, you could use OpenGL to display the contents of the image buffer. For more information about working with consumer streams, see the Consumer section in the Screen Developer's Guide.
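If you take the OpenGL route, one common pattern is to wrap the buffer in an EGLImage and sample it as a GL texture. The following sketch is illustrative only: it assumes your EGL implementation supports the EGL_KHR_image_pixmap and GL_OES_EGL_image extensions and accepts a Screen pixmap as the native-pixmap client buffer, and texture_from_pixmap() is a hypothetical helper, not a Camera library or Screen API function:

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>

// Hypothetical helper: turn a Screen pixmap into a GL texture by way of
// an EGLImage (requires EGL_KHR_image_pixmap and GL_OES_EGL_image).
// Whether a given screen_pixmap_t is accepted as the native pixmap is
// platform-specific; check your EGL implementation.
static GLuint texture_from_pixmap(EGLDisplay dpy, screen_pixmap_t pixmap)
{
    PFNEGLCREATEIMAGEKHRPROC createImage =
        (PFNEGLCREATEIMAGEKHRPROC)eglGetProcAddress("eglCreateImageKHR");
    PFNGLEGLIMAGETARGETTEXTURE2DOESPROC imageTargetTexture =
        (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglGetProcAddress("glEGLImageTargetTexture2DOES");

    EGLImageKHR image = createImage(dpy, EGL_NO_CONTEXT, EGL_NATIVE_PIXMAP_KHR,
                                    (EGLClientBuffer)pixmap, NULL);
    GLuint tex = 0;
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    // Bind the EGLImage as the texture's storage; no pixel copy is made
    imageTargetTexture(GL_TEXTURE_2D, (GLeglImageOES)image);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    return tex;
}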

To use a Stream object, your application must disable the default behavior of creating a viewfinder window. To do so, call camera_set_vf_property() with CAMERA_IMGPROP_CREATEWINDOW set to zero before you call camera_start_viewfinder(). After you start the viewfinder, use Screen event handling (see Screen Events) and call screen_get_event_property_pv() to get a reference to the Stream object. After you have a reference to the Stream object, you can use its buffers as screen_buffer_t objects in whatever manner suits your application's requirements.

Here are the steps to use streams:
  1. Open the camera.
  2. Set the viewfinder mode for the camera.
  3. Create a Screen context, window, and buffers. For more information, see Using Streams in the Resource Sharing chapter of the Screen Developer's Guide.
  4. Start the viewfinder and configure camera settings.
  5. Stop the viewfinder and close the camera.
For example, you may want to process the image buffer in some manner to run algorithms on it, and then use EGL and OpenGL to post the content to the display. Here's a code snippet to illustrate:
...
...
camera_handle_t cameraHandle;
screen_context_t context;
screen_window_t window;

// Application's (consumer) stream handle
screen_stream_t cstream = NULL;

// Camera library (producer) stream handle, received through a Screen event
screen_stream_t pstream = NULL;

//For working with EGL (texture_t is an application-defined type)
texture_t *texture_list = NULL;
screen_window_t screen_win;
int bufferCount = 0;
EGLDisplay egl_display = NULL;
EGLSurface egl_surface = NULL;

// Seconds between framerate logs (pick a value that suits your app)
#define STREAM_FRAME_RATE_LOG_INTERVAL 5

int err = 0;
int rc = 0;

// Create a screen context and a window
screen_create_context(&context, 0);
screen_create_window(&window, context);


//Connect to the camera and set the viewfinder mode
camera_open(CAMERA_UNIT_1, CAMERA_MODE_RW | CAMERA_MODE_ROLL, &cameraHandle);
camera_set_vf_mode(cameraHandle, CAMERA_VFMODE_VIDEO);

//Disable the viewfinder window creation
camera_set_vf_property(cameraHandle, CAMERA_IMGPROP_CREATEWINDOW, 0);

//Start the viewfinder (statusCallback is an application-defined
//status callback function)
camera_start_viewfinder(cameraHandle, NULL, statusCallback, NULL);

// Need to catch the SCREEN_CREATE event. Catching it
// gives us a reference to the Stream object that the
// Camera library creates for you.
screen_event_t screenEvent;
bool gotEvent = false;
screen_window_t viewfinderHandle = NULL;


// Create an event that the application owns, so that it can
// receive copies of Screen events locally (as a client)
if (screen_create_event(&screenEvent) == -1) {
    err = errno;
    printf("Failed to create screen event: err = %d\n", err);
    return err;
}

// Handling the Screen events
while (screen_get_event(context, screenEvent, -1) == 0) {
   int eventType;
   int objectType;

   if (screen_get_event_property_iv(screenEvent, SCREEN_PROPERTY_TYPE, &eventType) == -1) {
       err = errno;
       printf("Failed to get type of screen event: err = %d\n", err);
       screen_destroy_event(screenEvent);
       break;
   }
   if (eventType == SCREEN_EVENT_PROPERTY) {
      screen_get_event_property_iv(screenEvent, SCREEN_PROPERTY_OBJECT_TYPE, &objectType);
   }
   else if (eventType == SCREEN_EVENT_CREATE) {
       // Got the create event
       screen_get_event_property_iv(screenEvent, SCREEN_PROPERTY_OBJECT_TYPE, &objectType);
       //Get a reference to the Stream object that the Camera library created
       if (objectType == SCREEN_OBJECT_TYPE_STREAM) {
           screen_get_event_property_pv(screenEvent, SCREEN_PROPERTY_STREAM, (void **)&pstream);
       }

       //You now have a reference to the memory that Screen allocated
       //for you. You're responsible for cleaning this up!
       if (pstream) {
           gotEvent = true;
           break;
       }
   } // end else if
   else if (eventType == SCREEN_EVENT_CLOSE) {
       // Got a close event; make sure it's for a window close
       if (screen_get_event_property_iv(screenEvent,
                                        SCREEN_PROPERTY_OBJECT_TYPE,
                                        &objectType) == -1) {
           err = errno;
           printf("Failed to get object type of screen event: err = %d\n", err);
           break;
       }
       if (objectType == SCREEN_OBJECT_TYPE_WINDOW) {
           // Get the child window handle
           if (screen_get_event_property_pv(screenEvent, SCREEN_PROPERTY_WINDOW,
                                            (void **)&viewfinderHandle) == -1) {
               err = errno;
               printf("Failed to get screen event window: err = %d\n", err);
               break;
           }
           // Call destroy on this window handle to free the small bit
           // of memory allocated on our behalf, or you'll have a
           // memory leak
           if (screen_destroy_window(viewfinderHandle) == -1) {
               err = errno;
               printf("Failed to destroy window remnants: err = %d\n", err);
               break;
           }
       } // end if
   } // end else if
} // end while

//Create a consumer stream to "look" at the stream
screen_buffer_t sbuffer = NULL;
screen_create_stream(&cstream, context);

const char *id = "mystreamexample";
screen_set_stream_property_cv(cstream, SCREEN_PROPERTY_ID_STRING, strlen(id), id);
    
const int usage = SCREEN_USAGE_NATIVE;
screen_set_stream_property_iv(cstream, SCREEN_PROPERTY_USAGE, &usage);
// It's possible that this code runs before the Sensor service creates the
// necessary buffers. If this code runs before the buffers are created,
// delay and try again later.
while ((rc = screen_consume_stream_buffers(cstream, 0, pstream)) != 0) {
    printf("screen_consume_stream_buffers() ret %d, errno=%d\n", rc, errno);
    sleep(1);
}
    
//Your application should be connected to the Camera library's viewfinder stream now
int count;
uint64_t frameCount = 0;
struct timespec currentTime;
struct timespec previousTime;
// Preset the starting time
clock_gettime(CLOCK_MONOTONIC, &previousTime);
do {
    screen_buffer_t prev = sbuffer;
    count = 0;
    int *dirty = NULL;

    while (screen_acquire_buffer(&sbuffer, cstream,
                                 &count, &dirty, NULL, 0)) {
        printf("screen_acquire_buffer() failed, err=%d\n", errno);
    }

    if (prev && prev != sbuffer) {
        screen_release_buffer(prev);
    }

    if (count > 0 && dirty != NULL) {
        frameCount++;
        // Get the current time
        clock_gettime(CLOCK_MONOTONIC, &currentTime);
        if (currentTime.tv_sec >= (previousTime.tv_sec + STREAM_FRAME_RATE_LOG_INTERVAL)) {
            // Calculate the framerate since last time (interval in milliseconds)
            time_t intervalTime = (currentTime.tv_sec*1000 + currentTime.tv_nsec/1000000) -
                                  (previousTime.tv_sec*1000 + previousTime.tv_nsec/1000000);
            if (intervalTime) {
                printf("framerate %llu\n",
                       (unsigned long long)(frameCount * 1000 / intervalTime));
            }
            memcpy(&previousTime, &currentTime, sizeof(struct timespec));
            frameCount = 0;
        }
    } else if (count > 0) {
        printf("acquired buffer count = %d\n", count);
    } else if (sbuffer) {
        printf("acquired buffer\n");
    }
    if (dirty) {
        free(dirty);
    }

} while (count);
...
...
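The snippet elides the shutdown path (step 5 above). A minimal sketch of the cleanup, assuming the handles used above and that the viewfinder is still running, might look like this:

// Stop the viewfinder and disconnect from the camera
camera_stop_viewfinder(cameraHandle);
camera_close(cameraHandle);

// Release the Screen resources that this snippet created
screen_destroy_stream(cstream);
screen_destroy_event(screenEvent);
screen_destroy_window(window);
screen_destroy_context(context);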