Using stream mode

Streams give you access to screen_buffer_t objects (buffers).

The advantage of using a screen_buffer_t object is that it works directly with the Screen API, which makes it easy to pass buffers to render APIs such as OpenGL. To do so, your application consumes the stream that's being produced by the Sensor service.

When you use streams, your application acts in a consumer role; therefore, your application is responsible for acquiring the buffers from the Camera library (producer). To acquire a buffer, your application calls screen_acquire_buffer(). After acquiring the buffer, your application can render and/or process it as necessary, and then it must call screen_release_buffer() to release the acquired buffer back to the producer. To consume streams, you need an understanding of how to work with streams from the Screen Graphics Subsystem. For more information, see Using streams in the Screen Developer's Guide.
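
In isolation, the acquire/release cycle looks like the following sketch. This is a minimal outline only; it assumes consumerStream is a screen_stream_t that's already set up to share the producer's buffers, as shown later in this section:

screen_buffer_t buffer = NULL;
int count = 0;
int *dirty = NULL;

// Acquire the next buffer that the producer has published
if (screen_acquire_buffer(&buffer, consumerStream, &count, &dirty, NULL, 0) == 0) {
    // ... render and/or process the buffer here ...

    // Hand the buffer back to the producer when you're done with it
    screen_release_buffer(buffer);

    // Screen allocates the dirty-rectangle array on your behalf; free it
    if (dirty) {
        free(dirty);
    }
}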

To use streams, your application must disable the default behavior of creating a viewfinder window. To do so, call camera_set_vf_property() with CAMERA_IMGPROP_CREATEWINDOW set to zero before you call camera_start_viewfinder(). After you start the viewfinder, use Screen event handling (see Screen Events) and call screen_get_event_property_pv() to retrieve a reference to the stream. After you have a reference to the stream, you can consume from that stream in a manner suitable for your application's requirements.

Here's a code snippet that shows you how to access a stream, get timestamps, and print them out:
...
...
camera_handle_t cameraHandle;
screen_context_t context;
screen_window_t window;

// Handle to the viewfinder window reported in the Close event
screen_window_t viewfinderHandle = NULL;

int err = 0;
int rc = 0;

// Interval (in seconds) at which to log the frame rate (example value)
const int STREAM_FRAME_RATE_LOG_INTERVAL = 5;

// Application's (consumer) stream handle
screen_stream_t cstream;

// Camera library (producer) stream handle
screen_stream_t pstream = NULL;

    
// Create a screen context and a window
screen_create_context(&context, 0);
screen_create_window(&window, context);


//Connect to the camera and set the viewfinder mode
camera_open(CAMERA_UNIT_1, CAMERA_MODE_RW | CAMERA_MODE_ROLL, &cameraHandle);
camera_set_vf_mode(cameraHandle, CAMERA_VFMODE_VIDEO);

//Disable the viewfinder window creation
camera_set_vf_property(cameraHandle, CAMERA_IMGPROP_CREATEWINDOW, 0);

// Start the viewfinder; statusCallback is the application's
// status callback function (not shown in this snippet)
camera_start_viewfinder(cameraHandle, NULL, statusCallback, NULL);

// We need to catch the Create event. Catching it gives us
// access to the stream from the Camera library (producer).
screen_event_t screenEvent;
bool gotEvent = false;


// The application creates the event, so it's local (client-side)
if (screen_create_event(&screenEvent) == -1) {
    err = errno;
    printf("Failed to create screen event: err = %d\n", err);
    return err;
}

// Handling the Screen events
while (screen_get_event(context, screenEvent, -1) == 0) {
   int eventType;
   int objectType;

   if (screen_get_event_property_iv(screenEvent, SCREEN_PROPERTY_TYPE, &eventType) == -1) {
      err = errno;
      printf("Failed to get type of screen event: err = %d\n", err);
      screen_destroy_event(screenEvent);
      break;
   }
   if (eventType == SCREEN_EVENT_PROPERTY) {
      screen_get_event_property_iv(screenEvent, SCREEN_PROPERTY_OBJECT_TYPE, 
                                                &objectType);
   }
   else if (eventType == SCREEN_EVENT_CREATE) {
      // Got the Create event
      screen_get_event_property_iv(screenEvent, SCREEN_PROPERTY_OBJECT_TYPE,
                                   &objectType);
      // Get a reference to the stream from the Sensor service
      if (objectType == SCREEN_OBJECT_TYPE_STREAM) {
         screen_get_event_property_pv(screenEvent, SCREEN_PROPERTY_STREAM,
                                      (void **)&pstream);
      }
      // You now have a reference to memory that Screen allocated for you.
      // You're responsible for cleaning this up!
      if (pstream) {
         gotEvent = true;
         break;
      }
   } // end else if
   else if (eventType == SCREEN_EVENT_CLOSE) {
      // Got a Close event; make sure it's for a window close
      if (screen_get_event_property_iv(screenEvent,
                                       SCREEN_PROPERTY_OBJECT_TYPE,
                                       &objectType) == -1) {
         err = errno;
         printf("Failed to get object type of screen event: err = %d\n", err);
         break;
      }
      if (objectType == SCREEN_OBJECT_TYPE_WINDOW) {
         // Get the child window handle
         if (screen_get_event_property_pv(screenEvent, SCREEN_PROPERTY_WINDOW,
                                          (void **)&viewfinderHandle) == -1) {
            err = errno;
            printf("Failed to get screen event window: err = %d\n", err);
            break;
         }
         // Destroy this window handle to free the small amount of memory
         // allocated on our behalf; otherwise, you'll have a memory leak
         if (screen_destroy_window(viewfinderHandle) == -1) {
            err = errno;
            printf("Failed to destroy window remnants: err = %d\n", err);
            break;
         }
      }
   } // end else if
} // end while

//Create a consumer stream to share and access the producer stream
screen_buffer_t sbuffer = NULL;
screen_create_stream(&cstream, context);

const char *id = "mystreamexample";
screen_set_stream_property_cv(cstream, SCREEN_PROPERTY_ID_STRING, strlen(id), id);
    
const int usage = SCREEN_USAGE_NATIVE;
screen_set_stream_property_iv(cstream, SCREEN_PROPERTY_USAGE, &usage);

// Share the producer's buffers with the consumer stream;
// retry until the producer's buffers become available
while ((rc = screen_consume_stream_buffers(cstream, 0, pstream)) != 0) {
    printf("screen_consume_stream_buffers() ret %d, errno=%d\n", rc, errno);
    sleep(1);
}
    
// Use the stream
int count;
uint64_t frameCount = 0;
struct timespec currentTime;
struct timespec previousTime;

// Preset the starting time
clock_gettime(CLOCK_MONOTONIC, &previousTime);
do {
    screen_buffer_t prev = sbuffer;
    count = 0;
    int *dirty = NULL;
    while (screen_acquire_buffer(&sbuffer, cstream, &count,
                                 &dirty, NULL, 0)) {
        printf("screen_acquire_buffer() failed, err=%d\n", errno);
    }

    // Release the previously acquired buffer back to the producer
    if (prev && prev != sbuffer) {
        screen_release_buffer(prev);
    }

    if (count > 0 && dirty != NULL) {
        frameCount++;
        // Get the current time.
        clock_gettime(CLOCK_MONOTONIC, &currentTime);
        if (currentTime.tv_sec >= (previousTime.tv_sec +
                                   STREAM_FRAME_RATE_LOG_INTERVAL)) {
            // Calculate the frame rate since last time.
            time_t intervalTime = (currentTime.tv_sec*1000 +
                                   currentTime.tv_nsec/1000000) -
                                  (previousTime.tv_sec*1000 +
                                   previousTime.tv_nsec/1000000);
            if (intervalTime) {
                printf("framerate %llu\n",
                       (unsigned long long)(frameCount*1000/intervalTime));
            }
            memcpy(&previousTime, &currentTime, sizeof(struct timespec));
            frameCount = 0;
        }
    } else if (count > 0) {
        printf("Acquired buffer count = %d\n", count);
    } else if (sbuffer) {
        printf("acquired buffer\n");
    }
    if (dirty) {
        free(dirty);
    }

} while (count);
...
...
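
The snippet above doesn't show cleanup. When your application is finished with the stream, stop the viewfinder and release the handles that were created. A minimal cleanup sketch, assuming the same variable names as the snippet, could look like this:

// Stop the viewfinder and disconnect from the camera
camera_stop_viewfinder(cameraHandle);
camera_close(cameraHandle);

// Release the Screen resources created earlier
screen_destroy_event(screenEvent);
screen_destroy_stream(cstream);
screen_destroy_window(window);
screen_destroy_context(context);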