Using stream mode

Updated: April 19, 2023

Streams give you access to screen_buffer_t objects (Screen buffers).

The advantage of using screen_buffer_t objects is that you can easily use them with the Screen API. This allows you to easily work with rendering APIs, such as OpenGL. To do so, your application must consume the stream that's being produced by the Sensor service.

When you use streams, your application acts as a consumer and is therefore responsible for acquiring the buffers from the Sensor service (the producer). To acquire a buffer, your application must call screen_acquire_buffer(). After doing so, your application can render and/or process this buffer as necessary, then call screen_release_buffer() to release it back to the Sensor service. To consume streams, you require an understanding of how to work with streams from the Screen Graphics Subsystem. For more information, see Using streams in the Screen Developer's Guide.

To use streams, your application must disable the default behavior of creating a viewfinder window. To do so, it can call camera_set_vf_property() with CAMERA_IMGPROP_CREATEWINDOW set to 0 (zero) before calling camera_start_viewfinder(). After starting the viewfinder, your application can use Screen event-handling and call screen_get_event_property_pv() to retrieve a reference to the stream. Using this reference, your application can consume from the stream in a manner suitable for its requirements.

Here's a code snippet that shows how to access a stream, acquire its buffers, and measure and print the frame rate:
...
...
// Handle to the camera unit; filled in by camera_open() below.
camera_handle_t cameraHandle;
// Screen context and window used by this application.
screen_context_t context;
screen_window_t window;

// Application's (consumer) stream handle
screen_stream_t cstream;
  
// Camera library (producer) stream handle
screen_stream_t pstream;

// Create a screen context and a window
screen_create_context(&context, 0);
screen_create_window(&window, context);

// Connect to the camera and set the viewfinder mode
// (read/write access plus roll control; video viewfinder mode).
camera_open(CAMERA_UNIT_1, CAMERA_MODE_RW | CAMERA_MODE_ROLL, &cameraHandle);
camera_set_vf_mode(cameraHandle, CAMERA_VFMODE_VIDEO);

// Disable the viewfinder window creation
// (must be done BEFORE camera_start_viewfinder(); see the text above).
camera_set_vf_property(cameraHandle, CAMERA_IMGPROP_CREATEWINDOW, 0);

// Start the viewfinder
// NOTE(review): statusCallback is defined outside this snippet.
camera_start_viewfinder(cameraHandle, NULL, statusCallback, NULL);

// We need to catch the Create event. Catching it lets us consume
// from the stream coming from the Camera library (producer).
screen_event_t screenEvent;
bool gotEvent = false;

// The producer stream handle is filled in when the Create event
// for the stream object arrives; NULL means "not received yet".
pstream = NULL;

// The event object is created by this application, so it is owned
// locally (client side) and must be destroyed when no longer needed.
if (screen_create_event(&screenEvent) == -1) {
    err = errno;
    printf("Failed to create screen event: err = %d\n", err);
    return err;
}

// Handle the Screen events
while (screen_get_event(context, screenEvent, -1) == 0) {
   int eventType;
   int objectType;

   if (screen_get_event_property_iv(screenEvent, SCREEN_PROPERTY_TYPE, &eventType) == -1) {
       // Save errno immediately; later library calls may overwrite it.
       err = errno;
       printf("Failed to get type of screen event: err = %d\n", err);
       screen_destroy_event(screenEvent);
       break;
   }
   if (eventType == SCREEN_EVENT_PROPERTY) {
      screen_get_event_property_iv(screenEvent, SCREEN_PROPERTY_OBJECT_TYPE,
                                                &objectType);
   }
   else if (eventType == SCREEN_EVENT_CREATE) {
      // Got the Create event
      screen_get_event_property_iv(screenEvent, SCREEN_PROPERTY_OBJECT_TYPE,
                                                 &objectType);
      // Get a reference to the stream from the Sensor service.
      // Pass the ADDRESS of the handle so Screen can store it for us.
      if (objectType == SCREEN_OBJECT_TYPE_STREAM) {
               screen_get_event_property_pv(screenEvent, SCREEN_PROPERTY_STREAM,
                                                        (void**)&pstream);
      }
      // You now have a reference to the memory that Screen allocated
      // for you. You're responsible for cleaning this up!
      if (pstream) {
            gotEvent = true;
            break;
      }
   } // end else if
   else if (eventType == SCREEN_EVENT_CLOSE) {
      // Got a Close event; make sure it is for a window close
      if (screen_get_event_property_iv(screenEvent,
                                       SCREEN_PROPERTY_OBJECT_TYPE,
                                       &objectType) == -1) {
         err = errno;
         printf("Failed to get object type of screen event: err = %d\n", err);
         break;
      }
      if (objectType == SCREEN_OBJECT_TYPE_WINDOW) {
         // Get child window handle
         if (screen_get_event_property_pv(screenEvent, SCREEN_PROPERTY_WINDOW,
                                                 (void **)&viewfinderHandle) == -1) {
            err = errno;
            printf("Failed to get screen event window: err = %d\n", err);
            break;
         }
         // Call destroy on this window handle to free a small bit
         // of memory allocated on our behalf or you'll have a memory leak.
         // (viewfinderHandle already holds the screen_window_t value that
         // screen_get_event_property_pv() stored; do not dereference it.)
         if (screen_destroy_window(viewfinderHandle) == -1) {
            err = errno;
            printf("Failed to destroy window remnants: err = %d\n", err);
            break;
         }
      }
   }// end else if
} // end while

// Create a consumer stream to share and access the producer stream
screen_buffer_t sbuffer = NULL;
screen_create_stream(&cstream, context);

// Give the consumer stream a human-readable identifier (useful for
// debugging and for tools that enumerate streams).
const char *id = "mystreamexample";
screen_set_stream_property_cv(cstream, SCREEN_PROPERTY_ID_STRING, strlen(id), id);
    
// Declare how the buffers will be used; SCREEN_USAGE_NATIVE requests
// access via native Screen rendering.
const int usage = SCREEN_USAGE_NATIVE;
screen_set_stream_property_iv(cstream, SCREEN_PROPERTY_USAGE, &usage);

// Connect the consumer stream to the producer stream obtained from the
// Create event. Retry once per second until the producer is ready.
// NOTE(review): rc is declared outside this snippet.
while ((rc = screen_consume_stream_buffers(cstream, 0, pstream))) {
      printf("screen_consume_stream_buffers() ret %d, errno=%d\n", rc, errno);
      sleep(1);
}
    
// Use the stream: repeatedly acquire buffers from the producer, track
// how many frames arrive, and log the measured frame rate at a fixed
// interval (STREAM_FRAME_RATE_LOG_INTERVAL, defined outside this snippet).
    int count;
    uint64_t frameCount = 0;
    struct timespec currentTime;
    struct timespec previousTime;
    // Preset starting time
    clock_gettime(CLOCK_MONOTONIC, &previousTime);
    do {
        // Keep the previously acquired buffer so it can be released
        // only after a new one has been acquired.
        screen_buffer_t prev = sbuffer;
        count = 0;
        int *dirty = NULL;
        while (screen_acquire_buffer(&sbuffer, cstream, &count, 
                                     &dirty, NULL, 0)) {
            printf("screen_acquire_buffer() failed, err=%d\n", errno);
        }

        // Release the previous buffer back to the producer once we
        // hold a different one.
        if (prev && prev != sbuffer) {
            screen_release_buffer(prev);
        }

        if (count > 0 && dirty != NULL) {
            frameCount++;
            // Get the current time
            clock_gettime(CLOCK_MONOTONIC, &currentTime);
            if (currentTime.tv_sec >= (previousTime.tv_sec + 
                                       STREAM_FRAME_RATE_LOG_INTERVAL)) {
                // Calculate the framerate since last time, in frames
                // per second (intervalTime is in milliseconds).
                time_t intervalTime = (currentTime.tv_sec*1000 +
                                       currentTime.tv_nsec/1000000) -
                                      (previousTime.tv_sec*1000 + 
                                       previousTime.tv_nsec/1000000);
                if (intervalTime) {
                    // Cast to match the format specifier exactly:
                    // frameCount is uint64_t, so %lld (signed) would be
                    // a format/argument mismatch (undefined behavior).
                    printf("framerate %llu\n",
                           (unsigned long long)(frameCount*1000/intervalTime));
                }
                memcpy(&previousTime, &currentTime, sizeof(struct timespec));
                frameCount = 0;
            }
        } else if (count > 0) {
            printf("Acquired buffer count = %d\n", count);
        } else if (sbuffer) {
            printf("acquired buffer\n");
        }
        // Screen allocates the dirty-rectangle array for us; free it
        // every iteration to avoid a leak.
        if (dirty) {
            free(dirty);
        }

    } while (count);
...
...