/*
Copyright (c) 2010 Martin Lucina
This file is part of zeromq-examples.
zeromq-examples is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
zeromq-examples is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <assert.h>
#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include <arpa/inet.h>
#include <zmq.h>
#include <unicap.h>
#include <ucil.h>
#include <SDL.h>
#define FOURCC(a,b,c,d) (unsigned int)((((unsigned int)d)<<24)+\
(((unsigned int)c)<<16)+(((unsigned int)b)<<8)+a)
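/*
 * Each published 0MQ message carries one video frame with the layout:
 *
 *   uint32_t  image width in pixels  (network byte order)
 *   uint32_t  image height in pixels (network byte order)
 *   width * height * 3 bytes of RGB24 pixel data (tightly packed rows)
 */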
/* inproc endpoint used to provide the "local loopback" view of the camera */
const char local_camera[] = "inproc://local-camera";
/* Sender thread initialiser */
struct sender_args_t
{
const char *endpoint;
void *ctx;
};
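/* A pointer to this struct is handed to the sender thread via
   pthread_create(), so the instance must remain valid for as long as
   that thread runs. */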
/*
* Sender thread
*/
void *sender_thread (void *arg)
{
struct sender_args_t *sender_args;
void *s;
int rc;
unicap_handle_t handle;
unicap_device_t device;
unicap_format_t src_format;
unicap_format_t dest_format;
unicap_data_buffer_t src_buffer;
unicap_data_buffer_t dest_buffer;
unicap_data_buffer_t *returned_buffer;
sender_args = (struct sender_args_t *)arg;
    /* Create a ZMQ_PUB socket for sending video data. Bind it to the
       endpoint specified by the user and also connect it to the inproc
       endpoint that provides the local loopback view. */
s = zmq_socket (sender_args->ctx, ZMQ_PUB);
assert (s);
rc = zmq_bind (s, sender_args->endpoint);
if (rc != 0) {
fprintf (stderr, "zmq_bind (\"%s\"): %s\n", sender_args->endpoint,
zmq_strerror (errno));
exit (1);
}
rc = zmq_connect (s, local_camera);
if (rc != 0) {
fprintf (stderr, "zmq_connect (\"%s\"): %s\n", local_camera,
zmq_strerror (errno));
exit (1);
}
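    /* Note: a single PUB socket can be bound and connected at the same
       time; every frame published below is fanned out both to remote
       subscribers on the user-supplied endpoint and to the local SUB
       socket listening on the inproc endpoint. */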
/* Open first available video capture device. */
if (!SUCCESS (unicap_enumerate_devices (NULL, &device, 0))) {
fprintf (stderr, "Could not enumerate devices\n");
exit (1);
}
if (!SUCCESS (unicap_open (&handle, &device))) {
fprintf (stderr, "Failed to open device: %s\n", device.identifier);
exit (1);
}
printf( "Opened video capture device: %s\n", device.identifier );
/* Find a suitable video format that we can convert to RGB24. */
int conversion_found = 0;
int index = 0;
while (SUCCESS (unicap_enumerate_formats (handle, NULL, &src_format,
index))) {
printf ("Trying video format: %s\n", src_format.identifier);
if (ucil_conversion_supported (FOURCC ('R', 'G', 'B', '3'),
src_format.fourcc)) {
conversion_found = 1;
break;
}
index++;
}
if (!conversion_found) {
fprintf (stderr, "Could not find a suitable video format\n");
exit (1);
}
src_format.buffer_type = UNICAP_BUFFER_TYPE_USER;
if (!SUCCESS (unicap_set_format (handle, &src_format))) {
fprintf (stderr, "Failed to set video format\n");
exit (1);
}
printf ("Using video format: %s [%dx%d]\n",
src_format.identifier,
src_format.size.width,
src_format.size.height);
/* Clone destination format with equal dimensions, but RGB24 colorspace. */
unicap_copy_format (&dest_format, &src_format);
strcpy (dest_format.identifier, "RGB 24bpp");
dest_format.fourcc = FOURCC ('R', 'G', 'B', '3');
dest_format.bpp = 24;
dest_format.buffer_size = dest_format.size.width *
dest_format.size.height * 3;
/* Initialise image buffers. */
memset (&src_buffer, 0, sizeof (unicap_data_buffer_t));
src_buffer.data = (unsigned char*) malloc (src_format.buffer_size);
src_buffer.buffer_size = src_format.buffer_size;
memset (&dest_buffer, 0, sizeof (unicap_data_buffer_t));
dest_buffer.data = (unsigned char*) malloc (dest_format.buffer_size);
dest_buffer.buffer_size = dest_format.buffer_size;
dest_buffer.format = dest_format;
/* Start video capture. */
if (!SUCCESS (unicap_start_capture (handle))) {
fprintf (stderr, "Failed to start capture on device: %s\n",
device.identifier);
exit (1);
}
    /* Loop, publishing one message per raw video frame. */
while (1) {
zmq_msg_t msg;
size_t msg_size;
uint32_t image_width, image_height;
unsigned char *data;
/* Queue buffer for video capture. */
if (!SUCCESS (unicap_queue_buffer (handle, &src_buffer))) {
fprintf (stderr, "Failed to queue a buffer on device: %s\n",
device.identifier);
exit (1);
}
/* Wait until buffer is ready. */
if (!SUCCESS (unicap_wait_buffer (handle, &returned_buffer))) {
fprintf (stderr, "Failed to wait for buffer on device: %s\n",
device.identifier);
exit (1);
}
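        /* With a single user-supplied buffer queued, the buffer returned
           here is src_buffer itself, now holding one captured frame, so
           the conversion below reads from src_buffer directly. */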
/* Convert colorspace. */
        if (!SUCCESS (ucil_convert_buffer (&dest_buffer, &src_buffer))) {
            /* TODO: This fails sometimes for unknown reasons;
               skip the frame rather than publishing stale data. */
            fprintf (stderr, "Failed to convert video buffer\n");
            continue;
        }
/* Create 0MQ message and fill in data. */
msg_size = dest_format.buffer_size + (2 * sizeof (uint32_t));
rc = zmq_msg_init_size (&msg, msg_size);
assert (rc == 0);
data = (unsigned char *)zmq_msg_data (&msg);
/* Image width (uint32_t in network byte order) */
image_width = htonl (dest_format.size.width);
memcpy (data, &image_width, sizeof (uint32_t));
data += sizeof (uint32_t);
/* Image height (uint32_t in network byte order) */
image_height = htonl (dest_format.size.height);
memcpy (data, &image_height, sizeof (uint32_t));
data += sizeof (uint32_t);
/* RGB24 image data. */
memcpy (data, dest_buffer.data, dest_format.buffer_size);
/* Send message to network. */
rc = zmq_send (s, &msg, 0);
assert (rc == 0);
zmq_msg_close (&msg);
}
return NULL;
}
/*
* Print usage and exit.
*/
void err_usage(void)
{
const char usage[] = \
"Usage: zmq-camera -r | -s ENDPOINT\n" \
"Sends or receives video using 0MQ.\n" \
"\n" \
" -r receive video from ENDPOINT\n" \
" -s send video to ENDPOINT\n" \
"\n" \
"ENDPOINT is any 0MQ endpoint valid for a ZMQ_PUB/ZMQ_SUB socket.\n"
"\n" \
"Examples:\n" \
"\n" \
" zmq-camera -s tcp://eth0:5555\n" \
" zmq-camera -r tcp://eth0:5555\n";
fprintf (stderr, "%s", usage);
exit (1);
}
/*
* Main thread
*/
int main (int argc, char *argv [])
{
void *ctx, *s;
int rc;
int is_sender = 0;
char *endpoint = NULL;
SDL_Surface *screen = NULL;
SDL_Surface *rgb_surface = NULL;
uint32_t image_width, image_height;
int sdl_initialised = 0;
int quit = 0;
/* Parse command line. */
if (argc != 3)
err_usage();
if (strcmp (argv [1], "-r") == 0)
is_sender = 0;
else if (strcmp (argv [1], "-s") == 0)
is_sender = 1;
else
err_usage();
endpoint = argv [2];
/* Initialise 0MQ infrastructure for 2 application threads and
a single I/O thread */
ctx = zmq_init (2, 1, 0);
assert (ctx);
/* Create a ZMQ_SUB socket to receive video data. If we're sending video,
bind to an inproc: endpoint that the sender thread will connect to,
otherwise connect to the endpoint the user specified. */
s = zmq_socket (ctx, ZMQ_SUB);
assert (s);
if (is_sender) {
rc = zmq_bind (s, local_camera);
if (rc != 0) {
fprintf (stderr, "zmq_bind (\"%s\"): %s\n", local_camera,
zmq_strerror (errno));
exit (1);
}
        /* Start the sender thread after binding to the inproc: endpoint
           since this must exist for it to connect. The argument struct is
           made static so that it outlives this block; the sender thread
           keeps a pointer to it for its whole lifetime. */
        static struct sender_args_t sender_args;
        sender_args.endpoint = endpoint;
        sender_args.ctx = ctx;
        pthread_t sender;
        rc = pthread_create (&sender, NULL, sender_thread,
            (void*) &sender_args);
        assert (rc == 0);
}
else {
rc = zmq_connect (s, endpoint);
if (rc != 0) {
fprintf (stderr, "zmq_connect (\"%s\"): %s\n", endpoint,
zmq_strerror (errno));
exit (1);
}
}
/* Subscribe to all messages on socket. */
rc = zmq_setsockopt (s, ZMQ_SUBSCRIBE, "", 0);
assert (rc == 0);
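    /* A ZMQ_SUB socket receives nothing until a subscription is set; the
       empty prefix above matches every message. Note that PUB/SUB offers
       no reliability: a subscriber only sees frames published after it has
       connected, which is acceptable for a live video feed. */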
/* Display video until user asks to quit. */
while (!quit) {
zmq_msg_t msg;
unsigned char *data;
SDL_Event event;
/* Receive single message. */
rc = zmq_msg_init (&msg);
assert (rc == 0);
rc = zmq_recv (s, &msg, 0);
assert (rc == 0);
/* Parse message data. */
data = (unsigned char*) zmq_msg_data (&msg);
        /* Sanity check that the message contains at least the width and
           height fields. */
assert (zmq_msg_size (&msg) >= sizeof (uint32_t) + sizeof (uint32_t));
/* Get image width in pixels. */
memcpy (&image_width, data, sizeof (uint32_t));
image_width = ntohl (image_width);
data += sizeof (uint32_t);
/* Get image height in pixels. */
memcpy (&image_height, data, sizeof (uint32_t));
image_height = ntohl (image_height);
data += sizeof (uint32_t);
/* data now points to RGB24 pixel data. */
if (!sdl_initialised) {
            /* Initialise SDL if not already done. We need to have received
               at least one message so that we know the size of the images
               being sent. */
if (SDL_Init (SDL_INIT_VIDEO) < 0)
{
fprintf (stderr, "Failed to initialize SDL: %s\n",
SDL_GetError());
exit (1);
}
screen = SDL_SetVideoMode (image_width, image_height, 32,
SDL_HWSURFACE);
if (screen == NULL) {
fprintf (stderr, "Unable to set video mode: %s\n",
SDL_GetError ());
SDL_Quit ();
exit (1);
}
/* Set window title to endpoint we are receiving from */
if (is_sender)
SDL_WM_SetCaption (local_camera, local_camera);
else
SDL_WM_SetCaption (endpoint, endpoint);
sdl_initialised = 1;
}
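        /* The window size is fixed by the first frame received; the sender
           never changes format mid-stream, so later frames are assumed to
           have the same dimensions. */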
/* Create RGB surface. */
rgb_surface = SDL_CreateRGBSurfaceFrom (
data, /* Pixel data */
image_width, /* Width */
image_height, /* Height */
24, /* Depth */
image_width * 3, /* Scanline pitch */
0, 0, 0, 0); /* TODO: RGBA mask */
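        /* The pitch is image_width * 3 because the RGB24 rows in the
           message are tightly packed with no padding between scanlines. */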
/* Blit surface to screen. */
SDL_BlitSurface (rgb_surface, NULL, screen, NULL);
SDL_UpdateRect (screen, 0, 0, 0, 0);
SDL_FreeSurface (rgb_surface);
/* Free zmq_msg we received */
zmq_msg_close (&msg);
/* Check if user asked to quit. */
while (SDL_PollEvent (&event))
{
if (event.type == SDL_QUIT)
quit = 1;
}
}
/* TODO: Send a 'stop' message to sender thread rather than killing it
forcefully by terminating the main thread. */
/* Cleanup */
SDL_Quit ();
return 0;
}